Posted to commits@asterixdb.apache.org by im...@apache.org on 2015/08/25 18:43:49 UTC

[01/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Repository: incubator-asterixdb
Updated Branches:
  refs/heads/master 95350e253 -> 34d816305


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDropStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDropStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDropStatement.java
deleted file mode 100644
index 3eb2655..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDropStatement.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class IndexDropStatement implements Statement {
-
-    private Identifier dataverseName;
-    private Identifier datasetName;
-    private Identifier indexName;
-    private boolean ifExists;
-
-    public IndexDropStatement(Identifier dataverseName, Identifier datasetName, Identifier indexName, boolean ifExists) {
-        this.dataverseName = dataverseName;
-        this.datasetName = datasetName;
-        this.indexName = indexName;
-        this.ifExists = ifExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.INDEX_DROP;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getDatasetName() {
-        return datasetName;
-    }
-
-    public Identifier getIndexName() {
-        return indexName;
-    }
-
-    public boolean getIfExists() {
-        return ifExists;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitIndexDropStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-}


[08/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java
new file mode 100644
index 0000000..b45368a
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java
@@ -0,0 +1,298 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedActivity;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedJobInfo.FeedJobState;
+import edu.uci.ics.asterix.common.feeds.FeedRuntimeId;
+import edu.uci.ics.asterix.common.feeds.NodeLoadReport;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
+import edu.uci.ics.asterix.common.feeds.message.FeedCongestionMessage;
+import edu.uci.ics.asterix.common.feeds.message.FeedReportMessage;
+import edu.uci.ics.asterix.common.feeds.message.ScaleInReportMessage;
+import edu.uci.ics.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
+import edu.uci.ics.asterix.file.FeedOperations;
+import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
+import edu.uci.ics.asterix.metadata.feeds.PrepareStallMessage;
+import edu.uci.ics.asterix.metadata.feeds.TerminateDataFlowMessage;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class FeedLoadManager implements IFeedLoadManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedLoadManager.class.getName());
+
+    private static final long MIN_MODIFICATION_INTERVAL = 180000; // 3 minutes
+    private final TreeSet<NodeLoadReport> nodeReports;
+    private final Map<FeedConnectionId, FeedActivity> feedActivities;
+    private final Map<String, Pair<Integer, Integer>> feedMetrics;
+
+    private FeedConnectionId lastModified;
+    private long lastModifiedTimestamp;
+
+    private static final int UNKNOWN = -1;
+
+    public FeedLoadManager() {
+        this.nodeReports = new TreeSet<NodeLoadReport>();
+        this.feedActivities = new HashMap<FeedConnectionId, FeedActivity>();
+        this.feedMetrics = new HashMap<String, Pair<Integer, Integer>>();
+    }
+
+    @Override
+    public void submitNodeLoadReport(NodeLoadReport report) {
+        nodeReports.remove(report);
+        nodeReports.add(report);
+    }
+
+    @Override
+    public void reportCongestion(FeedCongestionMessage message) throws AsterixException {
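+        // Ignore the report if no job state is known, the feed is already under recovery, or this
+        // connection was altered within the last MIN_MODIFICATION_INTERVAL; otherwise scale out by
+        // increasing the compute cardinality of the feed job.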
+        FeedRuntimeId runtimeId = message.getRuntimeId();
+        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
+        if (jobState == null
+                || (jobState.equals(FeedJobState.UNDER_RECOVERY))
+                || (message.getConnectionId().equals(lastModified) && System.currentTimeMillis()
+                        - lastModifiedTimestamp < MIN_MODIFICATION_INTERVAL)) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring congestion report from " + runtimeId);
+            }
+            return;
+        } else {
+            try {
+                FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
+                int inflowRate = message.getInflowRate();
+                int outflowRate = message.getOutflowRate();
+                List<String> currentComputeLocations = new ArrayList<String>();
+                currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message
+                        .getConnectionId().getFeedId()));
+                int computeCardinality = currentComputeLocations.size();
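+                // Scale the current compute cardinality by the inflow/outflow ratio, plus a fixed
+                // headroom of 5 additional partitions.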
+                int requiredCardinality = (int) Math
+                        .ceil((double) ((computeCardinality * inflowRate) / (double) outflowRate)) + 5;
+                int additionalComputeNodes = requiredCardinality - computeCardinality;
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("INCREASING COMPUTE CARDINALITY from " + computeCardinality + " by "
+                            + additionalComputeNodes);
+                }
+
+                List<String> helperComputeNodes = getNodeForSubstitution(additionalComputeNodes);
+
+                // Step 1) Alter the original feed job to adjust the cardinality
+                JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
+                        .getConnectionId());
+                helperComputeNodes.addAll(currentComputeLocations);
+                List<String> newLocations = new ArrayList<String>();
+                newLocations.addAll(currentComputeLocations);
+                newLocations.addAll(helperComputeNodes);
+                FeedUtil.increaseCardinality(jobSpec, FeedRuntimeType.COMPUTE, requiredCardinality, newLocations);
+
+                // Step 2) send prepare-to-stall message
+                gracefullyTerminateDataFlow(message.getConnectionId(), Integer.MAX_VALUE);
+
+                // Step 3) run the altered job specification 
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("New Job after adjusting to the workload " + jobSpec);
+                }
+
+                Thread.sleep(10000);
+                runJob(jobSpec, false);
+                lastModified = message.getConnectionId();
+                lastModifiedTimestamp = System.currentTimeMillis();
+
+            } catch (Exception e) {
+                e.printStackTrace();
+                if (LOGGER.isLoggable(Level.SEVERE)) {
+                    LOGGER.severe("Unable to form the required job for scaling in/out: " + e.getMessage());
+                }
+                throw new AsterixException(e);
+            }
+        }
+    }
+
+    @Override
+    public void submitScaleInPossibleReport(ScaleInReportMessage message) throws Exception {
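+        // Scale in by shrinking the compute cardinality of the collect job, gracefully stalling and
+        // terminating the current data flow, and re-launching the modified job.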
+        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
+        if (jobState == null || (jobState.equals(FeedJobState.UNDER_RECOVERY))) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("JobState information for job " + "[" + message.getConnectionId() + "]" + " not found ");
+            }
+            return;
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Processing scale-in message " + message);
+            }
+            FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
+            JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
+                    .getConnectionId());
+            int reducedCardinality = message.getReducedCardinaliy();
+            List<String> currentComputeLocations = new ArrayList<String>();
+            currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message.getConnectionId()
+                    .getFeedId()));
+            FeedUtil.decreaseComputeCardinality(jobSpec, FeedRuntimeType.COMPUTE, reducedCardinality,
+                    currentComputeLocations);
+
+            gracefullyTerminateDataFlow(message.getConnectionId(), reducedCardinality - 1);
+            Thread.sleep(3000);
+            JobId newJobId = runJob(jobSpec, false);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Launch modified job" + "[" + newJobId + "]" + "for scale-in \n" + jobSpec);
+            }
+
+        }
+    }
+
+    private void gracefullyTerminateDataFlow(FeedConnectionId connectionId, int computePartitionRetainLimit)
+            throws Exception {
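+        // Two-phase shutdown: broadcast a prepare-to-stall message to every intake, compute and storage
+        // location, then send a terminate-data-flow message to the intake locations.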
+        // Step 1) send prepare-to-stall message
+        PrepareStallMessage stallMessage = new PrepareStallMessage(connectionId, computePartitionRetainLimit);
+        List<String> intakeLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+        List<String> computeLocations = FeedLifecycleListener.INSTANCE.getComputeLocations(connectionId.getFeedId());
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+
+        Set<String> operatorLocations = new HashSet<String>();
+
+        operatorLocations.addAll(intakeLocations);
+        operatorLocations.addAll(computeLocations);
+        operatorLocations.addAll(storageLocations);
+
+        JobSpecification messageJobSpec = FeedOperations.buildPrepareStallMessageJob(stallMessage, operatorLocations);
+        runJob(messageJobSpec, true);
+
+        // Step 2) send terminate data flow message to the intake locations
+        TerminateDataFlowMessage terminateMesg = new TerminateDataFlowMessage(connectionId);
+        messageJobSpec = FeedOperations.buildTerminateFlowMessageJob(terminateMesg, intakeLocations);
+        runJob(messageJobSpec, true);
+    }
+
+    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobId jobId = hcc.startJob(spec);
+        if (waitForCompletion) {
+            hcc.waitForCompletion(jobId);
+        }
+        return jobId;
+    }
+
+    @Override
+    public void submitFeedRuntimeReport(FeedReportMessage report) {
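+        // Metrics are keyed by <connectionId>:<runtimeType>; the pair accumulates the running sum of
+        // reported values and the number of reports (averaged in getOutflowRate).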
+        String key = "" + report.getConnectionId() + ":" + report.getRuntimeId().getFeedRuntimeType();
+        Pair<Integer, Integer> value = feedMetrics.get(key);
+        if (value == null) {
+            value = new Pair<Integer, Integer>(report.getValue(), 1);
+            feedMetrics.put(key, value);
+        } else {
+            value.first = value.first + report.getValue();
+            value.second = value.second + 1;
+        }
+    }
+
+    @Override
+    public int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType) {
+        int rVal;
+        String key = "" + connectionId + ":" + runtimeType;
+        Pair<Integer, Integer> value = feedMetrics.get(key);
+        if (value == null) {
+            rVal = UNKNOWN;
+        } else {
+            rVal = value.first / value.second;
+        }
+        return rVal;
+    }
+
+    private List<String> getNodeForSubstitution(int nRequired) {
+        List<String> nodeIds = new ArrayList<String>();
+        Iterator<NodeLoadReport> it = null;
+        int nAdded = 0;
+        while (nAdded < nRequired) {
+            it = nodeReports.iterator();
+            while (it.hasNext() && nAdded < nRequired) {
+                nodeIds.add(it.next().getNodeId());
+                nAdded++;
+            }
+        }
+        return nodeIds;
+    }
+
+    @Override
+    public synchronized List<String> getNodes(int required) {
+        Iterator<NodeLoadReport> it;
+        List<String> allocated = new ArrayList<String>();
+        while (allocated.size() < required) {
+            it = nodeReports.iterator();
+            while (it.hasNext() && allocated.size() < required) {
+                allocated.add(it.next().getNodeId());
+            }
+        }
+        return allocated;
+    }
+
+    @Override
+    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception {
+        System.out.println("Throttling Enabled for " + mesg.getConnectionId() + " " + mesg.getFeedRuntimeId());
+        FeedConnectionId connectionId = mesg.getConnectionId();
+        List<String> destinationLocations = new ArrayList<String>();
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+
+        destinationLocations.addAll(storageLocations);
+        destinationLocations.addAll(collectLocations);
+        JobSpecification messageJobSpec = FeedOperations.buildNotifyThrottlingEnabledMessageJob(mesg,
+                destinationLocations);
+        runJob(messageJobSpec, true);
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Acking disabled for " + mesg.getConnectionId() + " in view of activated throttling");
+        }
+        IFeedTrackingManager trackingManager = CentralFeedManager.getInstance().getFeedTrackingManager();
+        trackingManager.disableAcking(connectionId);
+    }
+
+    @Override
+    public void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity) {
+        feedActivities.put(connectionId, activity);
+    }
+
+    @Override
+    public FeedActivity getFeedActivity(FeedConnectionId connectionId) {
+        return feedActivities.get(connectionId);
+    }
+
+    @Override
+    public Collection<FeedActivity> getFeedActivities() {
+        return feedActivities.values();
+    }
+
+    @Override
+    public void removeFeedActivity(FeedConnectionId connectionId) {
+        feedActivities.remove(connectionId);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java
new file mode 100644
index 0000000..e3020aa
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.config.AsterixFeedProperties;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedMemoryManager;
+import edu.uci.ics.asterix.common.feeds.FeedMessageService;
+import edu.uci.ics.asterix.common.feeds.FeedMetricCollector;
+import edu.uci.ics.asterix.common.feeds.NodeLoadReportService;
+import edu.uci.ics.asterix.common.feeds.api.IFeedConnectionManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedMemoryManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedMessageService;
+import edu.uci.ics.asterix.common.feeds.api.IFeedMetadataManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedMetricCollector;
+import edu.uci.ics.asterix.common.feeds.api.IFeedSubscriptionManager;
+import edu.uci.ics.asterix.metadata.feeds.FeedConnectionManager;
+import edu.uci.ics.asterix.metadata.feeds.FeedSubscriptionManager;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * An implementation of the IFeedManager interface.
+ * Provides the central repository for registering/retrieving
+ * artifacts/services associated with a feed.
+ */
+public class FeedManager implements IFeedManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedManager.class.getName());
+
+    private final IFeedSubscriptionManager feedSubscriptionManager;
+
+    private final IFeedConnectionManager feedConnectionManager;
+
+    private final IFeedMemoryManager feedMemoryManager;
+
+    private final IFeedMetricCollector feedMetricCollector;
+
+    private final IFeedMetadataManager feedMetadataManager;
+
+    private final IFeedMessageService feedMessageService;
+
+    private final NodeLoadReportService nodeLoadReportService;
+
+    private final AsterixFeedProperties asterixFeedProperties;
+
+    private final String nodeId;
+
+    private final int frameSize;
+
+    public FeedManager(String nodeId, AsterixFeedProperties feedProperties, int frameSize) throws AsterixException, HyracksDataException {
+        this.nodeId = nodeId;
+        this.feedSubscriptionManager = new FeedSubscriptionManager(nodeId);
+        this.feedConnectionManager = new FeedConnectionManager(nodeId);
+        this.feedMetadataManager = new FeedMetadataManager(nodeId);
+        this.feedMemoryManager = new FeedMemoryManager(nodeId, feedProperties, frameSize);
+        String ccClusterIp = AsterixClusterProperties.INSTANCE.getCluster() != null ? AsterixClusterProperties.INSTANCE
+                .getCluster().getMasterNode().getClusterIp() : "localhost";
+        this.feedMessageService = new FeedMessageService(feedProperties, nodeId, ccClusterIp);
+        this.nodeLoadReportService = new NodeLoadReportService(nodeId, this);
+        try {
+            this.feedMessageService.start();
+            this.nodeLoadReportService.start();
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to start feed services " + e.getMessage());
+            }
+            e.printStackTrace();
+        }
+        this.feedMetricCollector = new FeedMetricCollector(nodeId);
+        this.frameSize = frameSize;
+        this.asterixFeedProperties = feedProperties;
+    }
+
+    @Override
+    public IFeedSubscriptionManager getFeedSubscriptionManager() {
+        return feedSubscriptionManager;
+    }
+
+    @Override
+    public IFeedConnectionManager getFeedConnectionManager() {
+        return feedConnectionManager;
+    }
+
+    @Override
+    public IFeedMemoryManager getFeedMemoryManager() {
+        return feedMemoryManager;
+    }
+
+    @Override
+    public IFeedMetricCollector getFeedMetricCollector() {
+        return feedMetricCollector;
+    }
+
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+    @Override
+    public IFeedMetadataManager getFeedMetadataManager() {
+        return feedMetadataManager;
+    }
+
+    @Override
+    public IFeedMessageService getFeedMessageService() {
+        return feedMessageService;
+    }
+
+    @Override
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    @Override
+    public String toString() {
+        return "FeedManager " + "[" + nodeId + "]";
+    }
+
+    @Override
+    public AsterixFeedProperties getAsterixFeedProperties() {
+        return asterixFeedProperties;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java
new file mode 100644
index 0000000..8530370
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.logging.Level;
+
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.common.feeds.FeedConstants;
+import edu.uci.ics.asterix.common.feeds.FeedTupleCommitAckMessage;
+import edu.uci.ics.asterix.common.feeds.MessageReceiver;
+import edu.uci.ics.asterix.common.feeds.NodeLoadReport;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedMessage.MessageType;
+import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
+import edu.uci.ics.asterix.common.feeds.message.FeedCongestionMessage;
+import edu.uci.ics.asterix.common.feeds.message.FeedReportMessage;
+import edu.uci.ics.asterix.common.feeds.message.ScaleInReportMessage;
+import edu.uci.ics.asterix.common.feeds.message.StorageReportFeedMessage;
+import edu.uci.ics.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
+import edu.uci.ics.asterix.feeds.CentralFeedManager.AQLExecutor;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedBootstrap;
+
+public class FeedMessageReceiver extends MessageReceiver<String> {
+
+    private static boolean initialized;
+
+    private final IFeedLoadManager feedLoadManager;
+    private final IFeedTrackingManager feedTrackingManager;
+
+    public FeedMessageReceiver(CentralFeedManager centralFeedManager) {
+        this.feedLoadManager = centralFeedManager.getFeedLoadManager();
+        this.feedTrackingManager = centralFeedManager.getFeedTrackingManager();
+    }
+
+    @Override
+    public void processMessage(String message) throws Exception {
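+        // Messages arrive as JSON strings; the MESSAGE_TYPE field selects how the payload is
+        // deserialized and which manager handles it.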
+        JSONObject obj = new JSONObject(message);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Received message " + obj);
+        }
+        MessageType messageType = MessageType.valueOf(obj.getString(FeedConstants.MessageConstants.MESSAGE_TYPE));
+        switch (messageType) {
+            case XAQL:
+                if (!initialized) {
+                    FeedBootstrap.setUpInitialArtifacts();
+                    initialized = true;
+                }
+                AQLExecutor.executeAQL(obj.getString(FeedConstants.MessageConstants.AQL));
+                break;
+            case CONGESTION:
+                feedLoadManager.reportCongestion(FeedCongestionMessage.read(obj));
+                break;
+            case FEED_REPORT:
+                feedLoadManager.submitFeedRuntimeReport(FeedReportMessage.read(obj));
+                break;
+            case NODE_REPORT:
+                feedLoadManager.submitNodeLoadReport(NodeLoadReport.read(obj));
+                break;
+            case SCALE_IN_REQUEST:
+                feedLoadManager.submitScaleInPossibleReport(ScaleInReportMessage.read(obj));
+                break;
+            case STORAGE_REPORT:
+                FeedLifecycleListener.INSTANCE.updateTrackingInformation(StorageReportFeedMessage.read(obj));
+                break;
+            case COMMIT_ACK:
+                feedTrackingManager.submitAckReport(FeedTupleCommitAckMessage.read(obj));
+                break;
+            case THROTTLING_ENABLED:
+                feedLoadManager.reportThrottlingEnabled(ThrottlingEnabledFeedMessage.read(obj));
+                break;
+            default:
+                break;
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java
new file mode 100644
index 0000000..b927c90
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.Date;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.api.IFeedManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedMetadataManager;
+import edu.uci.ics.asterix.hyracks.bootstrap.FeedBootstrap;
+import edu.uci.ics.asterix.metadata.feeds.XAQLFeedMessage;
+import edu.uci.ics.asterix.om.base.ARecord;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedMetadataManager implements IFeedMetadataManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMetadataManager.class.getName());
+
+    private final String nodeId;
+    private ARecordType recordType;
+
+    public FeedMetadataManager(String nodeId) throws AsterixException, HyracksDataException {
+        this.nodeId = nodeId;
+        String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple", "message",
+                "timestamp" };
+        IAType[] fieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
+
+        recordType = new ARecordType(FeedBootstrap.FAILED_TUPLE_DATASET_TYPE, fieldNames, fieldTypes, true);
+    }
+
+    @Override
+    public void logTuple(FeedConnectionId connectionId, String tuple, String message, IFeedManager feedManager)
+            throws AsterixException {
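+        // A failed tuple is logged by building an ADM record and wrapping it in an AQL insert
+        // statement that is shipped to the feed message service as an XAQLFeedMessage.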
+        try {
+            AString id = new AString("1");
+            AString dataverseValue = new AString(connectionId.getFeedId().getDataverse());
+            AString feedValue = new AString(connectionId.getFeedId().getFeedName());
+            AString targetDatasetValue = new AString(connectionId.getDatasetName());
+            AString tupleValue = new AString(tuple);
+            AString messageValue = new AString(message);
+            AString dateTime = new AString(new Date().toString());
+
+            IAObject[] fields = new IAObject[] { id, dataverseValue, feedValue, targetDatasetValue, tupleValue,
+                    messageValue, dateTime };
+            ARecord record = new ARecord(recordType, fields);
+            StringBuilder builder = new StringBuilder();
+            builder.append("use dataverse " + FeedBootstrap.FEEDS_METADATA_DV + ";" + "\n");
+            builder.append("insert into dataset " + FeedBootstrap.FAILED_TUPLE_DATASET + " ");
+            builder.append(" (" + recordToString(record) + ")");
+            builder.append(";");
+
+            XAQLFeedMessage xAqlMessage = new XAQLFeedMessage(connectionId, builder.toString());
+            feedManager.getFeedMessageService().sendMessage(xAqlMessage);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(" Sent " + xAqlMessage.toJSON());
+            }
+        } catch (Exception pe) {
+            throw new AsterixException(pe);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "FeedMetadataManager [" + nodeId + "]";
+    }
+
+    private String recordToString(ARecord record) {
+        String[] fieldNames = record.getType().getFieldNames();
+        StringBuilder sb = new StringBuilder();
+        sb.append("{ ");
+        for (int i = 0; i < fieldNames.length; i++) {
+            if (i > 0) {
+                sb.append(", ");
+            }
+            sb.append("\"" + fieldNames[i] + "\"");
+            sb.append(": ");
+            switch (record.getType().getFieldTypes()[i].getTypeTag()) {
+                case STRING:
+                    sb.append("\"" + ((AString) record.getValueByPos(i)).getStringValue() + "\"");
+                    break;
+            }
+        }
+        sb.append(" }");
+        return sb.toString();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java
new file mode 100644
index 0000000..3e5a1a4
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedTupleCommitAckMessage;
+import edu.uci.ics.asterix.common.feeds.FeedTupleCommitResponseMessage;
+import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
+import edu.uci.ics.asterix.file.FeedOperations;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class FeedTrackingManager implements IFeedTrackingManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedTrackingManager.class.getName());
+
+    private final BitSet allOnes;
+
+    private Map<FeedConnectionId, Map<AckId, BitSet>> ackHistory;
+    private Map<FeedConnectionId, Map<AckId, Integer>> maxBaseAcked;
+
+    public FeedTrackingManager() {
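+        // allOnes is a 1024-bit (128-byte) mask of set bits; an ack window is considered fully
+        // covered once the accumulated acks equal this mask.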
+        byte[] allOneBytes = new byte[128];
+        Arrays.fill(allOneBytes, (byte) 0xff);
+        allOnes = BitSet.valueOf(allOneBytes);
+        ackHistory = new HashMap<FeedConnectionId, Map<AckId, BitSet>>();
+        maxBaseAcked = new HashMap<FeedConnectionId, Map<AckId, Integer>>();
+    }
+
+    @Override
+    public synchronized void submitAckReport(FeedTupleCommitAckMessage ackMessage) {
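+        // Acks for the same (connection, intake partition, base) are OR-ed together; once the window is
+        // fully covered, a commit response is sent, provided the base extends the previously
+        // acknowledged base contiguously.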
+        AckId ackId = getAckId(ackMessage);
+        Map<AckId, BitSet> acksForConnection = ackHistory.get(ackMessage.getConnectionId());
+        if (acksForConnection == null) {
+            acksForConnection = new HashMap<AckId, BitSet>();
+            acksForConnection.put(ackId, BitSet.valueOf(ackMessage.getCommitAcks()));
+            ackHistory.put(ackMessage.getConnectionId(), acksForConnection);
+        }
+        BitSet currentAcks = acksForConnection.get(ackId);
+        if (currentAcks == null) {
+            currentAcks = BitSet.valueOf(ackMessage.getCommitAcks());
+            acksForConnection.put(ackId, currentAcks);
+        } else {
+            currentAcks.or(BitSet.valueOf(ackMessage.getCommitAcks()));
+        }
+        if (Arrays.equals(currentAcks.toByteArray(), allOnes.toByteArray())) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(ackMessage.getIntakePartition() + " (" + ackMessage.getBase() + ")" + " is covered");
+            }
+            Map<AckId, Integer> maxBaseAckedForConnection = maxBaseAcked.get(ackMessage.getConnectionId());
+            if (maxBaseAckedForConnection == null) {
+                maxBaseAckedForConnection = new HashMap<AckId, Integer>();
+                maxBaseAcked.put(ackMessage.getConnectionId(), maxBaseAckedForConnection);
+            }
+            Integer maxBaseAckedValue = maxBaseAckedForConnection.get(ackId);
+            if (maxBaseAckedValue == null) {
+                maxBaseAckedValue = ackMessage.getBase();
+                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
+                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
+                        ackMessage.getBase());
+            } else if (ackMessage.getBase() == maxBaseAckedValue + 1) {
+                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
+                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
+                        ackMessage.getBase());
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Ignoring discountiuous acked base " + ackMessage.getBase() + " for " + ackId);
+                }
+            }
+
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("AckId " + ackId + " pending number of acks " + (128 * 8 - currentAcks.cardinality()));
+            }
+        }
+    }
+
+    public synchronized void disableTracking(FeedConnectionId connectionId) {
+        ackHistory.remove(connectionId);
+        maxBaseAcked.remove(connectionId);
+    }
+
+    private void sendCommitResponseMessage(FeedConnectionId connectionId, int partition, int base) {
+        FeedTupleCommitResponseMessage response = new FeedTupleCommitResponseMessage(connectionId, partition, base);
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+        String collectLocation = collectLocations.get(partition);
+        Set<String> messageDestinations = new HashSet<String>();
+        messageDestinations.add(collectLocation);
+        messageDestinations.addAll(storageLocations);
+        try {
+            JobSpecification spec = FeedOperations.buildCommitAckResponseJob(response, messageDestinations);
+            CentralFeedManager.runJob(spec, false);
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to send commit response message " + response + " exception " + e.getMessage());
+            }
+        }
+    }
+
+    private static AckId getAckId(FeedTupleCommitAckMessage ackMessage) {
+        return new AckId(ackMessage.getConnectionId(), ackMessage.getIntakePartition(), ackMessage.getBase());
+    }
+
+    private static class AckId {
+        private FeedConnectionId connectionId;
+        private int intakePartition;
+        private int base;
+
+        public AckId(FeedConnectionId connectionId, int intakePartition, int base) {
+            this.connectionId = connectionId;
+            this.intakePartition = intakePartition;
+            this.base = base;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (!(o instanceof AckId)) {
+                return false;
+            }
+            AckId other = (AckId) o;
+            return other.getConnectionId().equals(connectionId) && other.getIntakePartition() == intakePartition
+                    && other.getBase() == base;
+        }
+
+        @Override
+        public String toString() {
+            return connectionId + "[" + intakePartition + "]" + "(" + base + ")";
+        }
+
+        @Override
+        public int hashCode() {
+            return toString().hashCode();
+        }
+
+        public FeedConnectionId getConnectionId() {
+            return connectionId;
+        }
+
+        public int getIntakePartition() {
+            return intakePartition;
+        }
+
+        public int getBase() {
+            return base;
+        }
+
+    }
+
+    @Override
+    public void disableAcking(FeedConnectionId connectionId) {
+        ackHistory.remove(connectionId);
+        maxBaseAcked.remove(connectionId);
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Acking disabled for " + connectionId);
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java
new file mode 100644
index 0000000..ba54406
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java
@@ -0,0 +1,263 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IClusterManagementWork;
+import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
+import edu.uci.ics.asterix.common.feeds.FeedIntakeInfo;
+import edu.uci.ics.asterix.common.feeds.FeedJobInfo;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWorkResponse;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.Constraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression.ExpressionTag;
+import edu.uci.ics.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.PartitionCountExpression;
+import edu.uci.ics.hyracks.api.constraints.expressions.PartitionLocationExpression;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class FeedWorkRequestResponseHandler implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedWorkRequestResponseHandler.class.getName());
+
+    private final LinkedBlockingQueue<IClusterManagementWorkResponse> inbox;
+
+    private Map<Integer, Map<String, List<FeedJobInfo>>> feedsWaitingForResponse = new HashMap<Integer, Map<String, List<FeedJobInfo>>>();
+
+    public FeedWorkRequestResponseHandler(LinkedBlockingQueue<IClusterManagementWorkResponse> inbox) {
+        this.inbox = inbox;
+    }
+
+    @Override
+    public void run() {
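+        // Consume cluster-management work responses: map each dead node to a substitute (a newly added
+        // node when available, otherwise an existing participant), patch the affected feed job
+        // specifications, and restart the intake jobs followed by the connect jobs.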
+        while (true) {
+            IClusterManagementWorkResponse response = null;
+            try {
+                response = inbox.take();
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Interrupted exception " + e.getMessage());
+                }
+            }
+            IClusterManagementWork submittedWork = response.getWork();
+            Map<String, String> nodeSubstitution = new HashMap<String, String>();
+            switch (submittedWork.getClusterManagementWorkType()) {
+                case ADD_NODE:
+                    AddNodeWork addNodeWork = (AddNodeWork) submittedWork;
+                    int workId = addNodeWork.getWorkId();
+                    Map<String, List<FeedJobInfo>> failureAnalysis = feedsWaitingForResponse.get(workId);
+                    AddNodeWorkResponse resp = (AddNodeWorkResponse) response;
+                    List<String> nodesAdded = resp.getNodesAdded();
+                    List<String> unsubstitutedNodes = new ArrayList<String>();
+                    unsubstitutedNodes.addAll(addNodeWork.getDeadNodes());
+                    int nodeIndex = 0;
+
+                    /** form a mapping between the failed node and its substitute **/
+                    if (nodesAdded != null && nodesAdded.size() > 0) {
+                        for (String failedNodeId : addNodeWork.getDeadNodes()) {
+                            String substitute = nodesAdded.get(nodeIndex);
+                            nodeSubstitution.put(failedNodeId, substitute);
+                            nodeIndex = (nodeIndex + 1) % nodesAdded.size();
+                            unsubstitutedNodes.remove(failedNodeId);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Node " + substitute + " chosen to substiute lost node " + failedNodeId);
+                            }
+                        }
+                    }
+                    if (unsubstitutedNodes.size() > 0) {
+                        String[] participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes().toArray(
+                                new String[] {});
+                        nodeIndex = 0;
+                        for (String unsubstitutedNode : unsubstitutedNodes) {
+                            nodeSubstitution.put(unsubstitutedNode, participantNodes[nodeIndex]);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Node " + participantNodes[nodeIndex] + " chosen to substiute lost node "
+                                        + unsubstitutedNode);
+                            }
+                            nodeIndex = (nodeIndex + 1) % participantNodes.length;
+                        }
+
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Request " + resp.getWork() + " completed using internal nodes");
+                        }
+                    }
+
+                    // alter failed feed intake jobs
+
+                    for (Entry<String, List<FeedJobInfo>> entry : failureAnalysis.entrySet()) {
+                        String failedNode = entry.getKey();
+                        List<FeedJobInfo> impactedJobInfos = entry.getValue();
+                        for (FeedJobInfo info : impactedJobInfos) {
+                            JobSpecification spec = info.getSpec();
+                            replaceNode(spec, failedNode, nodeSubstitution.get(failedNode));
+                            info.setSpec(spec);
+                        }
+                    }
+
+                    Set<FeedIntakeInfo> revisedIntakeJobs = new HashSet<FeedIntakeInfo>();
+                    Set<FeedConnectJobInfo> revisedConnectJobInfos = new HashSet<FeedConnectJobInfo>();
+
+                    for (List<FeedJobInfo> infos : failureAnalysis.values()) {
+                        for (FeedJobInfo info : infos) {
+                            switch (info.getJobType()) {
+                                case INTAKE:
+                                    revisedIntakeJobs.add((FeedIntakeInfo) info);
+                                    break;
+                                case FEED_CONNECT:
+                                    revisedConnectJobInfos.add((FeedConnectJobInfo) info);
+                                    break;
+                            }
+                        }
+                    }
+
+                    IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+                    try {
+                        for (FeedIntakeInfo info : revisedIntakeJobs) {
+                            hcc.startJob(info.getSpec());
+                        }
+                        Thread.sleep(2000);
+                        for (FeedConnectJobInfo info : revisedConnectJobInfos) {
+                            hcc.startJob(info.getSpec());
+                            Thread.sleep(2000);
+                        }
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to start revised job post failure");
+                        }
+                    }
+
+                    break;
+                case REMOVE_NODE:
+                    throw new IllegalStateException("Invalid work submitted");
+            }
+        }
+    }
+
+    private void replaceNode(JobSpecification jobSpec, String failedNodeId, String replacementNode) {
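+        // Collect the location (and related count) constraints of operators pinned to the failed node,
+        // remove them from the job spec, and re-register absolute location constraints with the
+        // replacement node substituted in.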
+        Set<Constraint> userConstraints = jobSpec.getUserConstraints();
+        List<Constraint> locationConstraintsToReplace = new ArrayList<Constraint>();
+        List<Constraint> countConstraintsToReplace = new ArrayList<Constraint>();
+        List<OperatorDescriptorId> modifiedOperators = new ArrayList<OperatorDescriptorId>();
+        Map<OperatorDescriptorId, List<Constraint>> candidateConstraints = new HashMap<OperatorDescriptorId, List<Constraint>>();
+        Map<OperatorDescriptorId, Map<Integer, String>> newConstraints = new HashMap<OperatorDescriptorId, Map<Integer, String>>();
+        OperatorDescriptorId opId = null;
+        for (Constraint constraint : userConstraints) {
+            LValueConstraintExpression lexpr = constraint.getLValue();
+            ConstraintExpression cexpr = constraint.getRValue();
+            switch (lexpr.getTag()) {
+                case PARTITION_COUNT:
+                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
+                    if (modifiedOperators.contains(opId)) {
+                        countConstraintsToReplace.add(constraint);
+                    } else {
+                        List<Constraint> clist = candidateConstraints.get(opId);
+                        if (clist == null) {
+                            clist = new ArrayList<Constraint>();
+                            candidateConstraints.put(opId, clist);
+                        }
+                        clist.add(constraint);
+                    }
+                    break;
+                case PARTITION_LOCATION:
+                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
+                    String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+                    if (oldLocation.equals(failedNodeId)) {
+                        locationConstraintsToReplace.add(constraint);
+                        modifiedOperators.add(((PartitionLocationExpression) lexpr).getOperatorDescriptorId());
+                        Map<Integer, String> newLocs = newConstraints.get(opId);
+                        if (newLocs == null) {
+                            newLocs = new HashMap<Integer, String>();
+                            newConstraints.put(opId, newLocs);
+                        }
+                        int partition = ((PartitionLocationExpression) lexpr).getPartition();
+                        newLocs.put(partition, replacementNode);
+                    } else {
+                        if (modifiedOperators.contains(opId)) {
+                            locationConstraintsToReplace.add(constraint);
+                            Map<Integer, String> newLocs = newConstraints.get(opId);
+                            if (newLocs == null) {
+                                newLocs = new HashMap<Integer, String>();
+                                newConstraints.put(opId, newLocs);
+                            }
+                            int partition = ((PartitionLocationExpression) lexpr).getPartition();
+                            newLocs.put(partition, oldLocation);
+                        } else {
+                            List<Constraint> clist = candidateConstraints.get(opId);
+                            if (clist == null) {
+                                clist = new ArrayList<Constraint>();
+                                candidateConstraints.put(opId, clist);
+                            }
+                            clist.add(constraint);
+                        }
+                    }
+                    break;
+            }
+        }
+
+        jobSpec.getUserConstraints().removeAll(locationConstraintsToReplace);
+        jobSpec.getUserConstraints().removeAll(countConstraintsToReplace);
+
+        for (OperatorDescriptorId mopId : modifiedOperators) {
+            List<Constraint> clist = candidateConstraints.get(mopId);
+            if (clist != null && !clist.isEmpty()) {
+                jobSpec.getUserConstraints().removeAll(clist);
+
+                for (Constraint c : clist) {
+                    if (c.getLValue().getTag().equals(ExpressionTag.PARTITION_LOCATION)) {
+                        ConstraintExpression cexpr = c.getRValue();
+                        int partition = ((PartitionLocationExpression) c.getLValue()).getPartition();
+                        String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+                        newConstraints.get(mopId).put(partition, oldLocation);
+                    }
+                }
+            }
+        }
+
+        for (Entry<OperatorDescriptorId, Map<Integer, String>> entry : newConstraints.entrySet()) {
+            OperatorDescriptorId nopId = entry.getKey();
+            Map<Integer, String> clist = entry.getValue();
+            IOperatorDescriptor op = jobSpec.getOperatorMap().get(nopId);
+            String[] locations = new String[clist.size()];
+            for (int i = 0; i < locations.length; i++) {
+                locations[i] = clist.get(i);
+            }
+            PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, op, locations);
+        }
+
+    }
+
+    public void registerFeedWork(int workId, Map<String, List<FeedJobInfo>> impactedJobs) {
+        feedsWaitingForResponse.put(workId, impactedJobs);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java
new file mode 100644
index 0000000..0e7ec5b
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DataverseDecl;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+public class FeedsActivator implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedsActivator.class.getName());
+
+    private List<FeedCollectInfo> feedsToRevive;
+    private Mode mode;
+
+    public enum Mode {
+        REVIVAL_POST_CLUSTER_REBOOT,
+        REVIVAL_POST_NODE_REJOIN
+    }
+
+    public FeedsActivator() {
+        this.mode = Mode.REVIVAL_POST_CLUSTER_REBOOT;
+    }
+
+    public FeedsActivator(List<FeedCollectInfo> feedsToRevive) {
+        this.feedsToRevive = feedsToRevive;
+        this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
+    }
+
+    @Override
+    public void run() {
+        switch (mode) {
+            case REVIVAL_POST_CLUSTER_REBOOT:
+                //revivePostClusterReboot();
+                break;
+            case REVIVAL_POST_NODE_REJOIN:
+                try {
+                    Thread.sleep(10000);
+                } catch (InterruptedException e1) {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Attempt to resume feed interrupted");
+                    }
+                    throw new IllegalStateException(e1.getMessage());
+                }
+                for (FeedCollectInfo finfo : feedsToRevive) {
+                    try {
+                        JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
+                            LOGGER.info("Job:" + finfo.jobSpec);
+                        }
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
+                        }
+                    }
+                }
+        }
+    }
+
+    public void reviveFeed(String dataverse, String feedName, String dataset, String feedPolicy) {
+        PrintWriter writer = new PrintWriter(System.out, true);
+        SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+        try {
+            DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(dataverse));
+            ConnectFeedStatement stmt = new ConnectFeedStatement(new Identifier(dataverse), new Identifier(feedName),
+                    new Identifier(dataset), feedPolicy, 0);
+            stmt.setForceConnect(true);
+            List<Statement> statements = new ArrayList<Statement>();
+            statements.add(dataverseDecl);
+            statements.add(stmt);
+            AqlTranslator translator = new AqlTranslator(statements, pc);
+            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, AqlTranslator.ResultDelivery.SYNC);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Resumed feed: " + dataverse + ":" + dataset + " using policy " + feedPolicy);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Exception in resuming loser feed: " + dataverse + ":" + dataset + " using policy "
+                        + feedPolicy + " Exception " + e.getMessage());
+            }
+        }
+    }
+}
\ No newline at end of file
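
A hypothetical usage sketch (not taken from this patch) of how the activator above might be scheduled once a node rejoins; it assumes the caller sits in the same edu.uci.ics.asterix.feeds package and already holds the FeedCollectInfo list for the affected feeds.

    import java.util.List;

    // Hypothetical caller; the FeedCollectInfo instances come from the feed bookkeeping layer.
    public class FeedsActivatorUsageSketch {
        static void resumeAfterRejoin(List<FeedCollectInfo> feedsToRevive) {
            // The activator sleeps briefly, then restarts each collect job through the HCC.
            new Thread(new FeedsActivator(feedsToRevive)).start();
        }
    }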

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
new file mode 100644
index 0000000..d476eed
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.file;
+
+import java.io.File;
+import java.rmi.RemoteException;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import edu.uci.ics.asterix.formats.base.IDataFormat;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.resource.LSMBTreeLocalResourceMetadata;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.storage.common.file.LocalResource;
+
+public class DatasetOperations {
+
+    private static final Logger LOGGER = Logger.getLogger(DatasetOperations.class.getName());
+
+    public static JobSpecification createDropDatasetJobSpec(CompiledDatasetDropStatement datasetDropStmt,
+            AqlMetadataProvider metadataProvider) throws AlgebricksException, HyracksDataException, RemoteException,
+            ACIDException, AsterixException {
+
+        String dataverseName = null;
+        if (datasetDropStmt.getDataverseName() != null) {
+            dataverseName = datasetDropStmt.getDataverseName();
+        } else if (metadataProvider.getDefaultDataverse() != null) {
+            dataverseName = metadataProvider.getDefaultDataverse().getDataverseName();
+        }
+
+        String datasetName = datasetDropStmt.getDatasetName();
+        String datasetPath = dataverseName + File.separator + datasetName;
+
+        LOGGER.info("DROP DATASETPATH: " + datasetPath);
+
+        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AlgebricksException("DROP DATASET: No metadata for dataset " + datasetName);
+        }
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            return JobSpecificationUtils.createJobSpecification();
+        }
+        boolean temp = dataset.getDatasetDetails().isTemp();
+
+        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
+                dataverseName);
+        IDataFormat format;
+        try {
+            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+
+        ARecordType itemType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+
+        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
+        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                itemType, format.getBinaryComparatorFactoryProvider());
+        int[] filterFields = DatasetUtils.createFilterFields(dataset);
+        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
+        JobSpecification specPrimary = JobSpecificationUtils.createJobSpecification();
+
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(dataset.getDataverseName(), datasetName, datasetName,
+                        temp);
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+
+        // The index drop operation should be persistent regardless of whether the dataset is temporary or permanent.
+        IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
+                        filterCmpFactories, btreeFields, filterFields, true));
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
+                splitsAndConstraint.second);
+
+        specPrimary.addRoot(primaryBtreeDrop);
+
+        return specPrimary;
+    }
+
+    public static JobSpecification createDatasetJobSpec(Dataverse dataverse, String datasetName,
+            AqlMetadataProvider metadata) throws AsterixException, AlgebricksException {
+        String dataverseName = dataverse.getDataverseName();
+        IDataFormat format;
+        try {
+            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+        Dataset dataset = metadata.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
+        }
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
+                itemType, format.getBinaryComparatorFactoryProvider());
+        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
+        int[] bloomFilterKeyFields = DatasetUtils.createBloomFilterKeyFields(dataset);
+
+        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
+        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                itemType, format.getBinaryComparatorFactoryProvider());
+        int[] filterFields = DatasetUtils.createFilterFields(dataset);
+        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
+
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
+        FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
+        StringBuilder sb = new StringBuilder();
+        for (int i = 0; i < fs.length; i++) {
+            sb.append(stringOf(fs[i]) + " ");
+        }
+        LOGGER.info("CREATING File Splits: " + sb.toString());
+
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadata.getMetadataTxnContext());
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
+        // Prepare a LocalResourceMetadata which will be stored in the NC's local resource repository.
+        ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(typeTraits,
+                comparatorFactories, bloomFilterKeyFields, true, dataset.getDatasetId(), compactionInfo.first,
+                compactionInfo.second, filterTypeTraits, filterCmpFactories, btreeFields, filterFields);
+        ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
+                localResourceMetadata, LocalResource.LSMBTreeResource);
+
+        // The index create operation should be persistent regardless of whether the dataset is temporary or permanent.
+        TreeIndexCreateOperatorDescriptor indexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(dataset
+                                .getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
+                                .getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        btreeFields, filterFields, true), localResourceFactoryProvider,
+                NoOpOperationCallbackFactory.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
+                splitsAndConstraint.second);
+        spec.addRoot(indexCreateOp);
+        return spec;
+    }
+
+    private static String stringOf(FileSplit fs) {
+        return fs.getNodeName() + ":" + fs.getLocalFile().toString();
+    }
+
+    public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
+            AqlMetadataProvider metadata) throws AsterixException, AlgebricksException {
+        String dataverseName = dataverse.getDataverseName();
+        IDataFormat format;
+        try {
+            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+        Dataset dataset = metadata.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
+        }
+        boolean temp = dataset.getDatasetDetails().isTemp();
+
+        ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
+                itemType, format.getBinaryComparatorFactoryProvider());
+        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
+        int[] bloomFilterKeyFields = DatasetUtils.createBloomFilterKeyFields(dataset);
+
+        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
+        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                itemType, format.getBinaryComparatorFactoryProvider());
+        int[] filterFields = DatasetUtils.createFilterFields(dataset);
+        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
+
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
+
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
+
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadata.getMetadataTxnContext());
+        LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(
+                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
+                        filterCmpFactories, btreeFields, filterFields, !temp), NoOpOperationCallbackFactory.INSTANCE);
+        AlgebricksPartitionConstraintHelper
+                .setPartitionConstraintInJobSpec(spec, compactOp, splitsAndConstraint.second);
+        spec.addRoot(compactOp);
+        return spec;
+    }
+}
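
A hedged sketch of how these specs are typically driven: build the job with DatasetOperations and submit it through the Hyracks client connection, mirroring the startJob pattern visible in FeedsActivator above. The driver class is hypothetical; the Dataverse and AqlMetadataProvider arguments are assumed to be supplied by the caller.

    import edu.uci.ics.asterix.file.DatasetOperations;
    import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
    import edu.uci.ics.asterix.metadata.entities.Dataverse;
    import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
    import edu.uci.ics.hyracks.api.job.JobId;
    import edu.uci.ics.hyracks.api.job.JobSpecification;

    // Hypothetical driver: build the create-dataset spec, then hand it to the HCC.
    public class DatasetJobRunnerSketch {
        static JobId createDataset(Dataverse dv, String datasetName, AqlMetadataProvider metadata) throws Exception {
            JobSpecification spec = DatasetOperations.createDatasetJobSpec(dv, datasetName, metadata);
            return AsterixAppContextInfo.getInstance().getHcc().startJob(spec);
        }
    }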

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java
new file mode 100644
index 0000000..997893f
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+
+public class DataverseOperations {
+    public static JobSpecification createDropDataverseJobSpec(Dataverse dataverse, AqlMetadataProvider metadata) {
+        JobSpecification jobSpec = JobSpecificationUtils.createJobSpecification();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForDataverse(dataverse.getDataverseName());
+        FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, frod, splitsAndConstraint.second);
+        jobSpec.addRoot(frod);
+        return jobSpec;
+    }
+}
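
The drop-dataverse spec above can presumably be driven the same way as the dataset jobs sketched earlier: build it with createDropDataverseJobSpec and submit it via the HCC startJob call, after which the FileRemoveOperatorDescriptor, constrained by the dataverse's split provider, removes the dataverse directory on each partition that hosts it.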


[37/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
deleted file mode 100644
index 7bfa056..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
+++ /dev/null
@@ -1,1498 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.base.Expression.Kind;
-import edu.uci.ics.asterix.aql.expression.CallExpr;
-import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFeedPolicyStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
-import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
-import edu.uci.ics.asterix.aql.expression.CreatePrimaryFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateSecondaryFeedStatement;
-import edu.uci.ics.asterix.aql.expression.DatasetDecl;
-import edu.uci.ics.asterix.aql.expression.DataverseDecl;
-import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
-import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.DistinctClause;
-import edu.uci.ics.asterix.aql.expression.DropStatement;
-import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
-import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
-import edu.uci.ics.asterix.aql.expression.FeedPolicyDropStatement;
-import edu.uci.ics.asterix.aql.expression.FieldAccessor;
-import edu.uci.ics.asterix.aql.expression.FieldBinding;
-import edu.uci.ics.asterix.aql.expression.ForClause;
-import edu.uci.ics.asterix.aql.expression.FunctionDecl;
-import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
-import edu.uci.ics.asterix.aql.expression.GbyVariableExpressionPair;
-import edu.uci.ics.asterix.aql.expression.GroupbyClause;
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aql.expression.IfExpr;
-import edu.uci.ics.asterix.aql.expression.IndexAccessor;
-import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
-import edu.uci.ics.asterix.aql.expression.InsertStatement;
-import edu.uci.ics.asterix.aql.expression.JoinClause;
-import edu.uci.ics.asterix.aql.expression.LetClause;
-import edu.uci.ics.asterix.aql.expression.LimitClause;
-import edu.uci.ics.asterix.aql.expression.ListConstructor;
-import edu.uci.ics.asterix.aql.expression.ListConstructor.Type;
-import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadStatement;
-import edu.uci.ics.asterix.aql.expression.MetaVariableClause;
-import edu.uci.ics.asterix.aql.expression.MetaVariableExpr;
-import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
-import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
-import edu.uci.ics.asterix.aql.expression.OperatorExpr;
-import edu.uci.ics.asterix.aql.expression.OperatorType;
-import edu.uci.ics.asterix.aql.expression.OrderbyClause;
-import edu.uci.ics.asterix.aql.expression.OrderbyClause.OrderModifier;
-import edu.uci.ics.asterix.aql.expression.OrderedListTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.QuantifiedExpression;
-import edu.uci.ics.asterix.aql.expression.QuantifiedExpression.Quantifier;
-import edu.uci.ics.asterix.aql.expression.QuantifiedPair;
-import edu.uci.ics.asterix.aql.expression.Query;
-import edu.uci.ics.asterix.aql.expression.RecordConstructor;
-import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.SetStatement;
-import edu.uci.ics.asterix.aql.expression.TypeDecl;
-import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
-import edu.uci.ics.asterix.aql.expression.TypeReferenceExpression;
-import edu.uci.ics.asterix.aql.expression.UnaryExpr;
-import edu.uci.ics.asterix.aql.expression.UnaryExpr.Sign;
-import edu.uci.ics.asterix.aql.expression.UnionExpr;
-import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.UpdateClause;
-import edu.uci.ics.asterix.aql.expression.UpdateStatement;
-import edu.uci.ics.asterix.aql.expression.VariableExpr;
-import edu.uci.ics.asterix.aql.expression.WhereClause;
-import edu.uci.ics.asterix.aql.expression.WriteStatement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlPlusExpressionVisitor;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.common.transactions.JobId;
-import edu.uci.ics.asterix.formats.base.IDataFormat;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.FileSplitDataSink;
-import edu.uci.ics.asterix.metadata.declared.FileSplitSinkId;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.functions.AsterixFunctionInfo;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation.BroadcastSide;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
-
-/**
- * Each visit returns a pair of an operator and a variable. The variable
- * corresponds to the new column, if any, added to the tuple flow. E.g., for
- * Unnest, the column is the variable bound to the elements in the list, for
- * Subplan it is null. The first argument of a visit method is the expression
- * which is translated. The second argument of a visit method is the tuple
- * source for the current subtree.
- */
-
-public class AqlPlusExpressionToPlanTranslator extends AbstractAqlTranslator implements
-        IAqlPlusExpressionVisitor<Pair<ILogicalOperator, LogicalVariable>, Mutable<ILogicalOperator>> {
-
-    private static final Logger LOGGER = Logger.getLogger(AqlPlusExpressionToPlanTranslator.class.getName());
-
-    private class MetaScopeLogicalVariable {
-        private HashMap<Identifier, LogicalVariable> map = new HashMap<Identifier, LogicalVariable>();
-
-        public VariableReferenceExpression getVariableReferenceExpression(Identifier id) throws AsterixException {
-            LogicalVariable var = map.get(id);
-            LOGGER.fine("get:" + id + ":" + var);
-            if (var == null) {
-                throw new AsterixException("Identifier " + id + " not found in AQL+ meta-scope.");
-            }
-            return new VariableReferenceExpression(var);
-        }
-
-        public void put(Identifier id, LogicalVariable var) {
-            LOGGER.fine("put:" + id + ":" + var);
-            map.put(id, var);
-        }
-    }
-
-    private class MetaScopeILogicalOperator {
-        private HashMap<Identifier, ILogicalOperator> map = new HashMap<Identifier, ILogicalOperator>();
-
-        public ILogicalOperator get(Identifier id) throws AsterixException {
-            ILogicalOperator op = map.get(id);
-            if (op == null) {
-                throw new AsterixException("Identifier " + id + " not found in AQL+ meta-scope.");
-            }
-            return op;
-        }
-
-        public void put(Identifier id, ILogicalOperator op) {
-            LOGGER.fine("put:" + id + ":" + op);
-            map.put(id, op);
-        }
-    }
-
-    private final JobId jobId;
-    private TranslationContext context;
-    private String outputDatasetName;
-    private ICompiledDmlStatement stmt;
-    private AqlMetadataProvider metadataProvider;
-
-    private MetaScopeLogicalVariable metaScopeExp = new MetaScopeLogicalVariable();
-    private MetaScopeILogicalOperator metaScopeOp = new MetaScopeILogicalOperator();
-    private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
-
-    public AqlPlusExpressionToPlanTranslator(JobId jobId, AqlMetadataProvider metadataProvider,
-            Counter currentVarCounter, String outputDatasetName, ICompiledDmlStatement stmt) {
-        this.jobId = jobId;
-        this.metadataProvider = metadataProvider;
-        this.context = new TranslationContext(currentVarCounter);
-        this.outputDatasetName = outputDatasetName;
-        this.stmt = stmt;
-        this.context.setTopFlwor(false);
-    }
-
-    public int getVarCounter() {
-        return context.getVarCounter();
-    }
-
-    public ILogicalPlan translate(Query expr) throws AlgebricksException, AsterixException {
-        return translate(expr, null);
-    }
-
-    public ILogicalPlan translate(Query expr, AqlMetadataProvider metadata) throws AlgebricksException,
-            AsterixException {
-        IDataFormat format = metadata.getFormat();
-        if (format == null) {
-            throw new AlgebricksException("Data format has not been set.");
-        }
-        format.registerRuntimeFunctions();
-        Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, new MutableObject<ILogicalOperator>(
-                new EmptyTupleSourceOperator()));
-
-        ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
-
-        boolean isTransactionalWrite = false;
-        ILogicalOperator topOp = p.first;
-        ProjectOperator project = (ProjectOperator) topOp;
-        LogicalVariable resVar = project.getVariables().get(0);
-        if (outputDatasetName == null) {
-            List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
-            writeExprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
-            FileSplitSinkId fssi = new FileSplitSinkId(metadata.getOutputFile());
-            FileSplitDataSink sink = new FileSplitDataSink(fssi, null);
-            topOp = new WriteOperator(writeExprList, sink);
-            topOp.getInputs().add(new MutableObject<ILogicalOperator>(project));
-        } else {
-            Dataset dataset = metadata.findDataset(stmt.getDataverseName(), outputDatasetName);
-            if (dataset == null) {
-                throw new AlgebricksException("Cannot find dataset " + outputDatasetName);
-            }
-            if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-                throw new AlgebricksException("Cannot write output to an external dataset.");
-            }
-            ARecordType itemType = (ARecordType) metadata.findType(dataset.getDataverseName(),
-                    dataset.getItemTypeName());
-            List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
-            ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
-            ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
-            List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
-            for (List<String> partitioningKey : partitioningKeys) {
-                Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioner = format
-                        .partitioningEvaluatorFactory(itemType, partitioningKey);
-                AbstractFunctionCallExpression f = partitioner.second.cloneExpression();
-                f.substituteVar(METADATA_DUMMY_VAR, resVar);
-                exprs.add(new MutableObject<ILogicalExpression>(f));
-                LogicalVariable v = context.newVar();
-                vars.add(v);
-                varRefsForLoading.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
-            }
-            AssignOperator assign = new AssignOperator(vars, exprs);
-            assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
-        }
-
-        globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
-        ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
-        return plan;
-    }
-
-    public ILogicalPlan translate(List<Clause> clauses) throws AlgebricksException, AsterixException {
-
-        if (clauses == null) {
-            return null;
-        }
-
-        Mutable<ILogicalOperator> opRef = new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator());
-        Pair<ILogicalOperator, LogicalVariable> p = null;
-        for (Clause c : clauses) {
-            p = c.accept(this, opRef);
-            opRef = new MutableObject<ILogicalOperator>(p.first);
-        }
-
-        ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
-
-        ILogicalOperator topOp = p.first;
-
-        globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
-        ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
-        return plan;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitForClause(ForClause fc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        LogicalVariable v = context.newVar(fc.getVarExpr());
-
-        Expression inExpr = fc.getInExpr();
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(inExpr, tupSource);
-        ILogicalOperator returnedOp;
-
-        if (fc.getPosVarExpr() == null) {
-            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)));
-        } else {
-            LogicalVariable pVar = context.newVar(fc.getPosVarExpr());
-            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)),
-                    pVar, BuiltinType.AINT32, new AqlPositionWriter());
-        }
-        returnedOp.getInputs().add(eo.second);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLetClause(LetClause lc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        LogicalVariable v;
-        ILogicalOperator returnedOp;
-
-        switch (lc.getBindingExpr().getKind()) {
-            case VARIABLE_EXPRESSION: {
-                v = context.newVar(lc.getVarExpr());
-                LogicalVariable prev = context.getVar(((VariableExpr) lc.getBindingExpr()).getVar().getId());
-                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(
-                        new VariableReferenceExpression(prev)));
-                returnedOp.getInputs().add(tupSource);
-                break;
-            }
-            default: {
-                Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(lc.getBindingExpr(),
-                        tupSource);
-                v = context.newVar(lc.getVarExpr());
-                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(eo.first));
-                returnedOp.getInputs().add(eo.second);
-                break;
-            }
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFlworExpression(FLWOGRExpression flwor,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Mutable<ILogicalOperator> flworPlan = tupSource;
-        boolean isTop = context.isTopFlwor();
-        if (isTop) {
-            context.setTopFlwor(false);
-        }
-        for (Clause c : flwor.getClauseList()) {
-            Pair<ILogicalOperator, LogicalVariable> pC = c.accept(this, flworPlan);
-            flworPlan = new MutableObject<ILogicalOperator>(pC.first);
-        }
-
-        Expression r = flwor.getReturnExpr();
-        boolean noFlworClause = flwor.noForClause();
-
-        if (r.getKind() == Kind.VARIABLE_EXPRESSION) {
-            VariableExpr v = (VariableExpr) r;
-            LogicalVariable var = context.getVar(v.getVar().getId());
-
-            return produceFlwrResult(noFlworClause, isTop, flworPlan, var);
-
-        } else {
-            Mutable<ILogicalOperator> baseOp = new MutableObject<ILogicalOperator>(flworPlan.getValue());
-            Pair<ILogicalOperator, LogicalVariable> rRes = r.accept(this, baseOp);
-            ILogicalOperator rOp = rRes.first;
-            ILogicalOperator resOp;
-            if (expressionNeedsNoNesting(r)) {
-                baseOp.setValue(flworPlan.getValue());
-                resOp = rOp;
-            } else {
-                SubplanOperator s = new SubplanOperator(rOp);
-                s.getInputs().add(flworPlan);
-                resOp = s;
-                baseOp.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
-            }
-            Mutable<ILogicalOperator> resOpRef = new MutableObject<ILogicalOperator>(resOp);
-            return produceFlwrResult(noFlworClause, isTop, resOpRef, rRes.second);
-        }
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFieldAccessor(FieldAccessor fa,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(fa.getExpr(), tupSource);
-        LogicalVariable v = context.newVar();
-        AbstractFunctionCallExpression fldAccess = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME));
-        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-        ILogicalExpression faExpr = new ConstantExpression(new AsterixConstantValue(new AString(fa.getIdent()
-                .getValue())));
-        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(faExpr));
-        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(fldAccess));
-        a.getInputs().add(p.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
-
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitIndexAccessor(IndexAccessor ia,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(ia.getExpr(), tupSource);
-        LogicalVariable v = context.newVar();
-        AbstractFunctionCallExpression f;
-        if (ia.isAny()) {
-            f = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.ANY_COLLECTION_MEMBER));
-            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-        } else {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> indexPair = aqlExprToAlgExpression(ia.getIndexExpr(),
-                    tupSource);
-            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM));
-            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-            f.getArguments().add(new MutableObject<ILogicalExpression>(indexPair.first));
-        }
-        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
-        a.getInputs().add(p.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCallExpr(CallExpr fcall, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        LogicalVariable v = context.newVar();
-        FunctionSignature signature = fcall.getFunctionSignature();
-        List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        for (Expression expr : fcall.getExprList()) {
-            switch (expr.getKind()) {
-                case VARIABLE_EXPRESSION: {
-                    LogicalVariable var = context.getVar(((VariableExpr) expr).getVar().getId());
-                    args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
-                    break;
-                }
-                case LITERAL_EXPRESSION: {
-                    LiteralExpr val = (LiteralExpr) expr;
-                    args.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                            ConstantHelper.objectFromLiteral(val.getValue())))));
-                    break;
-                }
-                default: {
-                    Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
-                    AbstractLogicalOperator o1 = (AbstractLogicalOperator) eo.second.getValue();
-                    args.add(new MutableObject<ILogicalExpression>(eo.first));
-                    if (o1 != null && !(o1.getOperatorTag() == LogicalOperatorTag.ASSIGN && hasOnlyChild(o1, topOp))) {
-                        topOp = eo.second;
-                    }
-                    break;
-                }
-            }
-        }
-
-        FunctionIdentifier fi = new FunctionIdentifier(AlgebricksBuiltinFunctions.ALGEBRICKS_NS, signature.getName());
-        AsterixFunctionInfo afi = AsterixBuiltinFunctions.lookupFunction(fi);
-        FunctionIdentifier builtinAquafi = afi == null ? null : afi.getFunctionIdentifier();
-
-        if (builtinAquafi != null) {
-            fi = builtinAquafi;
-        } else {
-            fi = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, signature.getName());
-            FunctionIdentifier builtinAsterixFi = AsterixBuiltinFunctions.getBuiltinFunctionIdentifier(fi);
-            if (builtinAsterixFi != null) {
-                fi = builtinAsterixFi;
-            }
-        }
-        AbstractFunctionCallExpression f;
-        if (AsterixBuiltinFunctions.isBuiltinAggregateFunction(fi)) {
-            f = AsterixBuiltinFunctions.makeAggregateFunctionExpression(fi, args);
-        } else if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(fi)) {
-            UnnestingFunctionCallExpression ufce = new UnnestingFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(fi), args);
-            ufce.setReturnsUniqueValues(AsterixBuiltinFunctions.returnsUniqueValues(fi));
-            f = ufce;
-        } else {
-            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fi), args);
-        }
-        AssignOperator op = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
-        if (topOp != null) {
-            op.getInputs().add(topOp);
-        }
-
-        return new Pair<ILogicalOperator, LogicalVariable>(op, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDecl(FunctionDecl fd,
-            Mutable<ILogicalOperator> tupSource) {
-        // TODO Auto-generated method stub
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitGroupbyClause(GroupbyClause gc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        GroupByOperator gOp = new GroupByOperator();
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (GbyVariableExpressionPair ve : gc.getGbyPairList()) {
-            LogicalVariable v;
-            VariableExpr vexpr = ve.getVar();
-            if (vexpr != null) {
-                v = context.newVar(vexpr);
-            } else {
-                v = context.newVar();
-            }
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
-            gOp.addGbyExpression(v, eo.first);
-            topOp = eo.second;
-        }
-        for (GbyVariableExpressionPair ve : gc.getDecorPairList()) {
-            LogicalVariable v;
-            VariableExpr vexpr = ve.getVar();
-            if (vexpr != null) {
-                v = context.newVar(vexpr);
-            } else {
-                v = context.newVar();
-            }
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
-            gOp.addDecorExpression(v, eo.first);
-            topOp = eo.second;
-        }
-        gOp.getInputs().add(topOp);
-
-        for (VariableExpr var : gc.getWithVarList()) {
-            LogicalVariable aggVar = context.newVar();
-            LogicalVariable oldVar = context.getVar(var);
-            List<Mutable<ILogicalExpression>> flArgs = new ArrayList<Mutable<ILogicalExpression>>(1);
-            flArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldVar)));
-            AggregateFunctionCallExpression fListify = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
-                    AsterixBuiltinFunctions.LISTIFY, flArgs);
-            AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(aggVar),
-                    (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fListify)));
-            agg.getInputs().add(
-                    new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
-                            new MutableObject<ILogicalOperator>(gOp))));
-            ILogicalPlan plan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(agg));
-            gOp.getNestedPlans().add(plan);
-            // Hide the variable that was part of the "with", replacing it with
-            // the one bound by the aggregation op.
-            context.setVar(var, aggVar);
-        }
-
-        gOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, gc.hasHashGroupByHint());
-        return new Pair<ILogicalOperator, LogicalVariable>(gOp, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitIfExpr(IfExpr ifexpr, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        // In the most general case, IfThenElse is translated in the following
-        // way.
-        //
-        // We assign the result of the condition to one variable varCond.
-        // We create one subplan which contains the plan for the "then" branch,
-        // on top of which there is a selection whose condition is varCond.
-        // Similarly, we create one subplan for the "else" branch, in which the
-        // selection is not(varCond).
-        // Finally, we concatenate the results. (??)
-
-        Pair<ILogicalOperator, LogicalVariable> pCond = ifexpr.getCondExpr().accept(this, tupSource);
-        ILogicalOperator opCond = pCond.first;
-        LogicalVariable varCond = pCond.second;
-
-        SubplanOperator sp = new SubplanOperator();
-        Mutable<ILogicalOperator> nestedSource = new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
-                new MutableObject<ILogicalOperator>(sp)));
-
-        Pair<ILogicalOperator, LogicalVariable> pThen = ifexpr.getThenExpr().accept(this, nestedSource);
-        SelectOperator sel1 = new SelectOperator(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                varCond)), false, null);
-        sel1.getInputs().add(new MutableObject<ILogicalOperator>(pThen.first));
-
-        Pair<ILogicalOperator, LogicalVariable> pElse = ifexpr.getElseExpr().accept(this, nestedSource);
-        AbstractFunctionCallExpression notVarCond = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), new MutableObject<ILogicalExpression>(
-                        new VariableReferenceExpression(varCond)));
-        SelectOperator sel2 = new SelectOperator(new MutableObject<ILogicalExpression>(notVarCond), false, null);
-        sel2.getInputs().add(new MutableObject<ILogicalOperator>(pElse.first));
-
-        ILogicalPlan p1 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel1));
-        sp.getNestedPlans().add(p1);
-        ILogicalPlan p2 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel2));
-        sp.getNestedPlans().add(p2);
-
-        Mutable<ILogicalOperator> opCondRef = new MutableObject<ILogicalOperator>(opCond);
-        sp.getInputs().add(opCondRef);
-
-        LogicalVariable resV = context.newVar();
-        AbstractFunctionCallExpression concatNonNull = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CONCAT_NON_NULL),
-                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pThen.second)),
-                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pElse.second)));
-        AssignOperator a = new AssignOperator(resV, new MutableObject<ILogicalExpression>(concatNonNull));
-        a.getInputs().add(new MutableObject<ILogicalOperator>(sp));
-
-        return new Pair<ILogicalOperator, LogicalVariable>(a, resV);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLiteralExpr(LiteralExpr l, Mutable<ILogicalOperator> tupSource) {
-        LogicalVariable var = context.newVar();
-        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(new ConstantExpression(
-                new AsterixConstantValue(ConstantHelper.objectFromLiteral(l.getValue())))));
-        if (tupSource != null) {
-            a.getInputs().add(tupSource);
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitOperatorExpr(OperatorExpr op,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        ArrayList<OperatorType> ops = op.getOpList();
-        int nOps = ops.size();
-
-        if (nOps > 0 && (ops.get(0) == OperatorType.AND || ops.get(0) == OperatorType.OR)) {
-            return visitAndOrOperator(op, tupSource);
-        }
-
-        ArrayList<Expression> exprs = op.getExprList();
-
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        ILogicalExpression currExpr = null;
-        for (int i = 0; i <= nOps; i++) {
-
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
-            topOp = p.second;
-            ILogicalExpression e = p.first;
-            // now look at the operator
-            if (i < nOps) {
-                if (OperatorExpr.opIsComparison(ops.get(i))) {
-                    AbstractFunctionCallExpression c = createComparisonExpression(ops.get(i));
-
-                    // chain the operators
-                    if (i == 0) {
-                        c.getArguments().add(new MutableObject<ILogicalExpression>(e));
-                        currExpr = c;
-                        if (op.isBroadcastOperand(i)) {
-                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
-                            bcast.setObject(BroadcastSide.LEFT);
-                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
-                        }
-                    } else {
-                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
-                                new MutableObject<ILogicalExpression>(e));
-                        c.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
-                        currExpr = c;
-                        if (i == 1 && op.isBroadcastOperand(i)) {
-                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
-                            bcast.setObject(BroadcastSide.RIGHT);
-                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
-                        }
-                    }
-                } else {
-                    AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(ops.get(i));
-
-                    if (i == 0) {
-                        f.getArguments().add(new MutableObject<ILogicalExpression>(e));
-                        currExpr = f;
-                    } else {
-                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
-                                new MutableObject<ILogicalExpression>(e));
-                        f.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
-                        currExpr = f;
-                    }
-                }
-            } else { // don't forget the last expression...
-                ((AbstractFunctionCallExpression) currExpr).getArguments()
-                        .add(new MutableObject<ILogicalExpression>(e));
-                if (i == 1 && op.isBroadcastOperand(i)) {
-                    BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
-                    bcast.setObject(BroadcastSide.RIGHT);
-                    ((AbstractFunctionCallExpression) currExpr).getAnnotations().put(
-                            BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
-                }
-            }
-        }
-
-        LogicalVariable assignedVar = context.newVar();
-        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(currExpr));
-
-        a.getInputs().add(topOp);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitOrderbyClause(OrderbyClause oc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-
-        OrderOperator ord = new OrderOperator();
-        Iterator<OrderModifier> modifIter = oc.getModifierList().iterator();
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (Expression e : oc.getOrderbyList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(e, topOp);
-            OrderModifier m = modifIter.next();
-            OrderOperator.IOrder comp = (m == OrderModifier.ASC) ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;
-            ord.getOrderExpressions()
-                    .add(new Pair<IOrder, Mutable<ILogicalExpression>>(comp, new MutableObject<ILogicalExpression>(
-                            p.first)));
-            topOp = p.second;
-        }
-        ord.getInputs().add(topOp);
-        if (oc.getNumTuples() > 0) {
-            ord.getAnnotations().put(OperatorAnnotations.CARDINALITY, oc.getNumTuples());
-        }
-        if (oc.getNumFrames() > 0) {
-            ord.getAnnotations().put(OperatorAnnotations.MAX_NUMBER_FRAMES, oc.getNumFrames());
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(ord, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitQuantifiedExpression(QuantifiedExpression qe,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        ILogicalOperator firstOp = null;
-        Mutable<ILogicalOperator> lastOp = null;
-
-        for (QuantifiedPair qt : qe.getQuantifiedList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(qt.getExpr(), topOp);
-            topOp = eo1.second;
-            LogicalVariable uVar = context.newVar(qt.getVarExpr());
-            ILogicalOperator u = new UnnestOperator(uVar, new MutableObject<ILogicalExpression>(
-                    makeUnnestExpression(eo1.first)));
-
-            if (firstOp == null) {
-                firstOp = u;
-            }
-            if (lastOp != null) {
-                u.getInputs().add(lastOp);
-            }
-            lastOp = new MutableObject<ILogicalOperator>(u);
-        }
-
-        // We make all the unnests corresponding to the quantified variables sit on top,
-        // in the hope of enabling joins and other optimizations.
-        firstOp.getInputs().add(topOp);
-        topOp = lastOp;
-
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
-
-        AggregateFunctionCallExpression fAgg;
-        SelectOperator s;
-        if (qe.getQuantifier() == Quantifier.SOME) {
-            s = new SelectOperator(new MutableObject<ILogicalExpression>(eo2.first), false, null);
-            s.getInputs().add(eo2.second);
-            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.NON_EMPTY_STREAM,
-                    new ArrayList<Mutable<ILogicalExpression>>());
-        } else { // EVERY
-            List<Mutable<ILogicalExpression>> satExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
-            satExprList.add(new MutableObject<ILogicalExpression>(eo2.first));
-            s = new SelectOperator(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), satExprList)), false, null);
-            s.getInputs().add(eo2.second);
-            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.EMPTY_STREAM,
-                    new ArrayList<Mutable<ILogicalExpression>>());
-        }
-        LogicalVariable qeVar = context.newVar();
-        AggregateOperator a = new AggregateOperator(mkSingletonArrayList(qeVar),
-                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fAgg)));
-        a.getInputs().add(new MutableObject<ILogicalOperator>(s));
-        return new Pair<ILogicalOperator, LogicalVariable>(a, qeVar);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitQuery(Query q, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        return q.getBody().accept(this, tupSource);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitRecordConstructor(RecordConstructor rc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR));
-        LogicalVariable v1 = context.newVar();
-        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (FieldBinding fb : rc.getFbList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(fb.getLeftExpr(), topOp);
-            f.getArguments().add(new MutableObject<ILogicalExpression>(eo1.first));
-            topOp = eo1.second;
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(fb.getRightExpr(), topOp);
-            f.getArguments().add(new MutableObject<ILogicalExpression>(eo2.first));
-            topOp = eo2.second;
-        }
-        a.getInputs().add(topOp);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitListConstructor(ListConstructor lc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        FunctionIdentifier fid = (lc.getType() == Type.ORDERED_LIST_CONSTRUCTOR) ? AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR
-                : AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR;
-        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
-        LogicalVariable v1 = context.newVar();
-        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (Expression expr : lc.getExprList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
-            f.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
-            topOp = eo.second;
-        }
-        a.getInputs().add(topOp);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUnaryExpr(UnaryExpr u, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Expression expr = u.getExpr();
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, tupSource);
-        LogicalVariable v1 = context.newVar();
-        AssignOperator a;
-        if (u.getSign() == Sign.POSITIVE) {
-            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(eo.first));
-        } else {
-            AbstractFunctionCallExpression m = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NUMERIC_UNARY_MINUS));
-            m.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
-            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(m));
-        }
-        a.getInputs().add(eo.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitVariableExpr(VariableExpr v, Mutable<ILogicalOperator> tupSource) {
-        // Should we ever get to this method?
-        LogicalVariable var = context.newVar();
-        LogicalVariable oldV = context.getVar(v.getVar().getId());
-        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(
-                new VariableReferenceExpression(oldV)));
-        a.getInputs().add(tupSource);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitWhereClause(WhereClause w, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(w.getWhereExpr(), tupSource);
-        SelectOperator s = new SelectOperator(new MutableObject<ILogicalExpression>(p.first), false, null);
-        s.getInputs().add(p.second);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(s, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLimitClause(LimitClause lc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getLimitExpr(), tupSource);
-        LimitOperator opLim;
-        Expression offset = lc.getOffset();
-        if (offset != null) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p2 = aqlExprToAlgExpression(offset, p1.second);
-            opLim = new LimitOperator(p1.first, p2.first);
-            opLim.getInputs().add(p2.second);
-        } else {
-            opLim = new LimitOperator(p1.first);
-            opLim.getInputs().add(p1.second);
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(opLim, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
-        Mutable<ILogicalOperator> input = null;
-        for (Expression expr : dc.getDistinctByExpr()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(expr, tupSource);
-            exprList.add(new MutableObject<ILogicalExpression>(p.first));
-            input = p.second;
-        }
-        DistinctOperator opDistinct = new DistinctOperator(exprList);
-        opDistinct.getInputs().add(input);
-        return new Pair<ILogicalOperator, LogicalVariable>(opDistinct, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUnionExpr(UnionExpr unionExpr,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Mutable<ILogicalOperator> ts = tupSource;
-        ILogicalOperator lastOp = null;
-        LogicalVariable lastVar = null;
-        boolean first = true;
-        for (Expression e : unionExpr.getExprs()) {
-            if (first) {
-                first = false;
-            } else {
-                ts = new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator());
-            }
-            Pair<ILogicalOperator, LogicalVariable> p1 = e.accept(this, ts);
-            if (lastOp == null) {
-                lastOp = p1.first;
-                lastVar = p1.second;
-            } else {
-                LogicalVariable unnestVar1 = context.newVar();
-                UnnestOperator unnest1 = new UnnestOperator(unnestVar1, new MutableObject<ILogicalExpression>(
-                        makeUnnestExpression(new VariableReferenceExpression(lastVar))));
-                unnest1.getInputs().add(new MutableObject<ILogicalOperator>(lastOp));
-                LogicalVariable unnestVar2 = context.newVar();
-                UnnestOperator unnest2 = new UnnestOperator(unnestVar2, new MutableObject<ILogicalExpression>(
-                        makeUnnestExpression(new VariableReferenceExpression(p1.second))));
-                unnest2.getInputs().add(new MutableObject<ILogicalOperator>(p1.first));
-                List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(
-                        1);
-                LogicalVariable resultVar = context.newVar();
-                Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
-                        unnestVar1, unnestVar2, resultVar);
-                varMap.add(triple);
-                UnionAllOperator unionOp = new UnionAllOperator(varMap);
-                unionOp.getInputs().add(new MutableObject<ILogicalOperator>(unnest1));
-                unionOp.getInputs().add(new MutableObject<ILogicalOperator>(unnest2));
-                lastVar = resultVar;
-                lastOp = unionOp;
-            }
-        }
-        LogicalVariable aggVar = context.newVar();
-        ArrayList<LogicalVariable> aggregVars = new ArrayList<LogicalVariable>(1);
-        aggregVars.add(aggVar);
-        List<Mutable<ILogicalExpression>> afcExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
-        afcExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(lastVar)));
-        AggregateFunctionCallExpression afc = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
-                AsterixBuiltinFunctions.LISTIFY, afcExprs);
-        ArrayList<Mutable<ILogicalExpression>> aggregExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
-        aggregExprs.add(new MutableObject<ILogicalExpression>(afc));
-        AggregateOperator agg = new AggregateOperator(aggregVars, aggregExprs);
-        agg.getInputs().add(new MutableObject<ILogicalOperator>(lastOp));
-        return new Pair<ILogicalOperator, LogicalVariable>(agg, aggVar);
-    }
-
-    private AbstractFunctionCallExpression createComparisonExpression(OperatorType t) {
-        FunctionIdentifier fi = operatorTypeToFunctionIdentifier(t);
-        IFunctionInfo finfo = FunctionUtils.getFunctionInfo(fi);
-        return new ScalarFunctionCallExpression(finfo);
-    }
-
-    private FunctionIdentifier operatorTypeToFunctionIdentifier(OperatorType t) {
-        switch (t) {
-            case EQ: {
-                return AlgebricksBuiltinFunctions.EQ;
-            }
-            case NEQ: {
-                return AlgebricksBuiltinFunctions.NEQ;
-            }
-            case GT: {
-                return AlgebricksBuiltinFunctions.GT;
-            }
-            case GE: {
-                return AlgebricksBuiltinFunctions.GE;
-            }
-            case LT: {
-                return AlgebricksBuiltinFunctions.LT;
-            }
-            case LE: {
-                return AlgebricksBuiltinFunctions.LE;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-    }
-
-    private AbstractFunctionCallExpression createFunctionCallExpressionForBuiltinOperator(OperatorType t)
-            throws AsterixException {
-
-        FunctionIdentifier fid = null;
-        switch (t) {
-            case PLUS: {
-                fid = AlgebricksBuiltinFunctions.NUMERIC_ADD;
-                break;
-            }
-            case MINUS: {
-                fid = AsterixBuiltinFunctions.NUMERIC_SUBTRACT;
-                break;
-            }
-            case MUL: {
-                fid = AsterixBuiltinFunctions.NUMERIC_MULTIPLY;
-                break;
-            }
-            case DIV: {
-                fid = AsterixBuiltinFunctions.NUMERIC_DIVIDE;
-                break;
-            }
-            case MOD: {
-                fid = AsterixBuiltinFunctions.NUMERIC_MOD;
-                break;
-            }
-            case IDIV: {
-                fid = AsterixBuiltinFunctions.NUMERIC_IDIV;
-                break;
-            }
-            case CARET: {
-                fid = AsterixBuiltinFunctions.CARET;
-                break;
-            }
-            case AND: {
-                fid = AlgebricksBuiltinFunctions.AND;
-                break;
-            }
-            case OR: {
-                fid = AlgebricksBuiltinFunctions.OR;
-                break;
-            }
-            case FUZZY_EQ: {
-                fid = AsterixBuiltinFunctions.FUZZY_EQ;
-                break;
-            }
-
-            default: {
-                throw new NotImplementedException("Operator " + t + " is not yet implemented");
-            }
-        }
-        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
-    }
-
-    private static boolean hasOnlyChild(ILogicalOperator parent, Mutable<ILogicalOperator> childCandidate) {
-        List<Mutable<ILogicalOperator>> inp = parent.getInputs();
-        if (inp == null || inp.size() != 1) {
-            return false;
-        }
-        return inp.get(0) == childCandidate;
-    }
-
-    private Pair<ILogicalExpression, Mutable<ILogicalOperator>> aqlExprToAlgExpression(Expression expr,
-            Mutable<ILogicalOperator> topOp) throws AsterixException {
-        switch (expr.getKind()) {
-            case VARIABLE_EXPRESSION: {
-                VariableReferenceExpression ve = new VariableReferenceExpression(context.getVar(((VariableExpr) expr)
-                        .getVar().getId()));
-                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(ve, topOp);
-            }
-            case METAVARIABLE_EXPRESSION: {
-                ILogicalExpression le = metaScopeExp.getVariableReferenceExpression(((VariableExpr) expr).getVar());
-                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(le, topOp);
-            }
-            case LITERAL_EXPRESSION: {
-                LiteralExpr val = (LiteralExpr) expr;
-                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new ConstantExpression(
-                        new AsterixConstantValue(ConstantHelper.objectFromLiteral(val.getValue()))), topOp);
-            }
-            default: {
-                // Mutable<ILogicalExpression> src = new
-                // Mutable<ILogicalExpression>();
-                // Mutable<ILogicalExpression> src = topOp;
-                if (expressionNeedsNoNesting(expr)) {
-                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, topOp);
-                    ILogicalExpression exp = ((AssignOperator) p.first).getExpressions().get(0).getValue();
-                    return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(exp, p.first.getInputs().get(0));
-                } else {
-                    Mutable<ILogicalOperator> src = new MutableObject<ILogicalOperator>();
-
-                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, src);
-
-                    if (((AbstractLogicalOperator) p.first).getOperatorTag() == LogicalOperatorTag.SUBPLAN) {
-                        // src.setOperator(topOp.getOperator());
-                        Mutable<ILogicalOperator> top2 = new MutableObject<ILogicalOperator>(p.first);
-                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
-                                p.second), top2);
-                    } else {
-                        SubplanOperator s = new SubplanOperator();
-                        s.getInputs().add(topOp);
-                        src.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
-                        Mutable<ILogicalOperator> planRoot = new MutableObject<ILogicalOperator>(p.first);
-                        s.setRootOp(planRoot);
-                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
-                                p.second), new MutableObject<ILogicalOperator>(s));
-                    }
-                }
-            }
-        }
-
-    }
-
-    private Pair<ILogicalOperator, LogicalVariable> produceFlwrResult(boolean noForClause, boolean isTop,
-            Mutable<ILogicalOperator> resOpRef, LogicalVariable resVar) {
-        if (isTop) {
-            ProjectOperator pr = new ProjectOperator(resVar);
-            pr.getInputs().add(resOpRef);
-            return new Pair<ILogicalOperator, LogicalVariable>(pr, resVar);
-
-        } else if (noForClause) {
-            return new Pair<ILogicalOperator, LogicalVariable>(resOpRef.getValue(), resVar);
-        } else {
-            return aggListify(resVar, resOpRef, false);
-        }
-    }
-
-    private Pair<ILogicalOperator, LogicalVariable> aggListify(LogicalVariable var, Mutable<ILogicalOperator> opRef,
-            boolean bProject) {
-        AggregateFunctionCallExpression funAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
-                AsterixBuiltinFunctions.LISTIFY, new ArrayList<Mutable<ILogicalExpression>>());
-        funAgg.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
-        LogicalVariable varListified = context.newVar();
-        AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(varListified),
-                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(funAgg)));
-        agg.getInputs().add(opRef);
-        ILogicalOperator res;
-        if (bProject) {
-            ProjectOperator pr = new ProjectOperator(varListified);
-            pr.getInputs().add(new MutableObject<ILogicalOperator>(agg));
-            res = pr;
-        } else {
-            res = agg;
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(res, varListified);
-    }
-
-    private Pair<ILogicalOperator, LogicalVariable> visitAndOrOperator(OperatorExpr op,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        ArrayList<OperatorType> ops = op.getOpList();
-        int nOps = ops.size();
-
-        ArrayList<Expression> exprs = op.getExprList();
-
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        OperatorType opLogical = ops.get(0);
-        AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(opLogical);
-
-        for (int i = 0; i <= nOps; i++) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
-            topOp = p.second;
-            // now look at the operator
-            if (i < nOps) {
-                if (ops.get(i) != opLogical) {
-                    throw new TranslationException("Unexpected operator " + ops.get(i)
-                            + " in an OperatorExpr starting with " + opLogical);
-                }
-            }
-            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-        }
-
-        LogicalVariable assignedVar = context.newVar();
-        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(f));
-        a.getInputs().add(topOp);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
-
-    }
-
-    private static boolean expressionNeedsNoNesting(Expression expr) {
-        Kind k = expr.getKind();
-        return k == Kind.LITERAL_EXPRESSION || k == Kind.LIST_CONSTRUCTOR_EXPRESSION
-                || k == Kind.RECORD_CONSTRUCTOR_EXPRESSION || k == Kind.VARIABLE_EXPRESSION
-                || k == Kind.CALL_EXPRESSION || k == Kind.OP_EXPRESSION || k == Kind.FIELD_ACCESSOR_EXPRESSION
-                || k == Kind.INDEX_ACCESSOR_EXPRESSION || k == Kind.UNARY_EXPRESSION;
-    }
-
-    private <T> ArrayList<T> mkSingletonArrayList(T item) {
-        ArrayList<T> array = new ArrayList<T>(1);
-        array.add(item);
-        return array;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitTypeDecl(TypeDecl td, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitRecordTypeDefiniton(RecordTypeDefinition tre,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitTypeReferenceExpression(TypeReferenceExpression tre,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitNodegroupDecl(NodegroupDecl ngd, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLoadStatement(LoadStatement stmtLoad,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDropStatement(DropStatement del, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateIndexStatement(CreateIndexStatement cis,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitOrderedListTypeDefiniton(OrderedListTypeDefinition olte,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUnorderedListTypeDefiniton(UnorderedListTypeDefinition ulte,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitMetaVariableClause(MetaVariableClause mc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        return new Pair<ILogicalOperator, LogicalVariable>(metaScopeOp.get(mc.getVar()), null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitJoinClause(JoinClause jc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        // Pair<ILogicalOperator, LogicalVariable> leftSide =
-        // jc.getLeftExpr().accept(this, tupSource);
-        Mutable<ILogicalOperator> opRef = tupSource;
-        Pair<ILogicalOperator, LogicalVariable> leftSide = null;
-        for (Clause c : jc.getLeftClauses()) {
-            leftSide = c.accept(this, opRef);
-            opRef = new MutableObject<ILogicalOperator>(leftSide.first);
-        }
-
-        // Pair<ILogicalOperator, LogicalVariable> rightSide =
-        // jc.getRightExpr().accept(this, tupSource);
-        opRef = tupSource;
-        Pair<ILogicalOperator, LogicalVariable> rightSide = null;
-        for (Clause c : jc.getRightClauses()) {
-            rightSide = c.accept(this, opRef);
-            opRef = new MutableObject<ILogicalOperator>(rightSide.first);
-        }
-
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> whereCond = aqlExprToAlgExpression(jc.getWhereExpr(),
-                tupSource);
-
-        AbstractBinaryJoinOperator join;
-        switch (jc.getKind()) {
-            case INNER: {
-                join = new InnerJoinOperator(new MutableObject<ILogicalExpression>(whereCond.first));
-                break;
-            }
-            case LEFT_OUTER: {
-                join = new LeftOuterJoinOperator(new MutableObject<ILogicalExpression>(whereCond.first));
-                break;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-        join.getInputs().add(new MutableObject<ILogicalOperator>(leftSide.first));
-        join.getInputs().add(new MutableObject<ILogicalOperator>(rightSide.first));
-        return new Pair<ILogicalOperator, LogicalVariable>(join, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitMetaVariableExpr(MetaVariableExpr me,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        LogicalVariable var = context.newVar();
-        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(
-                metaScopeExp.getVariableReferenceExpression(me.getVar())));
-        a.getInputs().add(tupSource);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
-    }
-
-    public void addOperatorToMetaScope(Identifier id, ILogicalOperator op) {
-        metaScopeOp.put(id, op);
-    }
-
-    public void addVariableToMetaScope(Identifier id, LogicalVariable var) {
-        metaScopeExp.put(id, var);
-    }
-
-    private ILogicalExpression makeUnnestExpression(ILogicalExpression expr) {
-        switch (expr.getExpressionTag()) {
-            case VARIABLE: {
-                return new UnnestingFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
-                        new MutableObject<ILogicalExpression>(expr));
-            }
-            case FUNCTION_CALL: {
-                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
-                if (fce.getKind() == FunctionKind.UNNEST) {
-                    return expr;
-                } else {
-                    return new UnnestingFunctionCallExpression(
-                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
-                            new MutableObject<ILogicalExpression>(expr));
-                }
-            }
-            default: {
-                return expr;
-            }
-        }
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitInsertStatement(InsertStatement insert,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDeleteStatement(DeleteStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUpdateStatement(UpdateStatement update,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUpdateClause(UpdateClause del, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDecl(DataverseDecl dv, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDatasetDecl(DatasetDecl dd, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitSetStatement(SetStatement ss, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitWriteStatement(WriteStatement ws, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateDataverseStatement(CreateDataverseStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitIndexDropStatement(IndexDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitNodeGroupDropStatement(NodeGroupDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDropStatement(DataverseDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitTypeDropStatement(TypeDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDisconnectFeedStatement(DisconnectFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visit(CreateFunctionStatement cfs, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDropStatement(FunctionDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitConnectFeedStatement(ConnectFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedStatement(FeedDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCompactStatement(CompactStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-    
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreatePrimaryFeedStatement(CreatePrimaryFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateSecondaryFeedStatement(CreateSecondaryFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateFeedPolicyStatement(CreateFeedPolicyStatement cfps,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedPolicyStatement(FeedPolicyDropStatement dfs,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-}
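
For reference, the comparison chaining in visitOperatorExpr above builds a left-deep nest of
function calls: an input such as a < b < c becomes lt(lt(a, b), c). Below is a minimal,
self-contained sketch of that shape only, using plain strings as toy stand-ins for the
Algebricks expression classes (it is not the translator's actual code path):

import java.util.Arrays;
import java.util.List;

public class OperatorChainSketch {

    // Toy model of the chaining in visitOperatorExpr: exprs has one more
    // element than ops, and each operator wraps the expression built so far
    // together with the next operand, yielding a left-deep nest.
    static String chain(List<String> exprs, List<String> ops) {
        String curr = exprs.get(0);
        for (int i = 0; i < ops.size(); i++) {
            curr = ops.get(i) + "(" + curr + ", " + exprs.get(i + 1) + ")";
        }
        return curr;
    }

    public static void main(String[] args) {
        // a < b < c  ->  lt(lt(a, b), c)
        System.out.println(chain(Arrays.asList("a", "b", "c"), Arrays.asList("lt", "lt")));
    }
}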

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPositionWriter.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPositionWriter.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPositionWriter.java
deleted file mode 100644
index da66fe4..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPositionWriter.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.Serializable;
-
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IUnnestingPositionWriter;
-
-public class AqlPositionWriter implements IUnnestingPositionWriter, Serializable {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public void write(DataOutput dataOutput, long position) throws IOException {
-        dataOutput.writeByte(BuiltinType.AINT64.getTypeTag().serialize());
-        dataOutput.writeLong(position);
-    }
-
-}
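
The deleted AqlPositionWriter above emits one type-tag byte (taken from BuiltinType.AINT64)
followed by the 8-byte big-endian position. A small stand-alone sketch of that layout is shown
below; the tag value used here is a placeholder, not the real AINT64 tag byte:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class PositionLayoutSketch {
    // Placeholder tag byte; the real value comes from
    // BuiltinType.AINT64.getTypeTag().serialize().
    private static final byte INT64_TAG_PLACEHOLDER = 3;

    static byte[] writePosition(long position) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);
        out.writeByte(INT64_TAG_PLACEHOLDER); // 1 byte: type tag
        out.writeLong(position);              // 8 bytes: big-endian position
        return bos.toByteArray();             // 9 bytes total
    }

    public static void main(String[] args) throws IOException {
        System.out.println(writePosition(42L).length); // prints 9
    }
}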


[48/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java
deleted file mode 100644
index 296bbb2..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixInlineVariablesRule.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineVariablesRule;
-
-public class AsterixInlineVariablesRule extends InlineVariablesRule {
-
-    public AsterixInlineVariablesRule() {
-        // Do not inline field accesses and spatial functions because doing so would interfere with our access method rewrites.
-        // TODO: For now we must also exclude record constructor functions to avoid breaking our type casting rules
-        // IntroduceStaticTypeCastRule and IntroduceDynamicTypeCastRule.
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CAST_RECORD);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_CIRCLE);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_LINE);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_MBR);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_POINT);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_POLYGON);
-        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_RECTANGLE);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
deleted file mode 100644
index b71ffab..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractIntroduceGroupByCombinerRule;
-
-public class AsterixIntroduceGroupByCombinerRule extends AbstractIntroduceGroupByCombinerRule {
-
-    @SuppressWarnings("unchecked")
-    @Override
-    protected void processNullTest(IOptimizationContext context, GroupByOperator nestedGby,
-            List<LogicalVariable> aggregateVarsProducedByCombiner) {
-        IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AsterixBuiltinFunctions.IS_SYSTEM_NULL);
-        SelectOperator selectNonSystemNull;
-
-        if (aggregateVarsProducedByCombiner.size() == 1) {
-            ILogicalExpression isSystemNullTest = new ScalarFunctionCallExpression(finfoEq,
-                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                            aggregateVarsProducedByCombiner.get(0))));
-            IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
-            ScalarFunctionCallExpression nonSystemNullTest = new ScalarFunctionCallExpression(finfoNot,
-                    new MutableObject<ILogicalExpression>(isSystemNullTest));
-            selectNonSystemNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonSystemNullTest), false,
-                    null);
-        } else {
-            List<Mutable<ILogicalExpression>> isSystemNullTestList = new ArrayList<Mutable<ILogicalExpression>>();
-            for (LogicalVariable aggVar : aggregateVarsProducedByCombiner) {
-                ILogicalExpression isSystemNullTest = new ScalarFunctionCallExpression(finfoEq,
-                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(aggVar)));
-                IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
-                ScalarFunctionCallExpression nonSystemNullTest = new ScalarFunctionCallExpression(finfoNot,
-                        new MutableObject<ILogicalExpression>(isSystemNullTest));
-                isSystemNullTestList.add(new MutableObject<ILogicalExpression>(nonSystemNullTest));
-            }
-            IFunctionInfo finfoAnd = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.AND);
-            selectNonSystemNull = new SelectOperator(new MutableObject<ILogicalExpression>(
-                    new ScalarFunctionCallExpression(finfoAnd, isSystemNullTestList)), false, null);
-        }
-
-        //add the not-system-null check into the nested pipeline
-        Mutable<ILogicalOperator> ntsBeforeNestedGby = nestedGby.getInputs().get(0);
-        nestedGby.getInputs().set(0, new MutableObject<ILogicalOperator>(selectNonSystemNull));
-        selectNonSystemNull.getInputs().add(ntsBeforeNestedGby);
-    }
-}
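
As a quick illustration of the predicate shape that processNullTest builds above: a single
combiner variable yields not(is-system-null(v)), while several variables get their negated
tests joined under a single and(...). The sketch below models only that shape with strings
and does not touch the Algebricks expression classes:

import java.util.Arrays;
import java.util.List;

public class NonSystemNullFilterSketch {

    // Mirrors the branching in processNullTest: one variable -> a single
    // negated is-system-null test; several variables -> the negated tests
    // combined under one and(...).
    static String buildFilter(List<String> combinerVars) {
        if (combinerVars.size() == 1) {
            return "not(is-system-null(" + combinerVars.get(0) + "))";
        }
        StringBuilder sb = new StringBuilder("and(");
        for (int i = 0; i < combinerVars.size(); i++) {
            if (i > 0) {
                sb.append(", ");
            }
            sb.append("not(is-system-null(").append(combinerVars.get(i)).append("))");
        }
        return sb.append(")").toString();
    }

    public static void main(String[] args) {
        System.out.println(buildFilter(Arrays.asList("$agg1")));
        System.out.println(buildFilter(Arrays.asList("$agg1", "$agg2")));
    }
}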

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java
deleted file mode 100644
index c42fa15..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.MoveFreeVariableOperatorOutOfSubplanRule;
-
-public class AsterixMoveFreeVariableOperatorOutOfSubplanRule extends MoveFreeVariableOperatorOutOfSubplanRule {
-
-    @Override
-    protected boolean movableOperator(LogicalOperatorTag operatorTag) {
-        return (operatorTag == LogicalOperatorTag.ASSIGN);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
deleted file mode 100644
index 9c04564..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class ByNameToByHandleFieldAccessRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        AssignOperator assign = (AssignOperator) op;
-        if (assign.getAnnotations().get(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS) == null) {
-            return false;
-        }
-        byNameToByHandle(assign, context);
-        return true;
-    }
-
-    private static void byNameToByHandle(AssignOperator fieldAccessOp, IOptimizationContext context) {
-        Mutable<ILogicalOperator> opUnder = fieldAccessOp.getInputs().get(0);
-        AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) fieldAccessOp.getExpressions().get(0)
-                .getValue();
-        ILogicalExpression a1 = fce.getArguments().get(0).getValue();
-
-        VariableReferenceExpression x;
-        if (a1.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            x = (VariableReferenceExpression) a1;
-        } else {
-            LogicalVariable var1 = context.newVar();
-            ArrayList<LogicalVariable> varArray = new ArrayList<LogicalVariable>(1);
-            varArray.add(var1);
-            ArrayList<Mutable<ILogicalExpression>> exprArray = new ArrayList<Mutable<ILogicalExpression>>(1);
-            exprArray.add(new MutableObject<ILogicalExpression>(a1));
-            AssignOperator assignVar = new AssignOperator(varArray, exprArray);
-            x = new VariableReferenceExpression(var1);
-            assignVar.getInputs().add(opUnder);
-            opUnder = new MutableObject<ILogicalOperator>(assignVar);
-        }
-
-        // let $t := type-of(x)
-        LogicalVariable t = context.newVar();
-
-        AbstractFunctionCallExpression typeOf = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TYPE_OF));
-        typeOf.getArguments().add(new MutableObject<ILogicalExpression>(x));
-        AssignOperator typAssign = new AssignOperator(t, new MutableObject<ILogicalExpression>(typeOf));
-        typAssign.getInputs().add(opUnder);
-
-        // let $w := get-handle($t, path-expression)
-        LogicalVariable w = context.newVar();
-        AbstractFunctionCallExpression getHandle = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_HANDLE));
-        getHandle.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(t)));
-        // the accessed field
-        getHandle.getArguments().add(new MutableObject<ILogicalExpression>(fce.getArguments().get(1).getValue()));
-        AssignOperator handleAssign = new AssignOperator(w, new MutableObject<ILogicalExpression>(getHandle));
-        handleAssign.getInputs().add(new MutableObject<ILogicalOperator>(typAssign));
-
-        // let $y := get-data(x, $w)
-        AbstractFunctionCallExpression getData = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_DATA));
-        VariableReferenceExpression ref2 = new VariableReferenceExpression(x.getVariableReference());
-        getData.getArguments().add(new MutableObject<ILogicalExpression>(ref2));
-        getData.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(w)));
-        fieldAccessOp.getExpressions().get(0).setValue(getData);
-        List<Mutable<ILogicalOperator>> faInputs = fieldAccessOp.getInputs();
-        faInputs.clear();
-        faInputs.add(new MutableObject<ILogicalOperator>(handleAssign));
-
-        // fieldAccess.setAnnotation(OperatorAnnotation.FIELD_ACCESS,
-        // fce.getArguments().get(0));
-        fieldAccessOp.removeAnnotation(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS);
-    }
-
-}

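For context on the rule removed above: it splits a pushed field access into a chain of assigns, $t := type-of(x), $w := get-handle($t, field), result := get-data(x, $w), so the field is resolved through a handle rather than by name on every tuple. A rough standalone sketch of that idea follows; the class and method names below are illustrative stand-ins, not Asterix or Algebricks APIs, and the handle is simplified to a field position.

import java.util.LinkedHashMap;
import java.util.Map;

public class ByHandleAccessSketch {

    // "get-handle": resolve a handle (here simply the field's position) from the record's type.
    static int getHandle(Map<String, Integer> recordType, String field) {
        Integer pos = recordType.get(field);
        return pos == null ? -1 : pos;
    }

    // "get-data": read the value through the previously resolved handle.
    static Object getData(Object[] record, int handle) {
        return handle < 0 ? null : record[handle];
    }

    public static void main(String[] args) {
        // "type-of": the field-name-to-position map stands in for the record's computed type.
        Map<String, Integer> recordType = new LinkedHashMap<>();
        recordType.put("id", 0);
        recordType.put("name", 1);

        Object[] record = { 42, "asterix" };
        int handle = getHandle(recordType, "name");  // let $w := get-handle($t, "name")
        System.out.println(getData(record, handle)); // let $y := get-data(x, $w) -> prints "asterix"
    }
}
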
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
deleted file mode 100644
index 0dee2be..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class ByNameToByIndexFieldAccessRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        AssignOperator assign = (AssignOperator) op;
-        if (assign.getExpressions().get(0).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-
-        List<Mutable<ILogicalExpression>> expressions = assign.getExpressions();
-        boolean changed = false;
-        for (int i = 0; i < expressions.size(); i++) {
-            AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expressions.get(i).getValue();
-            if (fce.getFunctionIdentifier() != AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME) {
-                continue;
-            }
-            IVariableTypeEnvironment env = context.getOutputTypeEnvironment(op);
-
-            ILogicalExpression a0 = fce.getArguments().get(0).getValue();
-            if (a0.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                LogicalVariable var1 = context.newVar();
-                ArrayList<LogicalVariable> varArray = new ArrayList<LogicalVariable>(1);
-                varArray.add(var1);
-                ArrayList<Mutable<ILogicalExpression>> exprArray = new ArrayList<Mutable<ILogicalExpression>>(1);
-                exprArray.add(new MutableObject<ILogicalExpression>(a0));
-                AssignOperator assignVar = new AssignOperator(varArray, exprArray);
-                fce.getArguments().get(0).setValue(new VariableReferenceExpression(var1));
-                assignVar.getInputs().add(new MutableObject<ILogicalOperator>(assign.getInputs().get(0).getValue()));
-                assign.getInputs().get(0).setValue(assignVar);
-                context.computeAndSetTypeEnvironmentForOperator(assignVar);
-                context.computeAndSetTypeEnvironmentForOperator(assign);
-                // access-by-name was not replaced with access-by-index, but the plan was altered, hence changed is true
-                changed = true;
-            }
-
-            IAType t = (IAType) env.getType(fce.getArguments().get(0).getValue());
-            try {
-                switch (t.getTypeTag()) {
-                    case ANY: {
-                        return false || changed;
-                    }
-                    case RECORD: {
-                        ARecordType recType = (ARecordType) t;
-                        ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
-                        if (fai == null) {
-                            return false || changed;
-                        }
-                        expressions.get(i).setValue(fai);
-                        changed = true;
-                        break;
-                    }
-                    case UNION: {
-                        AUnionType unionT = (AUnionType) t;
-                        if (unionT.isNullableType()) {
-                            IAType t2 = unionT.getNullableType();
-                            if (t2.getTypeTag() == ATypeTag.RECORD) {
-                                ARecordType recType = (ARecordType) t2;
-                                ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
-                                if (fai == null) {
-                                    return false || changed;
-                                }
-                                expressions.get(i).setValue(fai);
-                                changed = true;
-                                break;
-                            }
-                        }
-                        throw new NotImplementedException("Union " + unionT);
-                    }
-                    default: {
-                        throw new AlgebricksException("Cannot call field-access on data of type " + t);
-                    }
-                }
-            } catch (IOException e) {
-                throw new AlgebricksException(e);
-            }
-        }
-        assign.removeAnnotation(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS);
-        return changed;
-    }
-
-    @SuppressWarnings("unchecked")
-    private static ILogicalExpression createFieldAccessByIndex(ARecordType recType, AbstractFunctionCallExpression fce)
-            throws IOException {
-        String s = getStringSecondArgument(fce);
-        if (s == null) {
-            return null;
-        }
-        int k = recType.findFieldPosition(s);
-        if (k < 0) {
-            return null;
-        }
-        return new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
-                fce.getArguments().get(0), new MutableObject<ILogicalExpression>(new ConstantExpression(
-                        new AsterixConstantValue(new AInt32(k)))));
-    }
-
-    private static String getStringSecondArgument(AbstractFunctionCallExpression expr) {
-        ILogicalExpression e2 = expr.getArguments().get(1).getValue();
-        if (e2.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            return null;
-        }
-        ConstantExpression c = (ConstantExpression) e2;
-        if (!(c.getValue() instanceof AsterixConstantValue)) {
-            return null;
-        }
-        IAObject v = ((AsterixConstantValue) c.getValue()).getObject();
-        if (v.getType().getTypeTag() != ATypeTag.STRING) {
-            return null;
-        }
-        return ((AString) v).getStringValue();
-    }
-}

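The rule deleted above rewrites field-access-by-name into field-access-by-index when the input's record type is a known closed type, using ARecordType.findFieldPosition to resolve the index (and leaving the call untouched when the field is not declared or the type is ANY). The sketch below illustrates just that name-to-index resolution on a simplified closed schema; it is plain Java, not the Asterix type system.

import java.util.Arrays;
import java.util.List;

public class NameToIndexSketch {
    // Closed record schema: field order is fixed, so a field name maps to a stable index.
    static final List<String> FIELD_NAMES = Arrays.asList("id", "name", "age");

    // Simplified stand-in for ARecordType.findFieldPosition: -1 when the field is not declared.
    static int findFieldPosition(String field) {
        return FIELD_NAMES.indexOf(field);
    }

    public static void main(String[] args) {
        // field-access-by-name($r, "age") would become field-access-by-index($r, 2),
        // because "age" is the third declared field of the closed type.
        System.out.println(findFieldPosition("age"));     // 2  -> rewrite applies
        System.out.println(findFieldPosition("address")); // -1 -> rule leaves the call as-is
    }
}
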
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
deleted file mode 100644
index 73b0de7..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RunningAggregatePOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnpartitionedPropertyComputer;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This rule cancels the unnest with the nested listify. Formally, the following plan<br/>
- *
- * <pre>
- * unnest $x <- $y
- *   group-by($k){
- *     aggregate $y <- listify($z)
- *     ...
- *   }
- * </pre>
- *
- * will be converted into<br/>
- *
- * <pre>
- * assign $x <- $z
- *   sort($k)
- * </pre>
- *
- * When the positional variable exists, for example the original plan is like<br/>
- *
- * <pre>
- * unnest $x at $p <- $y
- *   group-by($k){
- *     aggregate $y <- listify($z)
- *     ...
- *   }
- * </pre>
- *
- * will be converted into<br/>
- *
- * <pre>
- * group-by($k){
- *   running-aggregate $p <- tid()
- * }
- * </pre>
- */
-public class CancelUnnestWithNestedListifyRule implements IAlgebraicRewriteRule {
-
-    /* (non-Javadoc)
-     * @see edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule#rewritePost(org.apache.commons.lang3.mutable.Mutable, edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext)
-     */
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        // apply it only at the top of the plan
-        ILogicalOperator op = opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        Set<LogicalVariable> varSet = new HashSet<LogicalVariable>();
-        return applyRuleDown(opRef, varSet, context);
-    }
-
-    private boolean applyRuleDown(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varSet,
-            IOptimizationContext context) throws AlgebricksException {
-        boolean changed = applies(opRef, varSet, context);
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        VariableUtilities.getUsedVariables(op, varSet);
-        if (op.hasNestedPlans()) {
-            AbstractOperatorWithNestedPlans aonp = (AbstractOperatorWithNestedPlans) op;
-            for (ILogicalPlan p : aonp.getNestedPlans()) {
-                for (Mutable<ILogicalOperator> r : p.getRoots()) {
-                    if (applyRuleDown(r, varSet, context)) {
-                        changed = true;
-                    }
-                    context.addToDontApplySet(this, r.getValue());
-                }
-            }
-        }
-        for (Mutable<ILogicalOperator> i : op.getInputs()) {
-            if (applyRuleDown(i, varSet, context)) {
-                changed = true;
-            }
-            context.addToDontApplySet(this, i.getValue());
-        }
-        return changed;
-    }
-
-    private boolean applies(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varUsedAbove,
-            IOptimizationContext context) throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-            return false;
-        }
-        UnnestOperator unnest1 = (UnnestOperator) op1;
-        ILogicalExpression expr = unnest1.getExpressionRef().getValue();
-        LogicalVariable unnestedVar;
-        switch (expr.getExpressionTag()) {
-            case VARIABLE:
-                unnestedVar = ((VariableReferenceExpression) expr).getVariableReference();
-                break;
-            case FUNCTION_CALL:
-                if (((AbstractFunctionCallExpression) expr).getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
-                    return false;
-                }
-                AbstractFunctionCallExpression functionCall = (AbstractFunctionCallExpression) expr;
-                ILogicalExpression functionCallArgExpr = functionCall.getArguments().get(0).getValue();
-                if (functionCallArgExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-                    return false;
-                }
-                unnestedVar = ((VariableReferenceExpression) functionCallArgExpr).getVariableReference();
-                break;
-            default:
-                return false;
-        }
-        if (varUsedAbove.contains(unnestedVar)) {
-            return false;
-        }
-
-        Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
-        AbstractLogicalOperator r = (AbstractLogicalOperator) opRef2.getValue();
-
-        if (r.getOperatorTag() != LogicalOperatorTag.GROUP) {
-            return false;
-        }
-
-        // go inside of a group-by plan
-        GroupByOperator gby = (GroupByOperator) r;
-        if (gby.getNestedPlans().size() != 1) {
-            return false;
-        }
-        if (gby.getNestedPlans().get(0).getRoots().size() != 1) {
-            return false;
-        }
-
-        AbstractLogicalOperator nestedPlanRoot = (AbstractLogicalOperator) gby.getNestedPlans().get(0).getRoots()
-                .get(0).getValue();
-        if (nestedPlanRoot.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AggregateOperator agg = (AggregateOperator) nestedPlanRoot;
-        Mutable<ILogicalOperator> aggInputOpRef = agg.getInputs().get(0);
-
-        if (agg.getVariables().size() > 1) {
-            return false;
-        }
-
-        LogicalVariable aggVar = agg.getVariables().get(0);
-        ILogicalExpression aggFun = agg.getExpressions().get(0).getValue();
-        if (!aggVar.equals(unnestedVar)
-                || ((AbstractLogicalExpression) aggFun).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) aggFun;
-        if (!AsterixBuiltinFunctions.LISTIFY.equals(f.getFunctionIdentifier())) {
-            return false;
-        }
-        if (f.getArguments().size() != 1) {
-            return false;
-        }
-        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
-        if (((AbstractLogicalExpression) arg0).getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-        LogicalVariable paramVar = ((VariableReferenceExpression) arg0).getVariableReference();
-
-        ArrayList<LogicalVariable> assgnVars = new ArrayList<LogicalVariable>(1);
-        assgnVars.add(unnest1.getVariable());
-        ArrayList<Mutable<ILogicalExpression>> assgnExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
-        assgnExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(paramVar)));
-        AssignOperator assign = new AssignOperator(assgnVars, assgnExprs);
-
-        LogicalVariable posVar = unnest1.getPositionalVariable();
-        if (posVar == null) {
-            // Creates assignment for group-by keys.
-            ArrayList<LogicalVariable> gbyKeyAssgnVars = new ArrayList<LogicalVariable>();
-            ArrayList<Mutable<ILogicalExpression>> gbyKeyAssgnExprs = new ArrayList<Mutable<ILogicalExpression>>();
-            for (int i = 0; i < gby.getGroupByList().size(); i++) {
-                if (gby.getGroupByList().get(i).first != null) {
-                    gbyKeyAssgnVars.add(gby.getGroupByList().get(i).first);
-                    gbyKeyAssgnExprs.add(gby.getGroupByList().get(i).second);
-                }
-            }
-
-            // Moves the nested pipeline before aggregation out of the group-by op.
-            Mutable<ILogicalOperator> bottomOpRef = aggInputOpRef;
-            AbstractLogicalOperator bottomOp = (AbstractLogicalOperator) bottomOpRef.getValue();
-            while (bottomOp.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
-                bottomOpRef = bottomOp.getInputs().get(0);
-                bottomOp = (AbstractLogicalOperator) bottomOpRef.getValue();
-            }
-
-            // Removes the group-by operator.
-            opRef.setValue(assign);
-            assign.getInputs().add(aggInputOpRef);
-            AssignOperator gbyKeyAssign = new AssignOperator(gbyKeyAssgnVars, gbyKeyAssgnExprs);
-            gbyKeyAssign.getInputs().add(gby.getInputs().get(0));
-            bottomOpRef.setValue(gbyKeyAssign);
-
-            context.computeAndSetTypeEnvironmentForOperator(gbyKeyAssign);
-            context.computeAndSetTypeEnvironmentForOperator(assign);
-        } else {
-            // if a positional variable is used in the unnest, the unnest is pushed into the group-by as a running-aggregate
-
-            // First create assign for the unnest variable
-            List<LogicalVariable> nestedAssignVars = new ArrayList<LogicalVariable>();
-            List<Mutable<ILogicalExpression>> nestedAssignExprs = new ArrayList<Mutable<ILogicalExpression>>();
-            nestedAssignVars.add(unnest1.getVariable());
-            nestedAssignExprs.add(new MutableObject<ILogicalExpression>(arg0));
-            AssignOperator nestedAssign = new AssignOperator(nestedAssignVars, nestedAssignExprs);
-            nestedAssign.getInputs().add(opRef2);
-
-            // Then create running aggregation for the positional variable
-            List<LogicalVariable> raggVars = new ArrayList<LogicalVariable>();
-            List<Mutable<ILogicalExpression>> raggExprs = new ArrayList<Mutable<ILogicalExpression>>();
-            raggVars.add(posVar);
-            StatefulFunctionCallExpression fce = new StatefulFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TID), UnpartitionedPropertyComputer.INSTANCE);
-            raggExprs.add(new MutableObject<ILogicalExpression>(fce));
-            RunningAggregateOperator raggOp = new RunningAggregateOperator(raggVars, raggExprs);
-            raggOp.setExecutionMode(unnest1.getExecutionMode());
-            RunningAggregatePOperator raggPOp = new RunningAggregatePOperator();
-            raggOp.setPhysicalOperator(raggPOp);
-            raggOp.getInputs().add(nestedPlanRoot.getInputs().get(0));
-            gby.getNestedPlans().get(0).getRoots().set(0, new MutableObject<ILogicalOperator>(raggOp));
-
-            opRef.setValue(nestedAssign);
-
-            context.computeAndSetTypeEnvironmentForOperator(nestedAssign);
-            context.computeAndSetTypeEnvironmentForOperator(raggOp);
-            context.computeAndSetTypeEnvironmentForOperator(gby);
-
-        }
-
-        return true;
-    }
-}

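As a sanity check on the rewrite documented in the Javadoc above: unnesting a per-group listify($z) yields exactly the $z values of each group, so the listify/unnest pair can be cancelled and replaced by an assign over a sort on the grouping key. The standalone sketch below demonstrates the equivalence on plain collections (the grouping key order happens to match the sort order here); it does not use the Algebricks operator classes.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ListifyUnnestSketch {
    public static void main(String[] args) {
        // (key, z) input tuples.
        int[][] tuples = { { 1, 10 }, { 2, 20 }, { 1, 11 }, { 2, 21 } };

        // Plan A: group-by key, listify z per group, then unnest the lists.
        Map<Integer, List<Integer>> groups = new LinkedHashMap<>();
        for (int[] t : tuples) {
            groups.computeIfAbsent(t[0], k -> new ArrayList<>()).add(t[1]);
        }
        List<Integer> unnested = new ArrayList<>();
        groups.values().forEach(unnested::addAll);

        // Plan B: sort by key and keep z directly (assign after sort).
        List<int[]> sorted = new ArrayList<>(Arrays.asList(tuples));
        sorted.sort((a, b) -> Integer.compare(a[0], b[0]));
        List<Integer> direct = new ArrayList<>();
        sorted.forEach(t -> direct.add(t[1]));

        System.out.println(unnested); // [10, 11, 20, 21]
        System.out.println(direct);   // [10, 11, 20, 21]
    }
}
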
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java
deleted file mode 100644
index 7e9ebcc..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This rule checks whether all filter expressions are of the boolean type or of a possibly
- * (determined at runtime) boolean type.
- * If that is not the case, an exception should be thrown.
- *
- * @author yingyib
- */
-public class CheckFilterExpressionTypeRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-        SelectOperator select = (SelectOperator) op;
-        ILogicalExpression condition = select.getCondition().getValue();
-        IVariableTypeEnvironment env = select.computeOutputTypeEnvironment(context);
-        IAType condType = (IAType) env.getType(condition);
-        if (condType.getTypeTag() != ATypeTag.BOOLEAN && condType.getTypeTag() != ATypeTag.ANY
-                && !isPossibleBoolean(condType)) {
-            throw new AlgebricksException("The select condition " + condition.toString()
-                    + " should be of the boolean type.");
-        }
-        return false;
-    }
-
-    /**
- * Check whether the type is an optional boolean.
-     * 
-     * @param type
-     * @return true if it is; false otherwise.
-     */
-    private boolean isPossibleBoolean(IAType type) {
-        while (NonTaggedFormatUtil.isOptional(type)) {
-            type = ((AUnionType) type).getNullableType();
-            if (type.getTypeTag() == ATypeTag.BOOLEAN || type.getTypeTag() == ATypeTag.ANY) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-}

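The rule removed above throws an AlgebricksException when a SELECT condition's type is neither BOOLEAN, ANY, nor an optional boolean. A minimal standalone sketch of the same check is below; the enum and exception are simplified stand-ins, and the optional/nullable-boolean case handled by isPossibleBoolean is omitted.

public class FilterTypeCheckSketch {
    enum TypeTag { BOOLEAN, STRING, INT, ANY }

    // A filter type is acceptable if it is BOOLEAN, or ANY (only decidable at runtime).
    static void checkFilterType(TypeTag conditionType) {
        if (conditionType != TypeTag.BOOLEAN && conditionType != TypeTag.ANY) {
            throw new IllegalArgumentException(
                    "The select condition should be of the boolean type, but was: " + conditionType);
        }
    }

    public static void main(String[] args) {
        checkFilterType(TypeTag.BOOLEAN); // ok
        checkFilterType(TypeTag.ANY);     // ok, resolved at runtime
        try {
            checkFilterType(TypeTag.STRING); // rejected, like the rule's AlgebricksException
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}
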
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
deleted file mode 100644
index be1653d..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
-import edu.uci.ics.asterix.dataflow.data.common.AqlNullableTypeComputer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.AqlNullWriterFactory;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryIntegerInspector;
-import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
-import edu.uci.ics.asterix.jobgen.AqlLogicalExpressionJobGen;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluator;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.api.IPointable;
-import edu.uci.ics.hyracks.data.std.primitive.VoidPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
-
-public class ConstantFoldingRule implements IAlgebraicRewriteRule {
-
-    private final ConstantFoldingVisitor cfv = new ConstantFoldingVisitor();
-
-    /** Throws exceptions in substituteProducedVariable, setVarType, and one getVarType method. */
-    private static final IVariableTypeEnvironment _emptyTypeEnv = new IVariableTypeEnvironment() {
-
-        @Override
-        public boolean substituteProducedVariable(LogicalVariable v1, LogicalVariable v2) throws AlgebricksException {
-            throw new IllegalStateException();
-        }
-
-        @Override
-        public void setVarType(LogicalVariable var, Object type) {
-            throw new IllegalStateException();
-        }
-
-        @Override
-        public Object getVarType(LogicalVariable var, List<LogicalVariable> nonNullVariables,
-                List<List<LogicalVariable>> correlatedNullableVariableLists) throws AlgebricksException {
-            throw new IllegalStateException();
-        }
-
-        @Override
-        public Object getVarType(LogicalVariable var) throws AlgebricksException {
-            throw new IllegalStateException();
-        }
-
-        @Override
-        public Object getType(ILogicalExpression expr) throws AlgebricksException {
-            return AqlExpressionTypeComputer.INSTANCE.getType(expr, null, this);
-        }
-    };
-
-    private static final JobGenContext _jobGenCtx = new JobGenContext(null, null, null,
-            AqlSerializerDeserializerProvider.INSTANCE, AqlBinaryHashFunctionFactoryProvider.INSTANCE,
-            AqlBinaryHashFunctionFamilyProvider.INSTANCE, AqlBinaryComparatorFactoryProvider.INSTANCE,
-            AqlTypeTraitProvider.INSTANCE, AqlBinaryBooleanInspectorImpl.FACTORY, AqlBinaryIntegerInspector.FACTORY,
-            AqlPrinterFactoryProvider.INSTANCE, AqlNullWriterFactory.INSTANCE, null,
-            new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(AqlLogicalExpressionJobGen.INSTANCE),
-            AqlExpressionTypeComputer.INSTANCE, AqlNullableTypeComputer.INSTANCE, null, null, null, null,
-            GlobalConfig.DEFAULT_FRAME_SIZE, null);
-
-    private static final IOperatorSchema[] _emptySchemas = new IOperatorSchema[] {};
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        ILogicalOperator op = opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        return op.acceptExpressionTransform(cfv);
-    }
-
-    private class ConstantFoldingVisitor implements ILogicalExpressionVisitor<Pair<Boolean, ILogicalExpression>, Void>,
-            ILogicalExpressionReferenceTransform {
-
-        private final IPointable p = VoidPointable.FACTORY.createPointable();
-        private final ByteBufferInputStream bbis = new ByteBufferInputStream();
-        private final DataInputStream dis = new DataInputStream(bbis);
-
-        @Override
-        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
-            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
-            Pair<Boolean, ILogicalExpression> p = expr.accept(this, null);
-            if (p.first) {
-                exprRef.setValue(p.second);
-            }
-            return p.first;
-        }
-
-        @Override
-        public Pair<Boolean, ILogicalExpression> visitConstantExpression(ConstantExpression expr, Void arg)
-                throws AlgebricksException {
-            return new Pair<Boolean, ILogicalExpression>(false, expr);
-        }
-
-        @Override
-        public Pair<Boolean, ILogicalExpression> visitVariableReferenceExpression(VariableReferenceExpression expr,
-                Void arg) throws AlgebricksException {
-            return new Pair<Boolean, ILogicalExpression>(false, expr);
-        }
-
-        @Override
-        public Pair<Boolean, ILogicalExpression> visitScalarFunctionCallExpression(ScalarFunctionCallExpression expr,
-                Void arg) throws AlgebricksException {
-            boolean changed = changeRec(expr, arg);
-            if (!checkArgs(expr) || !expr.isFunctional()) {
-                return new Pair<Boolean, ILogicalExpression>(changed, expr);
-            }
-            //Current ARecord SerDe assumes a closed record, so we do not constant fold open record constructors
-            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)
-                    || expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
-                return new Pair<Boolean, ILogicalExpression>(false, null);
-            }
-            //Current List SerDe assumes a strongly typed list, so we do not constant fold the list constructors if they are not strongly typed
-            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)
-                    || expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
-                AbstractCollectionType listType = (AbstractCollectionType) TypeComputerUtilities.getRequiredType(expr);
-                if (listType != null
-                        && (listType.getItemType().getTypeTag() == ATypeTag.ANY || listType.getItemType() instanceof AbstractCollectionType)) {
-                    //case1: listType == null,  could be a nested list inside a list<ANY>
-                    //case2: itemType = ANY
-                    //case3: itemType = a nested list
-                    return new Pair<Boolean, ILogicalExpression>(false, null);
-                }
-            }
-            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
-                ARecordType rt = (ARecordType) _emptyTypeEnv.getType(expr.getArguments().get(0).getValue());
-                String str = ((AString) ((AsterixConstantValue) ((ConstantExpression) expr.getArguments().get(1)
-                        .getValue()).getValue()).getObject()).getStringValue();
-                int k;
-                try {
-                    k = rt.findFieldPosition(str);
-                } catch (IOException e) {
-                    throw new AlgebricksException(e);
-                }
-                if (k >= 0) {
-                    // wait for the ByNameToByIndex rule to apply
-                    return new Pair<Boolean, ILogicalExpression>(changed, expr);
-                }
-            }
-            IScalarEvaluatorFactory fact = _jobGenCtx.getExpressionRuntimeProvider().createEvaluatorFactory(expr,
-                    _emptyTypeEnv, _emptySchemas, _jobGenCtx);
-            IScalarEvaluator eval = fact.createScalarEvaluator(null);
-            eval.evaluate(null, p);
-            Object t = _emptyTypeEnv.getType(expr);
-
-            @SuppressWarnings("rawtypes")
-            ISerializerDeserializer serde = _jobGenCtx.getSerializerDeserializerProvider().getSerializerDeserializer(t);
-            bbis.setByteBuffer(ByteBuffer.wrap(p.getByteArray(), p.getStartOffset(), p.getLength()), 0);
-            IAObject o;
-            try {
-                o = (IAObject) serde.deserialize(dis);
-            } catch (HyracksDataException e) {
-                throw new AlgebricksException(e);
-            }
-            return new Pair<Boolean, ILogicalExpression>(true, new ConstantExpression(new AsterixConstantValue(o)));
-        }
-
-        @Override
-        public Pair<Boolean, ILogicalExpression> visitAggregateFunctionCallExpression(
-                AggregateFunctionCallExpression expr, Void arg) throws AlgebricksException {
-            boolean changed = changeRec(expr, arg);
-            return new Pair<Boolean, ILogicalExpression>(changed, expr);
-        }
-
-        @Override
-        public Pair<Boolean, ILogicalExpression> visitStatefulFunctionCallExpression(
-                StatefulFunctionCallExpression expr, Void arg) throws AlgebricksException {
-            boolean changed = changeRec(expr, arg);
-            return new Pair<Boolean, ILogicalExpression>(changed, expr);
-        }
-
-        @Override
-        public Pair<Boolean, ILogicalExpression> visitUnnestingFunctionCallExpression(
-                UnnestingFunctionCallExpression expr, Void arg) throws AlgebricksException {
-            boolean changed = changeRec(expr, arg);
-            return new Pair<Boolean, ILogicalExpression>(changed, expr);
-        }
-
-        private boolean changeRec(AbstractFunctionCallExpression expr, Void arg) throws AlgebricksException {
-            boolean changed = false;
-            for (Mutable<ILogicalExpression> r : expr.getArguments()) {
-                Pair<Boolean, ILogicalExpression> p2 = r.getValue().accept(this, arg);
-                if (p2.first) {
-                    r.setValue(p2.second);
-                    changed = true;
-                }
-            }
-            return changed;
-        }
-
-        private boolean checkArgs(AbstractFunctionCallExpression expr) throws AlgebricksException {
-            for (Mutable<ILogicalExpression> r : expr.getArguments()) {
-                if (r.getValue().getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                    return false;
-                }
-            }
-            return true;
-        }
-    }
-}

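The rule deleted above evaluates purely constant, functional expressions at compile time and replaces them with the computed constant, skipping open-record constructors, weakly typed list constructors, and by-name field accesses that can still be turned into by-index accesses. A rough standalone sketch of the core folding step on a toy expression tree follows; it mirrors the checkArgs guard (fold only when every argument is constant) but uses none of the Algebricks evaluator machinery, so all names here are illustrative.

import java.util.Arrays;
import java.util.List;

public class ConstantFoldingSketch {
    interface Expr { }
    static class Const implements Expr {
        final int value;
        Const(int value) { this.value = value; }
    }
    static class Var implements Expr {
        final String name;
        Var(String name) { this.name = name; }
    }
    static class Add implements Expr {
        final List<Expr> args;
        Add(Expr... args) { this.args = Arrays.asList(args); }
    }

    // Fold an addition only when every argument folds to a constant, as checkArgs does.
    static Expr fold(Expr e) {
        if (!(e instanceof Add)) {
            return e;
        }
        Add add = (Add) e;
        int sum = 0;
        for (Expr arg : add.args) {
            Expr folded = fold(arg);
            if (!(folded instanceof Const)) {
                return e; // a non-constant argument blocks folding of this node in this simplified sketch
            }
            sum += ((Const) folded).value;
        }
        return new Const(sum);
    }

    public static void main(String[] args) {
        Expr folded = fold(new Add(new Const(1), new Const(2)));
        System.out.println(((Const) folded).value);  // 3: replaced by a constant at "compile time"

        Expr blocked = fold(new Add(new Const(1), new Var("x")));
        System.out.println(blocked instanceof Add);  // true: left unfolded because of the variable
    }
}
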
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java
deleted file mode 100644
index 218cd4b..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/CountVarToCountOneRule.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class CountVarToCountOneRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    // This rule applies only to a group-by with a single aggregate that is a count.
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.GROUP) {
-            return false;
-        }
-        GroupByOperator g = (GroupByOperator) op1;
-        if (g.getNestedPlans().size() != 1) {
-            return false;
-        }
-        ILogicalPlan p = g.getNestedPlans().get(0);
-        if (p.getRoots().size() != 1) {
-            return false;
-        }
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) p.getRoots().get(0).getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AggregateOperator agg = (AggregateOperator) op2;
-        if (agg.getExpressions().size() != 1) {
-            return false;
-        }
-        ILogicalExpression exp2 = agg.getExpressions().get(0).getValue();
-        if (exp2.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) exp2;
-        if (fun.getFunctionIdentifier() != AsterixBuiltinFunctions.COUNT) {
-            return false;
-        }
-        ILogicalExpression exp3 = fun.getArguments().get(0).getValue();
-        if (exp3.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-        if (((AbstractLogicalOperator) agg.getInputs().get(0).getValue()).getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
-            return false;
-        }
-        fun.getArguments().get(0).setValue(new ConstantExpression(new AsterixConstantValue(new AInt64(1L))));
-        return true;
-    }
-
-}

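The rule removed above applies when a group-by has a single count aggregate fed directly by the group's nested tuple source; in that shape the count depends only on the number of tuples in the group, so count($x) can be rewritten to count(1) and the variable never needs to be read. A trivial standalone illustration of the equivalence, in plain Java rather than the Asterix count runtime:

import java.util.Arrays;
import java.util.List;

public class CountVarToCountOneSketch {
    public static void main(String[] args) {
        // One group's tuples, with the counted variable $x bound to some field value per tuple.
        List<String> xValues = Arrays.asList("a", "b", "c");

        // count($x): one item per tuple, obtained by looking at the variable.
        long countVar = xValues.stream().count();

        // count(1): one constant per tuple; the variable is never materialized.
        long countOne = xValues.stream().map(v -> 1).count();

        System.out.println(countVar + " == " + countOne); // 3 == 3
    }
}
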
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
deleted file mode 100644
index 0fb205d..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Copyright 2014 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.HashSet;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.om.base.AOrderedList;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class DisjunctivePredicateToJoinRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        SelectOperator select;
-        if ((select = asSelectOperator(opRef)) == null) {
-            return false;
-        }
-
-        AbstractFunctionCallExpression condEx;
-        if ((condEx = asFunctionCallExpression(select.getCondition(), AlgebricksBuiltinFunctions.OR)) == null) {
-            return false;
-        }
-
-        List<Mutable<ILogicalExpression>> args = condEx.getArguments();
-
-        VariableReferenceExpression varEx = null;
-        IAType valType = null;
-        HashSet<AsterixConstantValue> values = new HashSet<AsterixConstantValue>();
-
-        for (Mutable<ILogicalExpression> arg : args) {
-            AbstractFunctionCallExpression fctCall;
-            if ((fctCall = asFunctionCallExpression(arg, AlgebricksBuiltinFunctions.EQ)) == null) {
-                return false;
-            }
-
-            boolean haveConst = false;
-            boolean haveVar = false;
-            List<Mutable<ILogicalExpression>> fctArgs = fctCall.getArguments();
-            for (Mutable<ILogicalExpression> fctArg : fctArgs) {
-                final ILogicalExpression argExpr = fctArg.getValue();
-                switch (argExpr.getExpressionTag()) {
-                    case CONSTANT:
-                        haveConst = true;
-                        AsterixConstantValue value = (AsterixConstantValue) ((ConstantExpression) argExpr).getValue();
-                        if (valType == null) {
-                            valType = value.getObject().getType();
-                        } else if (!isCompatible(valType, value.getObject().getType())) {
-                            return false;
-                        }
-                        values.add(value);
-                        break;
-                    case VARIABLE:
-                        haveVar = true;
-                        final VariableReferenceExpression varArg = (VariableReferenceExpression) argExpr;
-                        if (varEx == null) {
-                            varEx = varArg;
-                        } else if (!varEx.getVariableReference().equals(varArg.getVariableReference())) {
-                            return false;
-                        }
-                        break;
-                    default:
-                        return false;
-                }
-            }
-            if (!(haveVar && haveConst)) {
-                return false;
-            }
-        }
-
-        AOrderedList list = new AOrderedList(new AOrderedListType(valType, "orderedlist"));
-        for (AsterixConstantValue value : values) {
-            list.add(value.getObject());
-        }
-
-        EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
-        context.computeAndSetTypeEnvironmentForOperator(ets);
-
-        ILogicalExpression cExp = new ConstantExpression(new AsterixConstantValue(list));
-        Mutable<ILogicalExpression> mutCExp = new MutableObject<ILogicalExpression>(cExp);
-        IFunctionInfo scanFctInfo = AsterixBuiltinFunctions
-                .getAsterixFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION);
-        UnnestingFunctionCallExpression scanExp = new UnnestingFunctionCallExpression(scanFctInfo, mutCExp);
-        LogicalVariable scanVar = context.newVar();
-        UnnestOperator unn = new UnnestOperator(scanVar, new MutableObject<ILogicalExpression>(scanExp));
-        unn.getInputs().add(new MutableObject<ILogicalOperator>(ets));
-        context.computeAndSetTypeEnvironmentForOperator(unn);
-
-        IFunctionInfo eqFctInfo = AsterixBuiltinFunctions.getAsterixFunctionInfo(AlgebricksBuiltinFunctions.EQ);
-        AbstractFunctionCallExpression eqExp = new ScalarFunctionCallExpression(eqFctInfo);
-        eqExp.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(scanVar)));
-        eqExp.getArguments().add(new MutableObject<ILogicalExpression>(varEx.cloneExpression()));
-        eqExp.getAnnotations().put(IndexedNLJoinExpressionAnnotation.INSTANCE,
-                IndexedNLJoinExpressionAnnotation.INSTANCE);
-
-        InnerJoinOperator jOp = new InnerJoinOperator(new MutableObject<ILogicalExpression>(eqExp));
-        jOp.getInputs().add(new MutableObject<ILogicalOperator>(unn));
-        jOp.getInputs().add(select.getInputs().get(0));
-
-        opRef.setValue(jOp);
-        context.computeAndSetTypeEnvironmentForOperator(jOp);
-
-        return true;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    /**
-     * This checks the compatibility of the types of the constants to ensure that the comparison behaves as expected
-     * when joining. Right now this compatibility is defined as type equality, but it could be relaxed.
-     * Once type promotion works correctly in all parts of the system, this check should not be needed anymore.
-     * (see https://code.google.com/p/asterixdb/issues/detail?id=716)
-     * 
-     * @param t1
-     *            one type
-     * @param t2
-     *            another type
-     * @return true, if types are equal
-     */
-    private static boolean isCompatible(IAType t1, IAType t2) {
-        return t1.equals(t2);
-    }
-
-    // some helpers
-
-    private static SelectOperator asSelectOperator(ILogicalOperator op) {
-        return op.getOperatorTag() == LogicalOperatorTag.SELECT ? (SelectOperator) op : null;
-    }
-
-    private static SelectOperator asSelectOperator(Mutable<ILogicalOperator> op) {
-        return asSelectOperator(op.getValue());
-    }
-
-    private static AbstractFunctionCallExpression asFunctionCallExpression(ILogicalExpression ex, FunctionIdentifier fi) {
-        AbstractFunctionCallExpression fctCall = (ex.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL ? (AbstractFunctionCallExpression) ex
-                : null);
-        if (fctCall != null && (fi == null || fctCall.getFunctionIdentifier().equals(fi)))
-            return fctCall;
-        return null;
-    }
-
-    private static AbstractFunctionCallExpression asFunctionCallExpression(Mutable<ILogicalExpression> ex,
-            FunctionIdentifier fi) {
-        return asFunctionCallExpression(ex.getValue(), fi);
-    }
-
-}
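
A note on the DisjunctivePredicateToJoinRule deleted above: the rule matches a SELECT whose condition is an OR of equality comparisons between a single variable and constants, collects the constants into an ordered list, and replaces the select with a scan-collection unnest over that list joined back to the original input on equality (hinted as an indexed nested-loop join). The following minimal, standalone Java sketch only illustrates the equivalence the rule relies on; the class and variable names are illustrative and none of this is Algebricks API.

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class DisjunctionAsJoinSketch {
    public static void main(String[] args) {
        List<Integer> input = List.of(1, 4, 2, 7, 3);

        // Original shape: SELECT with condition x = 1 OR x = 2 OR x = 3.
        List<Integer> viaDisjunction = input.stream()
                .filter(x -> x == 1 || x == 2 || x == 3)
                .collect(Collectors.toList());

        // Rewritten shape: unnest the constant collection [1, 2, 3] and
        // join it back to the input on equality (a membership test here).
        Set<Integer> constants = Set.of(1, 2, 3);
        List<Integer> viaJoin = input.stream()
                .filter(constants::contains)
                .collect(Collectors.toList());

        System.out.println(viaDisjunction.equals(viaJoin)); // prints: true
    }
}

The rewritten form can pay off when the equality join is driven by an index on the filtered variable, which is what the IndexedNLJoinExpressionAnnotation in the rule hints at.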

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
deleted file mode 100644
index 7c36cb7..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
-
-/**
- * Needed only because current Hyracks operators require keys to be fields.
- */
-public class ExtractDistinctByExpressionsRule extends AbstractExtractExprRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.DISTINCT) {
-            return false;
-        }
-
-        if (context.checkIfInDontApplySet(this, op1)) {
-            return false;
-        }
-        context.addToDontApplySet(this, op1);
-        DistinctOperator d = (DistinctOperator) op1;
-        boolean changed = false;
-        Mutable<ILogicalOperator> opRef2 = d.getInputs().get(0);
-        List<Mutable<ILogicalExpression>> newExprList = new ArrayList<Mutable<ILogicalExpression>>();
-        for (Mutable<ILogicalExpression> expr : d.getExpressions()) {
-            LogicalExpressionTag tag = expr.getValue().getExpressionTag();
-            if (tag == LogicalExpressionTag.VARIABLE || tag == LogicalExpressionTag.CONSTANT) {
-                newExprList.add(expr);
-                continue;
-            }
-            LogicalVariable v = extractExprIntoAssignOpRef(expr.getValue(), opRef2, context);
-            ILogicalExpression newExpr = new VariableReferenceExpression(v);
-            newExprList.add(new MutableObject<ILogicalExpression>(newExpr));
-            changed = true;
-        }
-        if (changed) {
-            d.getExpressions().clear();
-            d.getExpressions().addAll(newExprList);
-            context.computeAndSetTypeEnvironmentForOperator(d);
-        }
-        return changed;
-    }
-
-}
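
A note on the ExtractDistinctByExpressionsRule deleted above: for every distinct-by expression that is neither a variable nor a constant, the rule pushes the expression into an assign below the DISTINCT and replaces it with a reference to the newly assigned variable, since (as the class comment says) the current Hyracks operators require keys to be fields. A rough standalone Java sketch of the same "materialize the key, then deduplicate on it" idea, with purely illustrative names and no Algebricks code:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ExtractDistinctKeySketch {
    public static void main(String[] args) {
        List<String> names = List.of("Alice", "alice", "Bob", "BOB", "Carol");

        // Conceptually: DISTINCT BY lowercase(name).
        // Step 1 (the extracted "assign"): materialize the key expression.
        // Step 2: deduplicate on the materialized key variable.
        Map<String, String> distinctByKey = new LinkedHashMap<>();
        for (String name : names) {
            String key = name.toLowerCase();   // the assigned key variable
            distinctByKey.putIfAbsent(key, name);
        }

        System.out.println(distinctByKey.values()); // prints: [Alice, Bob, Carol]
    }
}

The result is the same either way; the rewrite only changes where the key expression is evaluated so that the distinct operator sees a plain variable.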

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
deleted file mode 100644
index b53c0cc..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
-
-public class ExtractOrderExpressionsRule extends AbstractExtractExprRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.ORDER) {
-            return false;
-        }
-
-        if (context.checkIfInDontApplySet(this, op1)) {
-            return false;
-        }
-        context.addToDontApplySet(this, op1);
-        OrderOperator oo = (OrderOperator) op1;
-
-        if (!orderHasComplexExpr(oo)) {
-            return false;
-        }
-        Mutable<ILogicalOperator> opRef2 = oo.getInputs().get(0);
-        for (Pair<IOrder, Mutable<ILogicalExpression>> orderPair : oo.getOrderExpressions()) {
-            ILogicalExpression expr = orderPair.second.getValue();
-            if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE && !AnalysisUtil.isAccessToFieldRecord(expr)) {
-                LogicalVariable v = extractExprIntoAssignOpRef(expr, opRef2, context);
-                orderPair.second.setValue(new VariableReferenceExpression(v));
-            }
-        }
-        context.computeAndSetTypeEnvironmentForOperator(oo);
-        return true;
-    }
-
-    private boolean orderHasComplexExpr(OrderOperator oo) {
-        for (Pair<IOrder, Mutable<ILogicalExpression>> orderPair : oo.getOrderExpressions()) {
-            if (orderPair.second.getValue().getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
deleted file mode 100644
index fa7c7f4..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class FeedScanCollectionToUnnest implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-            context.addToDontApplySet(this, op);
-            return false;
-        }
-        UnnestOperator unnest = (UnnestOperator) op;
-        ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue();
-        if (needsScanCollection(unnestExpr, op)) {
-            ILogicalExpression newExpr = new UnnestingFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
-                    new MutableObject<ILogicalExpression>(unnestExpr));
-            unnest.getExpressionRef().setValue(newExpr);
-            context.addToDontApplySet(this, op);
-            return true;
-        }
-        context.addToDontApplySet(this, op);
-        return false;
-    }
-
-    private ILogicalExpression findVarOriginExpression(LogicalVariable v, ILogicalOperator op)
-            throws AlgebricksException {
-        boolean searchInputs = false;
-        if (!(op instanceof AbstractAssignOperator)) {
-            searchInputs = true;
-        } else {
-            AbstractAssignOperator aao = (AbstractAssignOperator) op;
-            List<LogicalVariable> producedVars = new ArrayList<>();
-            VariableUtilities.getProducedVariables(op, producedVars);
-            int exprIndex = producedVars.indexOf(v);
-            if (exprIndex == -1) {
-                searchInputs = true;
-            } else {
-                ILogicalExpression originalCandidate = aao.getExpressions().get(exprIndex).getValue();
-                if (originalCandidate.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                    searchInputs = true;
-                } else {
-                    return originalCandidate;
-                }
-            }
-        }
-
-        if (searchInputs) {
-            for (Mutable<ILogicalOperator> childOp : op.getInputs()) {
-                ILogicalExpression ret = findVarOriginExpression(v, childOp.getValue());
-                if (ret != null) {
-                    return ret;
-                }
-            }
-        }
-
-        throw new IllegalStateException("Unable to find the original expression that produced variable " + v);
-    }
-
-    private boolean needsScanCollection(ILogicalExpression unnestExpr, ILogicalOperator op) throws AlgebricksException {
-        switch (unnestExpr.getExpressionTag()) {
-            case VARIABLE: {
-                LogicalVariable v = ((VariableReferenceExpression) unnestExpr).getVariableReference();
-                ILogicalExpression originalExpr = findVarOriginExpression(v, op);
-                if (originalExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-                    return false;
-                } else {
-                    return !isUnnestingFunction(originalExpr);
-                }
-            }
-            case FUNCTION_CALL: {
-                return !isUnnestingFunction(unnestExpr);
-            }
-            default: {
-                return false;
-            }
-        }
-    }
-
-    private boolean isUnnestingFunction(ILogicalExpression expr) {
-        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
-            return fce.getKind() == FunctionKind.UNNEST;
-        }
-        return false;
-    }
-}
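
A note on the FeedScanCollectionToUnnest rule deleted above: when an UNNEST's expression is not already an unnesting function (and does not originate from a constant), the rule wraps it in scan-collection(...) so the collection value is iterated element by element. A rough standalone Java sketch of the before/after behavior, using illustrative names and plain collections rather than Algebricks operators:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ScanCollectionSketch {
    // Stand-in for an expression that evaluates to a whole collection,
    // e.g. a list value fed into an UNNEST by a feed plan.
    static List<Integer> collectionValuedExpr() {
        return List.of(10, 20, 30);
    }

    public static void main(String[] args) {
        // Before the rewrite: the UNNEST expression yields one collection value.
        List<Integer> wholeCollection = collectionValuedExpr();

        // After the rewrite: the expression is wrapped in scan-collection(...),
        // so the collection is iterated element by element (modeled with flatMap).
        List<Integer> unnested = Stream.of(wholeCollection)
                .flatMap(List::stream)
                .collect(Collectors.toList());

        System.out.println(unnested); // prints: [10, 20, 30]
    }
}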



[25/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
new file mode 100644
index 0000000..614cfbc
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
@@ -0,0 +1,1166 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.base.LogicalOperatorDeepCopyVisitor;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryTokenizerFactoryProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.om.base.AFloat;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IACollection;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveEditDistanceSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveListEditDistanceSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.EditDistanceSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.JaccardSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ListEditDistanceSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
+
+/**
+ * Class for helping rewrite rules to choose and apply inverted indexes.
+ */
+public class InvertedIndexAccessMethod implements IAccessMethod {
+
+    // Enum describing the search modifier type. Used for passing info to jobgen.
+    public static enum SearchModifierType {
+        CONJUNCTIVE,
+        JACCARD,
+        EDIT_DISTANCE,
+        CONJUNCTIVE_EDIT_DISTANCE,
+        INVALID
+    }
+
+    private static List<FunctionIdentifier> funcIdents = new ArrayList<FunctionIdentifier>();
+    static {
+        funcIdents.add(AsterixBuiltinFunctions.CONTAINS);
+        // For matching similarity-check functions. For example, similarity-jaccard-check returns a list of two items,
+        // and the select condition will get the first list-item and check whether it evaluates to true.
+        funcIdents.add(AsterixBuiltinFunctions.GET_ITEM);
+    }
+
+    // These function identifiers are matched in this AM's analyzeFuncExprArgs(),
+    // and are not visible to the outside driver.
+    private static HashSet<FunctionIdentifier> secondLevelFuncIdents = new HashSet<FunctionIdentifier>();
+    static {
+        secondLevelFuncIdents.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK);
+        secondLevelFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK);
+        secondLevelFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS);
+    }
+
+    public static InvertedIndexAccessMethod INSTANCE = new InvertedIndexAccessMethod();
+
+    @Override
+    public List<FunctionIdentifier> getOptimizableFunctions() {
+        return funcIdents;
+    }
+
+    @Override
+    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
+
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
+            boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
+            if (!matches) {
+                matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVars(funcExpr, analysisCtx);
+            }
+            return matches;
+        }
+        return analyzeGetItemFuncExpr(funcExpr, assignsAndUnnests, analysisCtx);
+    }
+
+    public boolean analyzeGetItemFuncExpr(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
+        if (funcExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.GET_ITEM) {
+            return false;
+        }
+        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+        // The second arg is the item index to be accessed. It must be a constant.
+        if (arg2.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            return false;
+        }
+        // The first arg must be a variable or a function expr.
+        // If it is a variable we must track its origin in the assigns to get the original function expr.
+        if (arg1.getExpressionTag() != LogicalExpressionTag.VARIABLE
+                && arg1.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression matchedFuncExpr = null;
+        // The get-item arg is a function call; directly check whether it is optimizable.
+        if (arg1.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            matchedFuncExpr = (AbstractFunctionCallExpression) arg1;
+        }
+        // The get-item arg is a variable. Search the assigns and unnests for its originating function.
+        int matchedAssignOrUnnestIndex = -1;
+        if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            VariableReferenceExpression varRefExpr = (VariableReferenceExpression) arg1;
+            // Try to find variable ref expr in all assigns.
+            for (int i = 0; i < assignsAndUnnests.size(); i++) {
+                AbstractLogicalOperator op = assignsAndUnnests.get(i);
+                if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                    AssignOperator assign = (AssignOperator) op;
+                    List<LogicalVariable> assignVars = assign.getVariables();
+                    List<Mutable<ILogicalExpression>> assignExprs = assign.getExpressions();
+                    for (int j = 0; j < assignVars.size(); j++) {
+                        LogicalVariable var = assignVars.get(j);
+                        if (var != varRefExpr.getVariableReference()) {
+                            continue;
+                        }
+                        // We've matched the variable in the first assign. Now analyze the originating function.
+                        ILogicalExpression matchedExpr = assignExprs.get(j).getValue();
+                        if (matchedExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                            return false;
+                        }
+                        matchedFuncExpr = (AbstractFunctionCallExpression) matchedExpr;
+                        break;
+                    }
+                } else {
+                    UnnestOperator unnest = (UnnestOperator) op;
+                    LogicalVariable var = unnest.getVariable();
+                    if (var == varRefExpr.getVariableReference()) {
+                        ILogicalExpression matchedExpr = unnest.getExpressionRef().getValue();
+                        if (matchedExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                            return false;
+                        }
+                        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) matchedExpr;
+                        if (unnestFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
+                            return false;
+                        }
+                        matchedFuncExpr = (AbstractFunctionCallExpression) unnestFuncExpr.getArguments().get(0)
+                                .getValue();
+                    }
+                }
+                // We've already found a match.
+                if (matchedFuncExpr != null) {
+                    matchedAssignOrUnnestIndex = i;
+                    break;
+                }
+            }
+        }
+        // Check that the matched function is optimizable by this access method.
+        if (!secondLevelFuncIdents.contains(matchedFuncExpr.getFunctionIdentifier())) {
+            return false;
+        }
+        boolean selectMatchFound = analyzeSelectSimilarityCheckFuncExprArgs(matchedFuncExpr, assignsAndUnnests,
+                matchedAssignOrUnnestIndex, analysisCtx);
+        boolean joinMatchFound = analyzeJoinSimilarityCheckFuncExprArgs(matchedFuncExpr, assignsAndUnnests,
+                matchedAssignOrUnnestIndex, analysisCtx);
+        if (selectMatchFound || joinMatchFound) {
+            return true;
+        }
+        return false;
+    }
+
+    private boolean analyzeJoinSimilarityCheckFuncExprArgs(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, int matchedAssignOrUnnestIndex,
+            AccessMethodAnalysisContext analysisCtx) {
+        // There should be exactly three arguments.
+        // The last function argument is assumed to be the similarity threshold.
+        IAlgebricksConstantValue constThreshVal = null;
+        ILogicalExpression arg3 = funcExpr.getArguments().get(2).getValue();
+        if (arg3.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            return false;
+        }
+        constThreshVal = ((ConstantExpression) arg3).getValue();
+        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+        // We expect arg1 and arg2 to be non-constants for a join.
+        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT
+                || arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+            return false;
+        }
+        LogicalVariable fieldVarExpr1 = getNonConstArgFieldExprPair(arg1, funcExpr, assignsAndUnnests,
+                matchedAssignOrUnnestIndex);
+        if (fieldVarExpr1 == null) {
+            return false;
+        }
+        LogicalVariable fieldVarExpr2 = getNonConstArgFieldExprPair(arg2, funcExpr, assignsAndUnnests,
+                matchedAssignOrUnnestIndex);
+        if (fieldVarExpr2 == null) {
+            return false;
+        }
+        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, new LogicalVariable[] { fieldVarExpr1,
+                fieldVarExpr2 }, new IAlgebricksConstantValue[] { constThreshVal });
+        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
+            //avoid additional optFuncExpressions in case of a join
+            if (optFuncExpr.getFuncExpr().equals(funcExpr)) {
+                return true;
+            }
+        }
+        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
+        return true;
+    }
+
+    private boolean analyzeSelectSimilarityCheckFuncExprArgs(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, int matchedAssignOrUnnestIndex,
+            AccessMethodAnalysisContext analysisCtx) {
+        // There should be exactly three arguments.
+        // The last function argument is assumed to be the similarity threshold.
+        IAlgebricksConstantValue constThreshVal = null;
+        ILogicalExpression arg3 = funcExpr.getArguments().get(2).getValue();
+        if (arg3.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            return false;
+        }
+        constThreshVal = ((ConstantExpression) arg3).getValue();
+        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+        // Determine whether one arg is constant, and the other is non-constant.
+        ILogicalExpression constArg = null;
+        ILogicalExpression nonConstArg = null;
+        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT
+                && arg2.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            // The arguments of the edit-distance-contains() function are asymmetrical; we can only use the index if it is on the first argument.
+            if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+                return false;
+            }
+            constArg = arg1;
+            nonConstArg = arg2;
+        } else if (arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT
+                && arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            constArg = arg2;
+            nonConstArg = arg1;
+        } else {
+            return false;
+        }
+        ConstantExpression constExpr = (ConstantExpression) constArg;
+        IAlgebricksConstantValue constFilterVal = constExpr.getValue();
+        LogicalVariable fieldVarExpr = getNonConstArgFieldExprPair(nonConstArg, funcExpr, assignsAndUnnests,
+                matchedAssignOrUnnestIndex);
+        if (fieldVarExpr == null) {
+            return false;
+        }
+        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, new LogicalVariable[] { fieldVarExpr },
+                new IAlgebricksConstantValue[] { constFilterVal, constThreshVal });
+        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
+            //avoid additional optFuncExpressions in case of a join
+            if (optFuncExpr.getFuncExpr().equals(funcExpr))
+                return true;
+        }
+        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
+        return true;
+    }
+
+    private LogicalVariable getNonConstArgFieldExprPair(ILogicalExpression nonConstArg,
+            AbstractFunctionCallExpression funcExpr, List<AbstractLogicalOperator> assignsAndUnnests,
+            int matchedAssignOrUnnestIndex) {
+        LogicalVariable fieldVar = null;
+        // Analyze nonConstArg depending on similarity function.
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK) {
+            AbstractFunctionCallExpression nonConstFuncExpr = funcExpr;
+            if (nonConstArg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                nonConstFuncExpr = (AbstractFunctionCallExpression) nonConstArg;
+                // TODO: Currently, we're only looking for word and gram tokens (non hashed).
+                if (nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.WORD_TOKENS
+                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.GRAM_TOKENS) {
+                    return null;
+                }
+                // Find the variable that is being tokenized.
+                nonConstArg = nonConstFuncExpr.getArguments().get(0).getValue();
+            }
+        }
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
+                || funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+            while (nonConstArg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                AbstractFunctionCallExpression nonConstFuncExpr = (AbstractFunctionCallExpression) nonConstArg;
+                if (nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.WORD_TOKENS
+                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SUBSTRING
+                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SUBSTRING_BEFORE
+                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SUBSTRING_AFTER) {
+                    return null;
+                }
+                // Find the variable whose substring is used in the similarity function
+                nonConstArg = nonConstFuncExpr.getArguments().get(0).getValue();
+            }
+        }
+        if (nonConstArg.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            fieldVar = ((VariableReferenceExpression) nonConstArg).getVariableReference();
+        }
+        return fieldVar;
+    }
+
+    @Override
+    public boolean matchAllIndexExprs() {
+        return true;
+    }
+
+    @Override
+    public boolean matchPrefixIndexExprs() {
+        return false;
+    }
+
+    private ILogicalOperator createSecondaryToPrimaryPlan(OptimizableOperatorSubTree indexSubTree,
+            OptimizableOperatorSubTree probeSubTree, Index chosenIndex, IOptimizableFuncExpr optFuncExpr,
+            boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context)
+            throws AlgebricksException {
+        Dataset dataset = indexSubTree.dataset;
+        ARecordType recordType = indexSubTree.recordType;
+        // We made sure indexSubTree has a datasource scan.
+        DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) indexSubTree.dataSourceRef.getValue();
+
+        InvertedIndexJobGenParams jobGenParams = new InvertedIndexJobGenParams(chosenIndex.getIndexName(),
+                chosenIndex.getIndexType(), dataset.getDataverseName(), dataset.getDatasetName(), retainInput,
+                retainNull, requiresBroadcast);
+        // Add function-specific args such as search modifier, and possibly a similarity threshold.
+        addFunctionSpecificArgs(optFuncExpr, jobGenParams);
+        // Add the type of search key from the optFuncExpr.
+        addSearchKeyType(optFuncExpr, indexSubTree, context, jobGenParams);
+
+        // Operator that feeds the secondary-index search.
+        AbstractLogicalOperator inputOp = null;
+        // Here we generate vars and funcs for assigning the secondary-index keys to be fed into the secondary-index search.
+        // List of variables for the assign.
+        ArrayList<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
+        // probeSubTree is null if we are dealing with a selection query, and non-null for join queries.
+        if (probeSubTree == null) {
+            // List of expressions for the assign.
+            ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
+            // Add key vars and exprs to argument list.
+            addKeyVarsAndExprs(optFuncExpr, keyVarList, keyExprList, context);
+            // Assign operator that sets the secondary-index search-key fields.
+            inputOp = new AssignOperator(keyVarList, keyExprList);
+            // Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
+            inputOp.getInputs().add(dataSourceScan.getInputs().get(0));
+            inputOp.setExecutionMode(dataSourceScan.getExecutionMode());
+        } else {
+            // We are optimizing a join. Add the input variable to the secondaryIndexFuncArgs.
+            LogicalVariable inputSearchVariable = getInputSearchVar(optFuncExpr, indexSubTree);
+            keyVarList.add(inputSearchVariable);
+            inputOp = (AbstractLogicalOperator) probeSubTree.root;
+        }
+        jobGenParams.setKeyVarList(keyVarList);
+        UnnestMapOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
+                chosenIndex, inputOp, jobGenParams, context, true, retainInput);
+
+        // Generate the rest of the upstream plan which feeds the search results into the primary index.
+        UnnestMapOperator primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceScan, dataset,
+                recordType, secondaryIndexUnnestOp, context, true, retainInput, retainNull, false);
+
+        return primaryIndexUnnestOp;
+    }
+
+    /**
+     * Returns the variable which acts as the input search key to a secondary
+     * index that optimizes optFuncExpr by rewriting indexSubTree
+     * (which is the original subtree that will be replaced by the index plan).
+     */
+    private LogicalVariable getInputSearchVar(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree indexSubTree) {
+        if (optFuncExpr.getOperatorSubTree(0) == indexSubTree) {
+            // If the index is on a dataset in subtree 0, then subtree 1 will feed.
+            return optFuncExpr.getLogicalVar(1);
+        } else {
+            // If the index is on a dataset in subtree 1, then subtree 0 will feed.
+            return optFuncExpr.getLogicalVar(0);
+        }
+    }
+
+    @Override
+    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
+            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
+            IOptimizationContext context) throws AlgebricksException {
+        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
+        ILogicalOperator indexPlanRootOp = createSecondaryToPrimaryPlan(subTree, null, chosenIndex, optFuncExpr, false,
+                false, false, context);
+        // Replace the datasource scan with the new plan rooted at primaryIndexUnnestMap.
+        subTree.dataSourceRef.setValue(indexPlanRootOp);
+        return true;
+    }
+
+    @Override
+    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
+            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
+            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
+            boolean hasGroupBy) throws AlgebricksException {
+        // Figure out if the index is applicable on the left or right side (if both, we arbitrarily prefer the left side).
+        Dataset dataset = analysisCtx.indexDatasetMap.get(chosenIndex);
+        // Determine probe and index subtrees based on chosen index.
+        OptimizableOperatorSubTree indexSubTree = null;
+        OptimizableOperatorSubTree probeSubTree = null;
+        if (!isLeftOuterJoin && leftSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(leftSubTree.dataset.getDatasetName())) {
+            indexSubTree = leftSubTree;
+            probeSubTree = rightSubTree;
+        } else if (rightSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
+            indexSubTree = rightSubTree;
+            probeSubTree = leftSubTree;
+        }
+        if (indexSubTree == null) {
+            //This may happen for left outer join case
+            return false;
+        }
+
+        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
+        // The arguments of the edit-distance-contains() function are asymmetrical; we can only use the index
+        // if the dataset of the index subtree and the dataset of the first argument's subtree are the same.
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS
+                && optFuncExpr.getOperatorSubTree(0).dataset != null
+                && !optFuncExpr.getOperatorSubTree(0).dataset.getDatasetName().equals(
+                        indexSubTree.dataset.getDatasetName())) {
+            return false;
+        }
+
+        // If LOJ, reset the null place holder variable.
+        LogicalVariable newNullPlaceHolderVar = null;
+        if (isLeftOuterJoin && hasGroupBy) {
+            //get a new null place holder variable that is the first field variable of the primary key
+            //from the indexSubTree's datasourceScanOp
+            newNullPlaceHolderVar = indexSubTree.getDataSourceVariables().get(0);
+
+            //reset the null place holder variable
+            AccessMethodUtils.resetLOJNullPlaceholderVariableInGroupByOp(analysisCtx, newNullPlaceHolderVar, context);
+        }
+
+        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) joinRef.getValue();
+
+        // Remember the original probe subtree, and its primary-key variables,
+        // so we can later retrieve the missing attributes via an equi join.
+        List<LogicalVariable> originalSubTreePKs = new ArrayList<LogicalVariable>();
+        // Remember the primary-keys of the new probe subtree for the top-level equi join.
+        List<LogicalVariable> surrogateSubTreePKs = new ArrayList<LogicalVariable>();
+
+        // Copy probe subtree, replacing their variables with new ones. We will use the original variables
+        // to stitch together a top-level equi join.
+        Mutable<ILogicalOperator> originalProbeSubTreeRootRef = copyAndReinitProbeSubTree(probeSubTree, join
+                .getCondition().getValue(), optFuncExpr, originalSubTreePKs, surrogateSubTreePKs, context);
+
+        // Remember original live variables from the index sub tree.
+        List<LogicalVariable> indexSubTreeLiveVars = new ArrayList<LogicalVariable>();
+        VariableUtilities.getLiveVariables(indexSubTree.root, indexSubTreeLiveVars);
+
+        // Clone the original join condition because we may have to modify it (and we also need the original).
+        ILogicalExpression joinCond = join.getCondition().getValue().cloneExpression();
+        // Create "panic" (non indexed) nested-loop join path if necessary.
+        Mutable<ILogicalOperator> panicJoinRef = null;
+        Map<LogicalVariable, LogicalVariable> panicVarMap = null;
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
+                || optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+            panicJoinRef = new MutableObject<ILogicalOperator>(joinRef.getValue());
+            panicVarMap = new HashMap<LogicalVariable, LogicalVariable>();
+            Mutable<ILogicalOperator> newProbeRootRef = createPanicNestedLoopJoinPlan(panicJoinRef, indexSubTree,
+                    probeSubTree, optFuncExpr, chosenIndex, panicVarMap, context);
+            probeSubTree.rootRef.setValue(newProbeRootRef.getValue());
+            probeSubTree.root = newProbeRootRef.getValue();
+        }
+        // Create regular indexed-nested loop join path.
+        ILogicalOperator indexPlanRootOp = createSecondaryToPrimaryPlan(indexSubTree, probeSubTree, chosenIndex,
+                optFuncExpr, true, isLeftOuterJoin, true, context);
+        indexSubTree.dataSourceRef.setValue(indexPlanRootOp);
+
+        // Change join into a select with the same condition.
+        SelectOperator topSelect = new SelectOperator(new MutableObject<ILogicalExpression>(joinCond), isLeftOuterJoin,
+                newNullPlaceHolderVar);
+        topSelect.getInputs().add(indexSubTree.rootRef);
+        topSelect.setExecutionMode(ExecutionMode.LOCAL);
+        context.computeAndSetTypeEnvironmentForOperator(topSelect);
+        ILogicalOperator topOp = topSelect;
+
+        // Hook up the indexed-nested loop join path with the "panic" (non indexed) nested-loop join path by putting a union all on top.
+        if (panicJoinRef != null) {
+            LogicalVariable inputSearchVar = getInputSearchVar(optFuncExpr, indexSubTree);
+            indexSubTreeLiveVars.addAll(originalSubTreePKs);
+            indexSubTreeLiveVars.add(inputSearchVar);
+            List<LogicalVariable> panicPlanLiveVars = new ArrayList<LogicalVariable>();
+            VariableUtilities.getLiveVariables(panicJoinRef.getValue(), panicPlanLiveVars);
+            // Create variable mapping for union all operator.
+            List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();
+            for (int i = 0; i < indexSubTreeLiveVars.size(); i++) {
+                LogicalVariable indexSubTreeVar = indexSubTreeLiveVars.get(i);
+                LogicalVariable panicPlanVar = panicVarMap.get(indexSubTreeVar);
+                if (panicPlanVar == null) {
+                    panicPlanVar = indexSubTreeVar;
+                }
+                varMap.add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(indexSubTreeVar, panicPlanVar,
+                        indexSubTreeVar));
+            }
+            UnionAllOperator unionAllOp = new UnionAllOperator(varMap);
+            unionAllOp.getInputs().add(new MutableObject<ILogicalOperator>(topOp));
+            unionAllOp.getInputs().add(panicJoinRef);
+            unionAllOp.setExecutionMode(ExecutionMode.PARTITIONED);
+            context.computeAndSetTypeEnvironmentForOperator(unionAllOp);
+            topOp = unionAllOp;
+        }
+
+        // Place a top-level equi-join on top to retrieve the missing variables from the original probe subtree.
+        // The inner (build) branch of the join is the subtree with the data scan, since the result of the similarity join could potentially be big.
+        // This choice may not always be the most efficient, but it seems more robust than the alternative.
+        Mutable<ILogicalExpression> eqJoinConditionRef = createPrimaryKeysEqJoinCondition(originalSubTreePKs,
+                surrogateSubTreePKs);
+        InnerJoinOperator topEqJoin = new InnerJoinOperator(eqJoinConditionRef, originalProbeSubTreeRootRef,
+                new MutableObject<ILogicalOperator>(topOp));
+        topEqJoin.setExecutionMode(ExecutionMode.PARTITIONED);
+        joinRef.setValue(topEqJoin);
+        context.computeAndSetTypeEnvironmentForOperator(topEqJoin);
+
+        return true;
+    }
+
+    /**
+     * Copies the probeSubTree (using new variables), and reinitializes the probeSubTree to it.
+     * Accordingly replaces the variables in the given joinCond, and the optFuncExpr.
+     * Returns a reference to the original plan root.
+     */
+    private Mutable<ILogicalOperator> copyAndReinitProbeSubTree(OptimizableOperatorSubTree probeSubTree,
+            ILogicalExpression joinCond, IOptimizableFuncExpr optFuncExpr, List<LogicalVariable> originalSubTreePKs,
+            List<LogicalVariable> surrogateSubTreePKs, IOptimizationContext context) throws AlgebricksException {
+
+        probeSubTree.getPrimaryKeyVars(originalSubTreePKs);
+
+        // Create two copies of the original probe subtree.
+        // The first copy, which becomes the new probe subtree, will retain the primary-key and secondary-search key variables,
+        // but have all other variables replaced with new ones.
+        // The second copy, which will become an input to the top-level equi-join to resolve the surrogates,
+        // will have all primary-key and secondary-search keys replaced, but retains all other original variables.
+
+        // Variable replacement map for the first copy.
+        Map<LogicalVariable, LogicalVariable> newProbeSubTreeVarMap = new HashMap<LogicalVariable, LogicalVariable>();
+        // Variable replacement map for the second copy.
+        Map<LogicalVariable, LogicalVariable> joinInputSubTreeVarMap = new HashMap<LogicalVariable, LogicalVariable>();
+        // Init with all live vars.
+        List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
+        VariableUtilities.getLiveVariables(probeSubTree.root, liveVars);
+        for (LogicalVariable var : liveVars) {
+            joinInputSubTreeVarMap.put(var, var);
+        }
+        // Fill variable replacement maps.
+        for (int i = 0; i < optFuncExpr.getNumLogicalVars(); i++) {
+            joinInputSubTreeVarMap.put(optFuncExpr.getLogicalVar(i), context.newVar());
+            newProbeSubTreeVarMap.put(optFuncExpr.getLogicalVar(i), optFuncExpr.getLogicalVar(i));
+        }
+        for (int i = 0; i < originalSubTreePKs.size(); i++) {
+            LogicalVariable newPKVar = context.newVar();
+            surrogateSubTreePKs.add(newPKVar);
+            joinInputSubTreeVarMap.put(originalSubTreePKs.get(i), newPKVar);
+            newProbeSubTreeVarMap.put(originalSubTreePKs.get(i), originalSubTreePKs.get(i));
+        }
+
+        // Create first copy.
+        Counter firstCounter = new Counter(context.getVarCounter());
+        LogicalOperatorDeepCopyVisitor firstDeepCopyVisitor = new LogicalOperatorDeepCopyVisitor(firstCounter,
+                newProbeSubTreeVarMap);
+        ILogicalOperator newProbeSubTree = firstDeepCopyVisitor.deepCopy(probeSubTree.root, null);
+        inferTypes(newProbeSubTree, context);
+        Mutable<ILogicalOperator> newProbeSubTreeRootRef = new MutableObject<ILogicalOperator>(newProbeSubTree);
+        context.setVarCounter(firstCounter.get());
+        // Create second copy.
+        Counter secondCounter = new Counter(context.getVarCounter());
+        LogicalOperatorDeepCopyVisitor secondDeepCopyVisitor = new LogicalOperatorDeepCopyVisitor(secondCounter,
+                joinInputSubTreeVarMap);
+        ILogicalOperator joinInputSubTree = secondDeepCopyVisitor.deepCopy(probeSubTree.root, null);
+        inferTypes(joinInputSubTree, context);
+        probeSubTree.rootRef.setValue(joinInputSubTree);
+        context.setVarCounter(secondCounter.get());
+
+        // Remember the original probe subtree reference so we can return it.
+        Mutable<ILogicalOperator> originalProbeSubTreeRootRef = probeSubTree.rootRef;
+
+        // Replace the original probe subtree with its copy.
+        Dataset origDataset = probeSubTree.dataset;
+        ARecordType origRecordType = probeSubTree.recordType;
+        probeSubTree.initFromSubTree(newProbeSubTreeRootRef);
+        probeSubTree.dataset = origDataset;
+        probeSubTree.recordType = origRecordType;
+
+        // Replace the variables in the join condition based on the mapping of variables
+        // in the new probe subtree.
+        Map<LogicalVariable, LogicalVariable> varMapping = firstDeepCopyVisitor.getVariableMapping();
+        for (Map.Entry<LogicalVariable, LogicalVariable> varMapEntry : varMapping.entrySet()) {
+            if (varMapEntry.getKey() != varMapEntry.getValue()) {
+                joinCond.substituteVar(varMapEntry.getKey(), varMapEntry.getValue());
+            }
+        }
+        return originalProbeSubTreeRootRef;
+    }
+
+    private Mutable<ILogicalExpression> createPrimaryKeysEqJoinCondition(List<LogicalVariable> originalSubTreePKs,
+            List<LogicalVariable> surrogateSubTreePKs) {
+        List<Mutable<ILogicalExpression>> eqExprs = new ArrayList<Mutable<ILogicalExpression>>();
+        int numPKVars = originalSubTreePKs.size();
+        for (int i = 0; i < numPKVars; i++) {
+            List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
+            args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(surrogateSubTreePKs.get(i))));
+            args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(originalSubTreePKs.get(i))));
+            ILogicalExpression eqFunc = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.EQ), args);
+            eqExprs.add(new MutableObject<ILogicalExpression>(eqFunc));
+        }
+        if (eqExprs.size() == 1) {
+            return eqExprs.get(0);
+        } else {
+            ILogicalExpression andFunc = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.AND), eqExprs);
+            return new MutableObject<ILogicalExpression>(andFunc);
+        }
+    }
+
+    private Mutable<ILogicalOperator> createPanicNestedLoopJoinPlan(Mutable<ILogicalOperator> joinRef,
+            OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree,
+            IOptimizableFuncExpr optFuncExpr, Index chosenIndex, Map<LogicalVariable, LogicalVariable> panicVarMap,
+            IOptimizationContext context) throws AlgebricksException {
+        LogicalVariable inputSearchVar = getInputSearchVar(optFuncExpr, indexSubTree);
+
+        // We split the plan into two "branches", and add selections on each side.
+        AbstractLogicalOperator replicateOp = new ReplicateOperator(2);
+        replicateOp.getInputs().add(new MutableObject<ILogicalOperator>(probeSubTree.root));
+        replicateOp.setExecutionMode(ExecutionMode.PARTITIONED);
+        context.computeAndSetTypeEnvironmentForOperator(replicateOp);
+
+        // Create select ops for removing tuples that are filterable and not filterable, respectively.
+        IVariableTypeEnvironment probeTypeEnv = context.getOutputTypeEnvironment(probeSubTree.root);
+        IAType inputSearchVarType;
+        if (chosenIndex.isEnforcingKeyFileds())
+            inputSearchVarType = optFuncExpr.getFieldType(optFuncExpr.findLogicalVar(inputSearchVar));
+        else
+            inputSearchVarType = (IAType) probeTypeEnv.getVarType(inputSearchVar);
+        Mutable<ILogicalOperator> isFilterableSelectOpRef = new MutableObject<ILogicalOperator>();
+        Mutable<ILogicalOperator> isNotFilterableSelectOpRef = new MutableObject<ILogicalOperator>();
+        createIsFilterableSelectOps(replicateOp, inputSearchVar, inputSearchVarType, optFuncExpr, chosenIndex, context,
+                isFilterableSelectOpRef, isNotFilterableSelectOpRef);
+
+        List<LogicalVariable> originalLiveVars = new ArrayList<LogicalVariable>();
+        VariableUtilities.getLiveVariables(indexSubTree.root, originalLiveVars);
+
+        // Copy the scan subtree in indexSubTree.
+        Counter counter = new Counter(context.getVarCounter());
+        LogicalOperatorDeepCopyVisitor deepCopyVisitor = new LogicalOperatorDeepCopyVisitor(counter);
+        ILogicalOperator scanSubTree = deepCopyVisitor.deepCopy(indexSubTree.root, null);
+
+        context.setVarCounter(counter.get());
+        Map<LogicalVariable, LogicalVariable> copyVarMap = deepCopyVisitor.getVariableMapping();
+        panicVarMap.putAll(copyVarMap);
+
+        List<LogicalVariable> copyLiveVars = new ArrayList<LogicalVariable>();
+        VariableUtilities.getLiveVariables(scanSubTree, copyLiveVars);
+
+        // Replace the inputs of the given join op, and replace variables in its
+        // condition since we deep-copied one of the scanner subtrees which
+        // changed variables.
+        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
+        for (Map.Entry<LogicalVariable, LogicalVariable> entry : copyVarMap.entrySet()) {
+            joinOp.getCondition().getValue().substituteVar(entry.getKey(), entry.getValue());
+        }
+        joinOp.getInputs().clear();
+        joinOp.getInputs().add(new MutableObject<ILogicalOperator>(scanSubTree));
+        // Make sure that the build input (which may be materialized, causing blocking) comes
+        // from the split+select; otherwise the plan will deadlock.
+        joinOp.getInputs().add(isNotFilterableSelectOpRef);
+        context.computeAndSetTypeEnvironmentForOperator(joinOp);
+
+        // Return the new root of the probeSubTree.
+        return isFilterableSelectOpRef;
+    }
+
+    private void createIsFilterableSelectOps(ILogicalOperator inputOp, LogicalVariable inputSearchVar,
+            IAType inputSearchVarType, IOptimizableFuncExpr optFuncExpr, Index chosenIndex,
+            IOptimizationContext context, Mutable<ILogicalOperator> isFilterableSelectOpRef,
+            Mutable<ILogicalOperator> isNotFilterableSelectOpRef) throws AlgebricksException {
+        // Create select operator for removing tuples that are not filterable.
+        // First determine the proper filter function and args based on the type of the input search var.
+        ILogicalExpression isFilterableExpr = null;
+        switch (inputSearchVarType.getTypeTag()) {
+            case STRING: {
+                List<Mutable<ILogicalExpression>> isFilterableArgs = new ArrayList<Mutable<ILogicalExpression>>(4);
+                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                        inputSearchVar)));
+                // Since we are optimizing a join, the similarity threshold should be the only constant in the optimizable function expression.
+                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(optFuncExpr
+                        .getConstantVal(0))));
+                isFilterableArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils
+                        .createInt32Constant(chosenIndex.getGramLength())));
+                boolean usePrePost = !optFuncExpr.containsPartialField();
+                isFilterableArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils
+                        .createBooleanConstant(usePrePost)));
+                isFilterableExpr = new ScalarFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_STRING_IS_FILTERABLE),
+                        isFilterableArgs);
+                break;
+            }
+            case UNORDEREDLIST:
+            case ORDEREDLIST: {
+                List<Mutable<ILogicalExpression>> isFilterableArgs = new ArrayList<Mutable<ILogicalExpression>>(2);
+                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                        inputSearchVar)));
+                // Since we are optimizing a join, the similarity threshold should be the only constant in the optimizable function expression.
+                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(optFuncExpr
+                        .getConstantVal(0))));
+                isFilterableExpr = new ScalarFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_LIST_IS_FILTERABLE),
+                        isFilterableArgs);
+                break;
+            }
+            default: {
+                throw new AlgebricksException("Only strings, ordered and unordered list types supported.");
+            }
+        }
+
+        SelectOperator isFilterableSelectOp = new SelectOperator(
+                new MutableObject<ILogicalExpression>(isFilterableExpr), false, null);
+        isFilterableSelectOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+        isFilterableSelectOp.setExecutionMode(ExecutionMode.LOCAL);
+        context.computeAndSetTypeEnvironmentForOperator(isFilterableSelectOp);
+
+        // Select operator for removing tuples that are filterable.
+        List<Mutable<ILogicalExpression>> isNotFilterableArgs = new ArrayList<Mutable<ILogicalExpression>>();
+        isNotFilterableArgs.add(new MutableObject<ILogicalExpression>(isFilterableExpr));
+        ILogicalExpression isNotFilterableExpr = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT), isNotFilterableArgs);
+        SelectOperator isNotFilterableSelectOp = new SelectOperator(new MutableObject<ILogicalExpression>(
+                isNotFilterableExpr), false, null);
+        isNotFilterableSelectOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+        isNotFilterableSelectOp.setExecutionMode(ExecutionMode.LOCAL);
+        context.computeAndSetTypeEnvironmentForOperator(isNotFilterableSelectOp);
+
+        isFilterableSelectOpRef.setValue(isFilterableSelectOp);
+        isNotFilterableSelectOpRef.setValue(isNotFilterableSelectOp);
+    }
+
+    private void addSearchKeyType(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree indexSubTree,
+            IOptimizationContext context, InvertedIndexJobGenParams jobGenParams) throws AlgebricksException {
+        // If we have two variables in the optFuncExpr, then we are optimizing a join.
+        IAType type = null;
+        ATypeTag typeTag = null;
+        if (optFuncExpr.getNumLogicalVars() == 2) {
+            // Find the type of the variable that is going to feed into the index search.
+            if (optFuncExpr.getOperatorSubTree(0) == indexSubTree) {
+                // If the index is on a dataset in subtree 0, then subtree 1 will feed.
+                type = optFuncExpr.getFieldType(1);
+            } else {
+                // If the index is on a dataset in subtree 1, then subtree 0 will feed.
+                type = optFuncExpr.getFieldType(0);
+            }
+            typeTag = type.getTypeTag();
+        } else {
+            // We are optimizing a selection query. Add the type of the search key constant.
+            AsterixConstantValue constVal = (AsterixConstantValue) optFuncExpr.getConstantVal(0);
+            IAObject obj = constVal.getObject();
+            type = obj.getType();
+            typeTag = type.getTypeTag();
+            if (typeTag != ATypeTag.ORDEREDLIST && typeTag != ATypeTag.STRING && typeTag != ATypeTag.UNORDEREDLIST) {
+                throw new AlgebricksException("Only ordered lists, string, and unordered lists types supported.");
+            }
+        }
+        jobGenParams.setSearchKeyType(typeTag);
+    }
+
+    private void addFunctionSpecificArgs(IOptimizableFuncExpr optFuncExpr, InvertedIndexJobGenParams jobGenParams) {
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
+            jobGenParams.setSearchModifierType(SearchModifierType.CONJUNCTIVE);
+            jobGenParams.setSimilarityThreshold(new AsterixConstantValue(ANull.NULL));
+        }
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK) {
+            jobGenParams.setSearchModifierType(SearchModifierType.JACCARD);
+            // Add the similarity threshold which, by convention, is the last constant value.
+            jobGenParams.setSimilarityThreshold(optFuncExpr.getConstantVal(optFuncExpr.getNumConstantVals() - 1));
+        }
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
+                || optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+            if (optFuncExpr.containsPartialField()) {
+                jobGenParams.setSearchModifierType(SearchModifierType.CONJUNCTIVE_EDIT_DISTANCE);
+            } else {
+                jobGenParams.setSearchModifierType(SearchModifierType.EDIT_DISTANCE);
+            }
+            // Add the similarity threshold which, by convention, is the last constant value.
+            jobGenParams.setSimilarityThreshold(optFuncExpr.getConstantVal(optFuncExpr.getNumConstantVals() - 1));
+        }
+    }
+
+    private void addKeyVarsAndExprs(IOptimizableFuncExpr optFuncExpr, ArrayList<LogicalVariable> keyVarList,
+            ArrayList<Mutable<ILogicalExpression>> keyExprList, IOptimizationContext context)
+            throws AlgebricksException {
+        // For now we are assuming a single secondary index key.
+        // Add a variable and its expr to the lists which will be passed into an assign op.
+        LogicalVariable keyVar = context.newVar();
+        keyVarList.add(keyVar);
+        keyExprList.add(new MutableObject<ILogicalExpression>(new ConstantExpression(optFuncExpr.getConstantVal(0))));
+        return;
+    }
+
+    @Override
+    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) throws AlgebricksException {
+        if (optFuncExpr.getFuncExpr().getAnnotations()
+                .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
+            return false;
+        }
+
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
+                || optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+            return isEditDistanceFuncOptimizable(index, optFuncExpr);
+        }
+
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK) {
+            return isJaccardFuncOptimizable(index, optFuncExpr);
+        }
+
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
+            return isContainsFuncOptimizable(index, optFuncExpr);
+        }
+
+        return false;
+    }
+
+    private boolean isEditDistanceFuncOptimizable(Index index, IOptimizableFuncExpr optFuncExpr)
+            throws AlgebricksException {
+        if (optFuncExpr.getNumConstantVals() == 1) {
+            return isEditDistanceFuncJoinOptimizable(index, optFuncExpr);
+        } else {
+            return isEditDistanceFuncSelectOptimizable(index, optFuncExpr);
+        }
+    }
+
+    private boolean isEditDistanceFuncJoinOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        if (index.isEnforcingKeyFileds())
+            return isEditDistanceFuncCompatible(index.getKeyFieldTypes().get(0).getTypeTag(), index.getIndexType());
+        else
+            return isEditDistanceFuncCompatible(optFuncExpr.getFieldType(0).getTypeTag(), index.getIndexType());
+    }
+
+    private boolean isEditDistanceFuncCompatible(ATypeTag typeTag, IndexType indexType) {
+        // We can only optimize edit distance on strings using an ngram index.
+        if (typeTag == ATypeTag.STRING
+                && (indexType == IndexType.SINGLE_PARTITION_NGRAM_INVIX || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)) {
+            return true;
+        }
+        // We can only optimize edit distance on lists using a word index.
+        if ((typeTag == ATypeTag.ORDEREDLIST)
+                && (indexType == IndexType.SINGLE_PARTITION_WORD_INVIX || indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
+            return true;
+        }
+        return false;
+    }
+
+    private boolean isEditDistanceFuncSelectOptimizable(Index index, IOptimizableFuncExpr optFuncExpr)
+            throws AlgebricksException {
+
+        // Check for panic in selection query.
+        // TODO: Panic also depends on prePost which is currently hardcoded to be true.
+        AsterixConstantValue listOrStrConstVal = (AsterixConstantValue) optFuncExpr.getConstantVal(0);
+        IAObject listOrStrObj = listOrStrConstVal.getObject();
+        ATypeTag typeTag = listOrStrObj.getType().getTypeTag();
+
+        if (!isEditDistanceFuncCompatible(typeTag, index.getIndexType())) {
+            return false;
+        }
+
+        AsterixConstantValue intConstVal = (AsterixConstantValue) optFuncExpr.getConstantVal(1);
+        IAObject intObj = intConstVal.getObject();
+
+        AInt32 edThresh = null;
+        // Apply type casting based on numeric types of the input to INT32 type.
+        try {
+            edThresh = (AInt32) ATypeHierarchy.convertNumericTypeObject(intObj, ATypeTag.INT32);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
+        int mergeThreshold = 0;
+
+        if (typeTag == ATypeTag.STRING) {
+            AString astr = (AString) listOrStrObj;
+            // Compute merge threshold depending on whether the query grams contain pre- and postfixing.
+            if (optFuncExpr.containsPartialField()) {
+                mergeThreshold = (astr.getStringValue().length() - index.getGramLength() + 1)
+                        - edThresh.getIntegerValue() * index.getGramLength();
+            } else {
+                mergeThreshold = (astr.getStringValue().length() + index.getGramLength() - 1)
+                        - edThresh.getIntegerValue() * index.getGramLength();
+            }
+        }
+
+        if ((typeTag == ATypeTag.ORDEREDLIST)
+                && (index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX || index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
+            IACollection alist = (IACollection) listOrStrObj;
+            // Compute merge threshold.
+            mergeThreshold = alist.size() - edThresh.getIntegerValue();
+        }
+        if (mergeThreshold <= 0) {
+            // We cannot use index to optimize expr.
+            return false;
+        }
+        return true;
+    }
+
+    private boolean isJaccardFuncOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        //TODO we need to split join and select cases in order to check join case more thoroughly.
+
+        int variableCount = optFuncExpr.getNumLogicalVars();
+
+        //check whether gram-tokens function is optimizable
+        ScalarFunctionCallExpression funcExpr = null;
+        for (int i = 0; i < variableCount; i++) {
+            funcExpr = findTokensFunc(AsterixBuiltinFunctions.GRAM_TOKENS, optFuncExpr, i);
+            if (funcExpr != null) {
+                return isJaccardFuncCompatible(funcExpr, optFuncExpr.getFieldType(i).getTypeTag(), index.getIndexType());
+            }
+        }
+
+        //check whether word-tokens function is optimizable
+        for (int i = 0; i < variableCount; i++) {
+            funcExpr = findTokensFunc(AsterixBuiltinFunctions.WORD_TOKENS, optFuncExpr, i);
+            if (funcExpr != null) {
+                return isJaccardFuncCompatible(funcExpr, optFuncExpr.getFieldType(i).getTypeTag(), index.getIndexType());
+            }
+        }
+
+        //check whether a search variable is optimizable
+        OptimizableOperatorSubTree subTree = null;
+        LogicalVariable targetVar = null;
+        for (int i = 0; i < variableCount; i++) {
+            subTree = optFuncExpr.getOperatorSubTree(i);
+            if (subTree == null)
+                continue;
+            targetVar = optFuncExpr.getLogicalVar(i);
+            if (targetVar == null)
+                continue;
+            return isJaccardFuncCompatible(optFuncExpr.getFuncExpr().getArguments().get(i).getValue(), optFuncExpr
+                    .getFieldType(i).getTypeTag(), index.getIndexType());
+        }
+
+        return false;
+    }
+
+    private ScalarFunctionCallExpression findTokensFunc(FunctionIdentifier funcId, IOptimizableFuncExpr optFuncExpr,
+            int subTreeIndex) {
+        //find either a gram-tokens or a word-tokens function that exists in optFuncExpr.subTrees' assignsAndUnnests
+        OptimizableOperatorSubTree subTree = null;
+        LogicalVariable targetVar = null;
+
+        subTree = optFuncExpr.getOperatorSubTree(subTreeIndex);
+        if (subTree == null) {
+            return null;
+        }
+
+        targetVar = optFuncExpr.getLogicalVar(subTreeIndex);
+        if (targetVar == null) {
+            return null;
+        }
+
+        for (AbstractLogicalOperator op : subTree.assignsAndUnnests) {
+            if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+                continue;
+            List<Mutable<ILogicalExpression>> exprList = ((AssignOperator) op).getExpressions();
+            for (Mutable<ILogicalExpression> expr : exprList) {
+                if (expr.getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL)
+                    continue;
+                AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr.getValue();
+                if (funcExpr.getFunctionIdentifier() != funcId)
+                    continue;
+                ILogicalExpression varExpr = funcExpr.getArguments().get(0).getValue();
+                if (varExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE)
+                    continue;
+                if (((VariableReferenceExpression) varExpr).getVariableReference() == targetVar)
+                    continue;
+                return (ScalarFunctionCallExpression) funcExpr;
+            }
+        }
+        return null;
+    }
+
+    private boolean isJaccardFuncCompatible(ILogicalExpression nonConstArg, ATypeTag typeTag, IndexType indexType) {
+        if (nonConstArg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression nonConstfuncExpr = (AbstractFunctionCallExpression) nonConstArg;
+            // We can use this index if the tokenization function matches the index type.
+            if (nonConstfuncExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.WORD_TOKENS
+                    && (indexType == IndexType.SINGLE_PARTITION_WORD_INVIX || indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
+                return true;
+            }
+            if (nonConstfuncExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.GRAM_TOKENS
+                    && (indexType == IndexType.SINGLE_PARTITION_NGRAM_INVIX || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)) {
+                return true;
+            }
+        }
+
+        if (nonConstArg.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            if ((typeTag == ATypeTag.ORDEREDLIST || typeTag == ATypeTag.UNORDEREDLIST)
+                    && (indexType == IndexType.SINGLE_PARTITION_WORD_INVIX || indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
+                return true;
+            }
+            // We assume that the given list variable is not a list of n-grams, since that would be unrealistic.
+        }
+        return false;
+    }
+
+    private boolean isContainsFuncOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        if (optFuncExpr.getNumLogicalVars() == 2) {
+            return isContainsFuncJoinOptimizable(index, optFuncExpr);
+        } else {
+            return isContainsFuncSelectOptimizable(index, optFuncExpr);
+        }
+    }
+
+    private boolean isContainsFuncSelectOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        AsterixConstantValue strConstVal = (AsterixConstantValue) optFuncExpr.getConstantVal(0);
+        IAObject strObj = strConstVal.getObject();
+        ATypeTag typeTag = strObj.getType().getTypeTag();
+
+        if (!isContainsFuncCompatible(typeTag, index.getIndexType())) {
+            return false;
+        }
+
+        // Check that the constant search string has at least gramLength characters.
+        if (strObj.getType().getTypeTag() == ATypeTag.STRING) {
+            AString astr = (AString) strObj;
+            if (astr.getStringValue().length() >= index.getGramLength()) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private boolean isContainsFuncJoinOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        if (index.isEnforcingKeyFileds())
+            return isContainsFuncCompatible(index.getKeyFieldTypes().get(0).getTypeTag(), index.getIndexType());
+        else
+            return isContainsFuncCompatible(optFuncExpr.getFieldType(0).getTypeTag(), index.getIndexType());
+    }
+
+    private boolean isContainsFuncCompatible(ATypeTag typeTag, IndexType indexType) {
+        //We can only optimize contains with ngram indexes.
+        if ((typeTag == ATypeTag.STRING)
+                && (indexType == IndexType.SINGLE_PARTITION_NGRAM_INVIX || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)) {
+            return true;
+        }
+        return false;
+    }
+
+    public static IBinaryTokenizerFactory getBinaryTokenizerFactory(SearchModifierType searchModifierType,
+            ATypeTag searchKeyType, Index index) throws AlgebricksException {
+        switch (index.getIndexType()) {
+            case SINGLE_PARTITION_WORD_INVIX:
+            case LENGTH_PARTITIONED_WORD_INVIX: {
+                return AqlBinaryTokenizerFactoryProvider.INSTANCE.getWordTokenizerFactory(searchKeyType, false);
+            }
+            case SINGLE_PARTITION_NGRAM_INVIX:
+            case LENGTH_PARTITIONED_NGRAM_INVIX: {
+                // Make sure not to use pre- and postfixing for conjunctive searches.
+                boolean prePost = !(searchModifierType == SearchModifierType.CONJUNCTIVE
+                        || searchModifierType == SearchModifierType.CONJUNCTIVE_EDIT_DISTANCE);
+                return AqlBinaryTokenizerFactoryProvider.INSTANCE.getNGramTokenizerFactory(searchKeyType,
+                        index.getGramLength(), prePost, false);
+            }
+            default: {
+                throw new AlgebricksException("Tokenizer not applicable to index kind '" + index.getIndexType() + "'.");
+            }
+        }
+    }
+
+    public static IInvertedIndexSearchModifierFactory getSearchModifierFactory(SearchModifierType searchModifierType,
+            IAObject simThresh, Index index) throws AlgebricksException {
+        switch (searchModifierType) {
+            case CONJUNCTIVE: {
+                return new ConjunctiveSearchModifierFactory();
+            }
+            case JACCARD: {
+                float jaccThresh = ((AFloat) simThresh).getFloatValue();
+                return new JaccardSearchModifierFactory(jaccThresh);
+            }
+            case EDIT_DISTANCE:
+            case CONJUNCTIVE_EDIT_DISTANCE: {
+                int edThresh = 0;
+                try {
+                    edThresh = ((AInt32) ATypeHierarchy.convertNumericTypeObject(simThresh, ATypeTag.INT32))
+                            .getIntegerValue();
+                } catch (AsterixException e) {
+                    throw new AlgebricksException(e);
+                }
+
+                switch (index.getIndexType()) {
+                    case SINGLE_PARTITION_NGRAM_INVIX:
+                    case LENGTH_PARTITIONED_NGRAM_INVIX: {
+                        // Edit distance on strings, filtered with overlapping grams.
+                        if (searchModifierType == SearchModifierType.EDIT_DISTANCE) {
+                            return new EditDistanceSearchModifierFactory(index.getGramLength(), edThresh);
+                        } else {
+                            return new ConjunctiveEditDistanceSearchModifierFactory(index.getGramLength(), edThresh);
+                        }
+                    }
+                    case SINGLE_PARTITION_WORD_INVIX:
+                    case LENGTH_PARTITIONED_WORD_INVIX: {
+                        // Edit distance on two lists. The list-elements are non-overlapping.
+                        if (searchModifierType == SearchModifierType.EDIT_DISTANCE) {
+                            return new ListEditDistanceSearchModifierFactory(edThresh);
+                        } else {
+                            return new ConjunctiveListEditDistanceSearchModifierFactory(edThresh);
+                        }
+                    }
+                    default: {
+                        throw new AlgebricksException("Incompatible search modifier '" + searchModifierType
+                                + "' for index type '" + index.getIndexType() + "'");
+                    }
+                }
+            }
+            default: {
+                throw new AlgebricksException("Unknown search modifier type '" + searchModifierType + "'.");
+            }
+        }
+    }
+
+    private void inferTypes(ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
+        for (Mutable<ILogicalOperator> childOpRef : op.getInputs()) {
+            inferTypes(childOpRef.getValue(), context);
+        }
+        context.computeAndSetTypeEnvironmentForOperator(op);
+    }
+}
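
A worked example may make the merge-threshold arithmetic in isEditDistanceFuncSelectOptimizable
above easier to follow. The sketch below is plain Java with no Algebricks or AsterixDB
dependencies: only the two threshold formulas mirror the code in this diff; the class name,
method name, and sample values are illustrative assumptions.

public class EditDistanceFilterSketch {

    // prePost == true models the default case (grams computed with pre- and postfixing);
    // partial-field matching (edit-distance-contains) turns pre/postfixing off.
    static int mergeThreshold(String query, int gramLength, int edThresh, boolean prePost) {
        int numGrams = prePost
                ? query.length() + gramLength - 1   // padded gram count
                : query.length() - gramLength + 1;  // unpadded gram count
        return numGrams - edThresh * gramLength;
    }

    public static void main(String[] args) {
        // With a 3-gram index, the 7-character key "asterix" and threshold 2:
        // padded grams = 7 + 3 - 1 = 9, so mergeThreshold = 9 - 2 * 3 = 3 > 0 and the
        // inverted index is applicable. Threshold 3 yields 0, which rules the index out.
        System.out.println(mergeThreshold("asterix", 3, 2, true)); // 3
        System.out.println(mergeThreshold("asterix", 3, 3, true)); // 0 -> not optimizable
    }
}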

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
new file mode 100644
index 0000000..2bb9641
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexAccessMethod.SearchModifierType;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+
+/**
+ * Helper class for reading and writing job-gen parameters for inverted-index access methods to
+ * and from a list of function arguments, typically of an unnest-map.
+ */
+public class InvertedIndexJobGenParams extends AccessMethodJobGenParams {
+
+    protected SearchModifierType searchModifierType;
+    protected IAlgebricksConstantValue similarityThreshold;
+    protected ATypeTag searchKeyType;
+    protected List<LogicalVariable> keyVarList;
+    protected List<LogicalVariable> nonKeyVarList;
+
+    public InvertedIndexJobGenParams() {
+    }
+
+    public InvertedIndexJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
+        super(indexName, indexType, dataverseName, datasetName, retainInput, retainNull, requiresBroadcast);
+    }
+
+    public void setSearchModifierType(SearchModifierType searchModifierType) {
+        this.searchModifierType = searchModifierType;
+    }
+
+    public void setSimilarityThreshold(IAlgebricksConstantValue similarityThreshold) {
+        this.similarityThreshold = similarityThreshold;
+    }
+
+    public void setSearchKeyType(ATypeTag searchKeyType) {
+        this.searchKeyType = searchKeyType;
+    }
+
+    public void setKeyVarList(List<LogicalVariable> keyVarList) {
+        this.keyVarList = keyVarList;
+    }
+
+    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.writeToFuncArgs(funcArgs);
+        // Write search modifier type.
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createInt32Constant(searchModifierType
+                .ordinal())));
+        // Write similarity threshold.
+        funcArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(similarityThreshold)));
+        // Write search key type.
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createInt32Constant(searchKeyType
+                .ordinal())));
+        // Write key var list.
+        writeVarList(keyVarList, funcArgs);
+        // Write non-key var list.
+        if (nonKeyVarList != null) {
+            writeVarList(nonKeyVarList, funcArgs);
+        }
+    }
+
+    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.readFromFuncArgs(funcArgs);
+        int index = super.getNumParams();
+        // Read search modifier type.
+        int searchModifierOrdinal = AccessMethodUtils.getInt32Constant(funcArgs.get(index));
+        searchModifierType = SearchModifierType.values()[searchModifierOrdinal];
+        // Read similarity threshold. Concrete type depends on search modifier.
+        similarityThreshold = ((AsterixConstantValue) ((ConstantExpression) funcArgs.get(index + 1).getValue())
+                .getValue());
+        // Read type of search key.
+        int typeTagOrdinal = AccessMethodUtils.getInt32Constant(funcArgs.get(index + 2));
+        searchKeyType = ATypeTag.values()[typeTagOrdinal];
+        // Read key var list.
+        keyVarList = new ArrayList<LogicalVariable>();
+        readVarList(funcArgs, index + 3, keyVarList);
+        // We don't need to read the non-key var list here.
+        // TODO: We could possibly simplify things if we did read it.
+        nonKeyVarList = null;
+    }
+
+    public SearchModifierType getSearchModifierType() {
+        return searchModifierType;
+    }
+
+    public IAlgebricksConstantValue getSimilarityThreshold() {
+        return similarityThreshold;
+    }
+
+    public ATypeTag getSearchKeyType() {
+        return searchKeyType;
+    }
+
+    public List<LogicalVariable> getKeyVarList() {
+        return keyVarList;
+    }
+
+    public List<LogicalVariable> getNonKeyVarList() {
+        return nonKeyVarList;
+    }
+}
\ No newline at end of file
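
The job-gen parameters above travel through the plan as constant expressions, with enum values
encoded as int ordinals. The sketch below shows just that encode/decode round trip in plain Java;
the SearchModifier enum and the List<Integer> stand-in for the Algebricks argument list are
assumptions for illustration, not the real API.

import java.util.ArrayList;
import java.util.List;

public class OrdinalRoundTripSketch {

    enum SearchModifier { CONJUNCTIVE, JACCARD, EDIT_DISTANCE, CONJUNCTIVE_EDIT_DISTANCE }

    // Write: ship the enum as its int ordinal, as writeToFuncArgs does via an int32 constant.
    static void writeToArgs(List<Integer> args, SearchModifier modifier) {
        args.add(modifier.ordinal());
    }

    // Read: decode the ordinal back with Enum.values(), as readFromFuncArgs does.
    static SearchModifier readFromArgs(List<Integer> args, int index) {
        return SearchModifier.values()[args.get(index)];
    }

    public static void main(String[] args) {
        List<Integer> funcArgs = new ArrayList<>();
        writeToArgs(funcArgs, SearchModifier.JACCARD);
        System.out.println(readFromArgs(funcArgs, 0)); // JACCARD
    }
}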

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableFuncExpr.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableFuncExpr.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableFuncExpr.java
new file mode 100644
index 0000000..340bf3a
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableFuncExpr.java
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+
+/**
+ * General-purpose implementation of IOptimizableFuncExpr that supports any
+ * number of constant args, variable args and field names.
+ */
+public class OptimizableFuncExpr implements IOptimizableFuncExpr {
+    protected final AbstractFunctionCallExpression funcExpr;
+    protected final LogicalVariable[] logicalVars;
+    protected final LogicalVariable[] sourceVars;
+    protected final ILogicalExpression[] logicalExprs;
+    protected final List<List<String>> fieldNames;
+    protected final IAType[] fieldTypes;
+    protected final OptimizableOperatorSubTree[] subTrees;
+    protected final IAlgebricksConstantValue[] constantVals;
+    protected boolean partialField;
+
+    public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable[] logicalVars,
+            IAlgebricksConstantValue[] constantVals) {
+        this.funcExpr = funcExpr;
+        this.logicalVars = logicalVars;
+        this.sourceVars = new LogicalVariable[logicalVars.length];
+        this.logicalExprs = new ILogicalExpression[logicalVars.length];
+        this.constantVals = constantVals;
+        this.fieldNames = new ArrayList<List<String>>();
+        for (int i = 0; i < logicalVars.length; i++) {
+            fieldNames.add(new ArrayList<String>());
+        }
+        this.fieldTypes = new IAType[logicalVars.length];
+        this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
+
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+            this.partialField = true;
+        } else {
+            this.partialField = false;
+        }
+    }
+
+    // Special, more convenient c'tor for simple binary functions.
+    public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable logicalVar,
+            IAlgebricksConstantValue constantVal) {
+        this.funcExpr = funcExpr;
+        this.logicalVars = new LogicalVariable[] { logicalVar };
+        this.sourceVars = new LogicalVariable[1];
+        this.logicalExprs = new ILogicalExpression[1];
+        this.constantVals = new IAlgebricksConstantValue[] { constantVal };
+        this.fieldNames = new ArrayList<List<String>>();
+        for (int i = 0; i < logicalVars.length; i++) {
+            fieldNames.add(new ArrayList<String>());
+        }
+        this.fieldTypes = new IAType[logicalVars.length];
+        this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+            this.partialField = true;
+        } else {
+            this.partialField = false;
+        }
+    }
+
+    @Override
+    public AbstractFunctionCallExpression getFuncExpr() {
+        return funcExpr;
+    }
+
+    @Override
+    public int getNumLogicalVars() {
+        return logicalVars.length;
+    }
+
+    @Override
+    public int getNumConstantVals() {
+        return constantVals.length;
+    }
+
+    @Override
+    public LogicalVariable getLogicalVar(int index) {
+        return logicalVars[index];
+    }
+
+    @Override
+    public void setLogicalExpr(int index, ILogicalExpression logExpr) {
+        logicalExprs[index] = logExpr;
+    }
+
+    @Override
+    public ILogicalExpression getLogicalExpr(int index) {
+        return logicalExprs[index];
+    }
+
+    @Override
+    public void setFieldName(int index, List<String> fieldName) {
+        fieldNames.set(index, fieldName);
+    }
+
+    @Override
+    public List<String> getFieldName(int index) {
+        return fieldNames.get(index);
+    }
+
+    @Override
+    public void setFieldType(int index, IAType fieldType) {
+        fieldTypes[index] = fieldType;
+    }
+
+    @Override
+    public IAType getFieldType(int index) {
+        return fieldTypes[index];
+    }
+
+    @Override
+    public IAlgebricksConstantValue getConstantVal(int index) {
+        return constantVals[index];
+    }
+
+    @Override
+    public int findLogicalVar(LogicalVariable var) {
+        for (int i = 0; i < logicalVars.length; i++) {
+            if (var == logicalVars[i]) {
+                return i;
+            }
+        }
+        return -1;
+    }
+
+    @Override
+    public int findFieldName(List<String> fieldName) {
+        for (int i = 0; i < fieldNames.size(); i++) {
+            if (fieldName.equals(fieldNames.get(i))) {
+                return i;
+            }
+        }
+        return -1;
+    }
+
+    @Override
+    public void setOptimizableSubTree(int index, OptimizableOperatorSubTree subTree) {
+        subTrees[index] = subTree;
+    }
+
+    @Override
+    public OptimizableOperatorSubTree getOperatorSubTree(int index) {
+        return subTrees[index];
+    }
+
+    @Override
+    public void substituteVar(LogicalVariable original, LogicalVariable substitution) {
+        if (logicalVars != null) {
+            for (int i = 0; i < logicalVars.length; i++) {
+                if (logicalVars[i] == original) {
+                    logicalVars[i] = substitution;
+                    break;
+                }
+            }
+        }
+    }
+
+    @Override
+    public void setPartialField(boolean partialField) {
+        this.partialField = partialField;
+    }
+
+    @Override
+    public boolean containsPartialField() {
+        return partialField;
+    }
+
+    @Override
+    public int hashCode() {
+        return funcExpr.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof OptimizableFuncExpr)) {
+            return false;
+        }
+        return funcExpr.equals(((OptimizableFuncExpr) o).getFuncExpr());
+    }
+
+    public void setSourceVar(int index, LogicalVariable var) {
+        sourceVars[index] = var;
+    }
+
+    @Override
+    public LogicalVariable getSourceVar(int index) {
+        return sourceVars[index];
+    }
+
+}
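
One detail of OptimizableFuncExpr.substituteVar above that is easy to miss: the loop breaks after
the first matching slot, so a variable that appears in several argument positions is rewritten only
once. The sketch below reproduces that behaviour in plain Java; the String stand-ins are
illustrative, whereas the real code compares LogicalVariable references.

import java.util.Arrays;

public class SubstituteVarSketch {

    // Mirrors the early-exit loop in OptimizableFuncExpr.substituteVar.
    static void substituteVar(String[] logicalVars, String original, String substitution) {
        for (int i = 0; i < logicalVars.length; i++) {
            if (logicalVars[i].equals(original)) {
                logicalVars[i] = substitution;
                break; // only the first occurrence is replaced
            }
        }
    }

    public static void main(String[] args) {
        String[] vars = { "$x", "$y", "$x" };
        substituteVar(vars, "$x", "$z");
        System.out.println(Arrays.toString(vars)); // [$z, $y, $x] -- second $x is untouched
    }
}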


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixCLI.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixCLI.java b/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixCLI.java
deleted file mode 100644
index 69b0805..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixCLI.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.drivers;
-
-import java.io.File;
-import java.io.FileReader;
-import java.io.Reader;
-import java.util.List;
-
-import org.apache.commons.io.FileUtils;
-import org.kohsuke.args4j.Argument;
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.api.java.AsterixJavaClient;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-
-public class AsterixCLI {
-    private static class Options {
-        @Option(name = "-properties", usage = "Name of properties file", required = true)
-        public String properties;
-
-        @Option(name = "-output", usage = "Output folder to place results", required = true)
-        public String outputFolder;
-
-        @Argument(usage = "AQL Files to run", multiValued = true, required = true)
-        public List<String> args;
-    }
-
-    public static void main(String args[]) throws Exception {
-        Options options = new Options();
-        CmdLineParser parser = new CmdLineParser(options);
-        parser.parseArgument(args);
-
-        setUp(options);
-        try {
-            for (String queryFile : options.args) {
-                Reader in = new FileReader(queryFile);
-                AsterixJavaClient ajc = new AsterixJavaClient(
-                        AsterixHyracksIntegrationUtil.getHyracksClientConnection(), in);
-                try {
-                    ajc.compile(true, false, false, false, false, true, false);
-                } finally {
-                    in.close();
-                }
-                ajc.execute();
-            }
-        } finally {
-            tearDown();
-        }
-        System.exit(0);
-    }
-
-    public static void setUp(Options options) throws Exception {
-        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, options.properties);
-        File outdir = new File(options.outputFolder);
-        outdir.mkdirs();
-
-        File log = new File("asterix_logs");
-        if (log.exists())
-            FileUtils.deleteDirectory(log);
-        File lsn = new File("last_checkpoint_lsn");
-        lsn.deleteOnExit();
-
-        AsterixHyracksIntegrationUtil.init();
-    }
-
-    public static void tearDown() throws Exception {
-        AsterixHyracksIntegrationUtil.deinit();
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixClientDriver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixClientDriver.java b/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixClientDriver.java
deleted file mode 100644
index 1a82941..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixClientDriver.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.drivers;
-
-import java.io.FileReader;
-
-import org.kohsuke.args4j.CmdLineParser;
-
-import edu.uci.ics.asterix.api.common.AsterixClientConfig;
-import edu.uci.ics.asterix.api.java.AsterixJavaClient;
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-
-public class AsterixClientDriver {
-
-    public static void main(String args[]) throws Exception {
-        AsterixClientConfig acc = new AsterixClientConfig();
-        CmdLineParser cmdParser = new CmdLineParser(acc);
-        try {
-            cmdParser.parseArgument(args);
-        } catch (Exception e) {
-            cmdParser.printUsage(System.err);
-            throw e;
-        }
-
-        if (acc.getArguments().isEmpty()) {
-            System.err.println("Please specify the file containing the query.");
-            return;
-        }
-        if (acc.getArguments().size() > 1) {
-            System.err.print("Too many arguments. ");
-            System.err.println("Only the file contained the query needs to be specified.");
-            return;
-        }
-        boolean exec = new Boolean(acc.execute);
-        IHyracksClientConnection hcc = exec ? new HyracksConnection("localhost", acc.hyracksPort) : null;
-        AsterixJavaClient q = compileQuery(hcc, acc.getArguments().get(0), new Boolean(acc.optimize), new Boolean(
-                acc.onlyPhysical), exec || new Boolean(acc.hyracksJob));
-        if (exec) {
-            q.execute();
-        }
-    }
-
-    private static AsterixJavaClient compileQuery(IHyracksClientConnection hcc, String filename, boolean optimize,
-            boolean onlyPhysical, boolean createBinaryRuntime) throws Exception {
-        FileReader reader = new FileReader(filename);
-        AsterixJavaClient q = new AsterixJavaClient(hcc, reader);
-        q.compile(optimize, true, true, true, onlyPhysical, createBinaryRuntime, createBinaryRuntime);
-        return q;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixWebServer.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixWebServer.java b/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixWebServer.java
deleted file mode 100644
index a1204b1..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/drivers/AsterixWebServer.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.drivers;
-
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-
-import edu.uci.ics.asterix.api.http.servlet.APIServlet;
-
-public class AsterixWebServer {
-    public static void main(String args[]) throws Exception {
-        Server server = new Server(8080);
-        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
-        context.setContextPath("/");
-        server.setHandler(context);
-
-        context.addServlet(new ServletHolder(new APIServlet()), "/*");
-        server.start();
-        server.join();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/CentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/CentralFeedManager.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/CentralFeedManager.java
deleted file mode 100644
index 7bd19dd..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/CentralFeedManager.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright 2009-2014 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringReader;
-import java.util.List;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.feeds.api.ICentralFeedManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
-import edu.uci.ics.asterix.metadata.feeds.SocketMessageListener;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class CentralFeedManager implements ICentralFeedManager {
-
-    private static final ICentralFeedManager centralFeedManager = new CentralFeedManager();
-
-    public static ICentralFeedManager getInstance() {
-        return centralFeedManager;
-    }
-
-    private final int port;
-    private final IFeedLoadManager feedLoadManager;
-    private final IFeedTrackingManager feedTrackingManager;
-    private final SocketMessageListener messageListener;
-
-    private CentralFeedManager() {
-        this.port = AsterixAppContextInfo.getInstance().getFeedProperties().getFeedCentralManagerPort();
-        this.feedLoadManager = new FeedLoadManager();
-        this.feedTrackingManager = new FeedTrackingManager();
-        this.messageListener = new SocketMessageListener(port, new FeedMessageReceiver(this));
-    }
-
-    @Override
-    public void start() throws AsterixException {
-        messageListener.start();
-    }
-
-    @Override
-    public void stop() throws AsterixException, IOException {
-        messageListener.stop();
-    }
-
-    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobId jobId = hcc.startJob(spec);
-        if (waitForCompletion) {
-            hcc.waitForCompletion(jobId);
-        }
-        return jobId;
-    }
-
-    @Override
-    public IFeedLoadManager getFeedLoadManager() {
-        return feedLoadManager;
-    }
-
-    @Override
-    public IFeedTrackingManager getFeedTrackingManager() {
-        return feedTrackingManager;
-    }
-
-    public static class AQLExecutor {
-
-        private static final PrintWriter out = new PrintWriter(System.out, true);
-
-        public static void executeAQL(String aql) throws Exception {
-            AQLParser parser = new AQLParser(new StringReader(aql));
-            List<Statement> statements;
-            statements = parser.Statement();
-            SessionConfig pc = new SessionConfig(out, OutputFormat.ADM);
-            AqlTranslator translator = new AqlTranslator(statements, pc);
-            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, AqlTranslator.ResultDelivery.SYNC);
-        }
-    }
-
-}
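
The AQLExecutor helper removed above captures the standard flow for running AQL programmatically: parse the text into Statement objects, pair them with a SessionConfig, and let an AqlTranslator compile and execute against the cluster's Hyracks client connection. A minimal sketch of that flow, using only calls visible in the deleted class (the class name, the choice of output, and the omitted error handling are illustrative, not part of the repository):

    import java.io.PrintWriter;
    import java.io.StringReader;
    import java.util.List;

    import edu.uci.ics.asterix.api.common.SessionConfig;
    import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
    import edu.uci.ics.asterix.aql.base.Statement;
    import edu.uci.ics.asterix.aql.parser.AQLParser;
    import edu.uci.ics.asterix.aql.translator.AqlTranslator;
    import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;

    public class AqlRunnerSketch {
        public static void run(String aql) throws Exception {
            // Parse the AQL text into a list of statements.
            AQLParser parser = new AQLParser(new StringReader(aql));
            List<Statement> statements = parser.Statement();
            // Emit results as ADM to stdout; a real caller would pick its own writer and format.
            SessionConfig pc = new SessionConfig(new PrintWriter(System.out, true), OutputFormat.ADM);
            // Compile and run synchronously against the cluster controller connection.
            AqlTranslator translator = new AqlTranslator(statements, pc);
            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
                    AqlTranslator.ResultDelivery.SYNC);
        }
    }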

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedCollectInfo.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedCollectInfo.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedCollectInfo.java
deleted file mode 100644
index 35b8378..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedCollectInfo.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedId;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class FeedCollectInfo extends FeedInfo {
-    public FeedId sourceFeedId;
-    public FeedConnectionId feedConnectionId;
-    public List<String> collectLocations = new ArrayList<String>();
-    public List<String> computeLocations = new ArrayList<String>();
-    public List<String> storageLocations = new ArrayList<String>();
-    public Map<String, String> feedPolicy;
-    public String superFeedManagerHost;
-    public int superFeedManagerPort;
-    public boolean fullyConnected;
-
-    public FeedCollectInfo(FeedId sourceFeedId, FeedConnectionId feedConnectionId, JobSpecification jobSpec,
-            JobId jobId, Map<String, String> feedPolicy) {
-        super(jobSpec, jobId, FeedInfoType.COLLECT);
-        this.sourceFeedId = sourceFeedId;
-        this.feedConnectionId = feedConnectionId;
-        this.feedPolicy = feedPolicy;
-        this.fullyConnected = true;
-    }
-
-    @Override
-    public String toString() {
-        return FeedInfoType.COLLECT + "[" + feedConnectionId + "]";
-    }
-}
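
FeedCollectInfo above is a plain data holder that ties a collect job (its JobSpecification and JobId) to the source feed, the feed-to-dataset connection, and the policy it runs under. As a hedged illustration of how such an object is populated, the sketch below assumes jobSpec and jobId come from a prior job submission; the FeedId constructor arguments and the policy key are invented for the example, while the FeedConnectionId constructor matches its use elsewhere in this commit:

    // Sketch only: wiring up a FeedCollectInfo once the collect job has been submitted.
    FeedId sourceFeedId = new FeedId("feedDataverse", "rawTweetFeed");            // illustrative names
    FeedConnectionId connectionId = new FeedConnectionId(sourceFeedId, "ProcessedTweets");
    Map<String, String> feedPolicy = new HashMap<String, String>();
    feedPolicy.put("policy-name", "Basic");                                       // assumed key, for illustration
    FeedCollectInfo collectInfo = new FeedCollectInfo(sourceFeedId, connectionId, jobSpec, jobId, feedPolicy);
    collectInfo.collectLocations.add("nc1");                                      // placeholder node id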

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedInfo.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedInfo.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedInfo.java
deleted file mode 100644
index afc6ccb..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedInfo.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobInfo;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class FeedInfo {
-    public JobSpecification jobSpec;
-    public JobInfo jobInfo;
-    public JobId jobId;
-    public FeedInfoType infoType;
-    public State state;
-
-    public enum State {
-        ACTIVE,
-        INACTIVE
-    }
-
-    public enum FeedInfoType {
-        INTAKE,
-        COLLECT
-    }
-
-    public FeedInfo(JobSpecification jobSpec, JobId jobId, FeedInfoType infoType) {
-        this.jobSpec = jobSpec;
-        this.jobId = jobId;
-        this.infoType = infoType;
-        this.state = State.INACTIVE;
-    }
-
-    @Override
-    public String toString() {
-        return " job id " + jobId;
-    }
-}
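
FeedInfo is the shared base record: a job specification plus its JobId, the job's role (INTAKE or COLLECT), and a coarse ACTIVE/INACTIVE state. A small sketch of how a notification handler might use those public fields when Hyracks reports a job start (the method name is invented for illustration):

    void onJobStart(FeedInfo feedInfo, JobInfo runtimeInfo) {
        feedInfo.jobInfo = runtimeInfo;            // keep the runtime JobInfo reported by Hyracks
        feedInfo.state = FeedInfo.State.ACTIVE;    // this intake/collect pipeline segment is now live
    }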

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJobNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJobNotificationHandler.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJobNotificationHandler.java
deleted file mode 100644
index b045eae..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJobNotificationHandler.java
+++ /dev/null
@@ -1,739 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.rmi.RemoteException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.StringUtils;
-
-import edu.uci.ics.asterix.api.common.FeedWorkCollection.SubscribeFeedWork;
-import edu.uci.ics.asterix.common.exceptions.ACIDException;
-import edu.uci.ics.asterix.common.feeds.FeedActivity;
-import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
-import edu.uci.ics.asterix.common.feeds.FeedId;
-import edu.uci.ics.asterix.common.feeds.FeedIntakeInfo;
-import edu.uci.ics.asterix.common.feeds.FeedJobInfo;
-import edu.uci.ics.asterix.common.feeds.FeedJobInfo.FeedJobState;
-import edu.uci.ics.asterix.common.feeds.FeedJobInfo.JobType;
-import edu.uci.ics.asterix.common.feeds.FeedJointKey;
-import edu.uci.ics.asterix.common.feeds.FeedPolicyAccessor;
-import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
-import edu.uci.ics.asterix.common.feeds.api.IFeedJoint.State;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
-import edu.uci.ics.asterix.common.feeds.api.IIntakeProgressTracker;
-import edu.uci.ics.asterix.common.feeds.message.StorageReportFeedMessage;
-import edu.uci.ics.asterix.feeds.FeedLifecycleListener.Message;
-import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
-import edu.uci.ics.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.feeds.FeedMetaOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.feeds.FeedWorkManager;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobInfo;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.api.job.JobStatus;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
-
-public class FeedJobNotificationHandler implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
-
-    private final LinkedBlockingQueue<Message> inbox;
-    private final Map<FeedConnectionId, List<IFeedLifecycleEventSubscriber>> eventSubscribers;
-
-    private final Map<JobId, FeedJobInfo> jobInfos;
-    private final Map<FeedId, FeedIntakeInfo> intakeJobInfos;
-    private final Map<FeedConnectionId, FeedConnectJobInfo> connectJobInfos;
-    private final Map<FeedId, List<IFeedJoint>> feedPipeline;
-    private final Map<FeedConnectionId, Pair<IIntakeProgressTracker, Long>> feedIntakeProgressTrackers;
-
-    public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
-        this.inbox = inbox;
-        this.jobInfos = new HashMap<JobId, FeedJobInfo>();
-        this.intakeJobInfos = new HashMap<FeedId, FeedIntakeInfo>();
-        this.connectJobInfos = new HashMap<FeedConnectionId, FeedConnectJobInfo>();
-        this.feedPipeline = new HashMap<FeedId, List<IFeedJoint>>();
-        this.eventSubscribers = new HashMap<FeedConnectionId, List<IFeedLifecycleEventSubscriber>>();
-        this.feedIntakeProgressTrackers = new HashMap<FeedConnectionId, Pair<IIntakeProgressTracker, Long>>();
-    }
-
-    @Override
-    public void run() {
-        Message mesg;
-        while (true) {
-            try {
-                mesg = inbox.take();
-                switch (mesg.messageKind) {
-                    case JOB_START:
-                        handleJobStartMessage(mesg);
-                        break;
-                    case JOB_FINISH:
-                        handleJobFinishMessage(mesg);
-                        break;
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-
-        }
-    }
-
-    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
-            IIntakeProgressTracker feedIntakeProgressTracker) {
-        if (feedIntakeProgressTrackers.get(connectionId) == null) {
-            this.feedIntakeProgressTrackers.put(connectionId, new Pair<IIntakeProgressTracker, Long>(
-                    feedIntakeProgressTracker, 0L));
-        } else {
-            throw new IllegalStateException("Progress tracker for connection " + connectionId
-                    + " is already registered");
-        }
-    }
-
-    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
-        this.feedIntakeProgressTrackers.remove(connectionId);
-    }
-
-    public void updateTrackingInformation(StorageReportFeedMessage srm) {
-        Pair<IIntakeProgressTracker, Long> p = feedIntakeProgressTrackers.get(srm.getConnectionId());
-        if (p != null && p.second < srm.getLastPersistedTupleIntakeTimestamp()) {
-            p.second = srm.getLastPersistedTupleIntakeTimestamp();
-            p.first.notifyIngestedTupleTimestamp(p.second);
-        }
-    }
-
-    public Collection<FeedIntakeInfo> getFeedIntakeInfos() {
-        return intakeJobInfos.values();
-    }
-
-    public Collection<FeedConnectJobInfo> getFeedConnectInfos() {
-        return connectJobInfos.values();
-    }
-
-    public void registerFeedJoint(IFeedJoint feedJoint) {
-        List<IFeedJoint> feedJointsOnPipeline = feedPipeline.get(feedJoint.getOwnerFeedId());
-        if (feedJointsOnPipeline == null) {
-            feedJointsOnPipeline = new ArrayList<IFeedJoint>();
-            feedPipeline.put(feedJoint.getOwnerFeedId(), feedJointsOnPipeline);
-            feedJointsOnPipeline.add(feedJoint);
-        } else {
-            if (!feedJointsOnPipeline.contains(feedJoint)) {
-                feedJointsOnPipeline.add(feedJoint);
-            } else {
-                throw new IllegalArgumentException("Feed joint " + feedJoint + " already registered");
-            }
-        }
-    }
-
-    public void registerFeedIntakeJob(FeedId feedId, JobId jobId, JobSpecification jobSpec) throws HyracksDataException {
-        if (jobInfos.get(jobId) != null) {
-            throw new IllegalStateException("Feed job already registered");
-        }
-
-        List<IFeedJoint> joints = feedPipeline.get(feedId);
-        IFeedJoint intakeJoint = null;
-        for (IFeedJoint joint : joints) {
-            if (joint.getType().equals(IFeedJoint.FeedJointType.INTAKE)) {
-                intakeJoint = joint;
-                break;
-            }
-        }
-
-        if (intakeJoint != null) {
-            FeedIntakeInfo intakeJobInfo = new FeedIntakeInfo(jobId, FeedJobState.CREATED, FeedJobInfo.JobType.INTAKE,
-                    feedId, intakeJoint, jobSpec);
-            intakeJobInfos.put(feedId, intakeJobInfo);
-            jobInfos.put(jobId, intakeJobInfo);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered feed intake [" + jobId + "]" + " for feed " + feedId);
-            }
-        } else {
-            throw new HyracksDataException("Could not register feed intake job [" + jobId + "]" + " for feed  "
-                    + feedId);
-        }
-    }
-
-    public void registerFeedCollectionJob(FeedId sourceFeedId, FeedConnectionId connectionId, JobId jobId,
-            JobSpecification jobSpec, Map<String, String> feedPolicy) {
-        if (jobInfos.get(jobId) != null) {
-            throw new IllegalStateException("Feed job already registered");
-        }
-
-        List<IFeedJoint> feedJoints = feedPipeline.get(sourceFeedId);
-        FeedConnectionId cid = null;
-        IFeedJoint sourceFeedJoint = null;
-        for (IFeedJoint joint : feedJoints) {
-            cid = joint.getReceiver(connectionId);
-            if (cid != null) {
-                sourceFeedJoint = joint;
-                break;
-            }
-        }
-
-        if (cid != null) {
-            FeedConnectJobInfo cInfo = new FeedConnectJobInfo(jobId, FeedJobState.CREATED, connectionId,
-                    sourceFeedJoint, null, jobSpec, feedPolicy);
-            jobInfos.put(jobId, cInfo);
-            connectJobInfos.put(connectionId, cInfo);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered feed connection [" + jobId + "]" + " for feed " + connectionId);
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Could not register feed collection job [" + jobId + "]" + " for feed connection "
-                        + connectionId);
-            }
-        }
-
-    }
-
-    public void deregisterFeedIntakeJob(JobId jobId) {
-        if (jobInfos.get(jobId) == null) {
-            throw new IllegalStateException(" Feed Intake job not registered ");
-        }
-
-        FeedIntakeInfo info = (FeedIntakeInfo) jobInfos.get(jobId);
-        jobInfos.remove(jobId);
-        intakeJobInfos.remove(info.getFeedId());
-
-        if (!info.getState().equals(FeedJobState.UNDER_RECOVERY)) {
-            List<IFeedJoint> joints = feedPipeline.get(info.getFeedId());
-            joints.remove(info.getIntakeFeedJoint());
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Deregistered feed intake job [" + jobId + "]");
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Not removing feed joint as intake job is in " + FeedJobState.UNDER_RECOVERY + " state.");
-            }
-        }
-
-    }
-
-    private void handleJobStartMessage(Message message) throws Exception {
-        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
-        switch (jobInfo.getJobType()) {
-            case INTAKE:
-                handleIntakeJobStartMessage((FeedIntakeInfo) jobInfo);
-                break;
-            case FEED_CONNECT:
-                handleCollectJobStartMessage((FeedConnectJobInfo) jobInfo);
-                break;
-        }
-
-    }
-
-    private void handleJobFinishMessage(Message message) throws Exception {
-        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
-        switch (jobInfo.getJobType()) {
-            case INTAKE:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Intake Job finished for feed intake " + jobInfo.getJobId());
-                }
-                handleFeedIntakeJobFinishMessage((FeedIntakeInfo) jobInfo, message);
-                break;
-            case FEED_CONNECT:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Collect Job finished for  " + (FeedConnectJobInfo) jobInfo);
-                }
-                handleFeedCollectJobFinishMessage((FeedConnectJobInfo) jobInfo);
-                break;
-        }
-
-    }
-
-    private synchronized void handleIntakeJobStartMessage(FeedIntakeInfo intakeJobInfo) throws Exception {
-        List<OperatorDescriptorId> intakeOperatorIds = new ArrayList<OperatorDescriptorId>();
-        Map<OperatorDescriptorId, IOperatorDescriptor> operators = intakeJobInfo.getSpec().getOperatorMap();
-        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
-            IOperatorDescriptor opDesc = entry.getValue();
-            if (opDesc instanceof FeedIntakeOperatorDescriptor) {
-                intakeOperatorIds.add(opDesc.getOperatorId());
-            }
-        }
-
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(intakeJobInfo.getJobId());
-        List<String> intakeLocations = new ArrayList<String>();
-        for (OperatorDescriptorId intakeOperatorId : intakeOperatorIds) {
-            Map<Integer, String> operatorLocations = info.getOperatorLocations().get(intakeOperatorId);
-            int nOperatorInstances = operatorLocations.size();
-            for (int i = 0; i < nOperatorInstances; i++) {
-                intakeLocations.add(operatorLocations.get(i));
-            }
-        }
-        // intakeLocations is an ordered list; element at position i corresponds to location of i'th instance of operator
-        intakeJobInfo.setIntakeLocation(intakeLocations);
-        intakeJobInfo.getIntakeFeedJoint().setState(State.ACTIVE);
-        intakeJobInfo.setState(FeedJobState.ACTIVE);
-
-        // notify event listeners 
-        notifyFeedEventSubscribers(intakeJobInfo, FeedLifecycleEvent.FEED_INTAKE_STARTED);
-    }
-
-    private void handleCollectJobStartMessage(FeedConnectJobInfo cInfo) throws RemoteException, ACIDException {
-        // set locations of feed sub-operations (intake, compute, store)
-        setLocations(cInfo);
-
-        // activate joints
-        List<IFeedJoint> joints = feedPipeline.get(cInfo.getConnectionId().getFeedId());
-        for (IFeedJoint joint : joints) {
-            if (joint.getProvider().equals(cInfo.getConnectionId())) {
-                joint.setState(State.ACTIVE);
-                if (joint.getType().equals(IFeedJoint.FeedJointType.COMPUTE)) {
-                    cInfo.setComputeFeedJoint(joint);
-                }
-            }
-        }
-        cInfo.setState(FeedJobState.ACTIVE);
-
-        // register activity in metadata
-        registerFeedActivity(cInfo);
-        // notify event listeners
-        notifyFeedEventSubscribers(cInfo, FeedLifecycleEvent.FEED_COLLECT_STARTED);
-    }
-
-    private void notifyFeedEventSubscribers(FeedJobInfo jobInfo, FeedLifecycleEvent event) {
-        JobType jobType = jobInfo.getJobType();
-        List<FeedConnectionId> impactedConnections = new ArrayList<FeedConnectionId>();
-        if (jobType.equals(JobType.INTAKE)) {
-            FeedId feedId = ((FeedIntakeInfo) jobInfo).getFeedId();
-            for (FeedConnectionId connId : eventSubscribers.keySet()) {
-                if (connId.getFeedId().equals(feedId)) {
-                    impactedConnections.add(connId);
-                }
-            }
-        } else {
-            impactedConnections.add(((FeedConnectJobInfo) jobInfo).getConnectionId());
-        }
-
-        for (FeedConnectionId connId : impactedConnections) {
-            List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connId);
-            if (subscribers != null && !subscribers.isEmpty()) {
-                for (IFeedLifecycleEventSubscriber subscriber : subscribers) {
-                    subscriber.handleFeedEvent(event);
-                }
-            }
-        }
-    }
-
-    public synchronized void submitFeedConnectionRequest(IFeedJoint feedJoint, final FeedConnectionRequest request)
-            throws Exception {
-        List<String> locations = null;
-        switch (feedJoint.getType()) {
-            case INTAKE:
-                FeedIntakeInfo intakeInfo = intakeJobInfos.get(feedJoint.getOwnerFeedId());
-                locations = intakeInfo.getIntakeLocation();
-                break;
-            case COMPUTE:
-                FeedConnectionId connectionId = feedJoint.getProvider();
-                FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-                locations = cInfo.getComputeLocations();
-                break;
-        }
-
-        SubscribeFeedWork work = new SubscribeFeedWork(locations.toArray(new String[] {}), request);
-        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
-    }
-
-    public IFeedJoint getSourceFeedJoint(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        if (cInfo != null) {
-            return cInfo.getSourceFeedJoint();
-        }
-        return null;
-    }
-
-    public Set<FeedConnectionId> getActiveFeedConnections() {
-        Set<FeedConnectionId> activeConnections = new HashSet<FeedConnectionId>();
-        for (FeedConnectJobInfo cInfo : connectJobInfos.values()) {
-            if (cInfo.getState().equals(FeedJobState.ACTIVE)) {
-                activeConnections.add(cInfo.getConnectionId());
-            }
-        }
-        return activeConnections;
-    }
-
-    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        if (cInfo != null) {
-            return cInfo.getState().equals(FeedJobState.ACTIVE);
-        }
-        return false;
-    }
-
-    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
-        FeedConnectJobInfo connectJobInfo = connectJobInfos.get(connectionId);
-        connectJobInfo.setState(jobState);
-    }
-
-    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getState();
-    }
-
-    private void handleFeedIntakeJobFinishMessage(FeedIntakeInfo intakeInfo, Message message) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(message.jobId);
-        JobStatus status = info.getStatus();
-        FeedLifecycleEvent event;
-        event = status.equals(JobStatus.FAILURE) ? FeedLifecycleEvent.FEED_INTAKE_FAILURE
-                : FeedLifecycleEvent.FEED_ENDED;
-
-        // remove feed joints
-        deregisterFeedIntakeJob(message.jobId);
-
-        // notify event listeners 
-        notifyFeedEventSubscribers(intakeInfo, event);
-
-    }
-
-    private void handleFeedCollectJobFinishMessage(FeedConnectJobInfo cInfo) throws Exception {
-        FeedConnectionId connectionId = cInfo.getConnectionId();
-
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(cInfo.getJobId());
-        JobStatus status = info.getStatus();
-        boolean failure = status != null && status.equals(JobStatus.FAILURE);
-        FeedPolicyAccessor fpa = new FeedPolicyAccessor(cInfo.getFeedPolicy());
-
-        boolean removeJobHistory = !failure;
-        boolean retainSubscription = cInfo.getState().equals(FeedJobState.UNDER_RECOVERY)
-                || (failure && fpa.continueOnHardwareFailure());
-
-        if (!retainSubscription) {
-            IFeedJoint feedJoint = cInfo.getSourceFeedJoint();
-            feedJoint.removeReceiver(connectionId);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Subscription " + cInfo.getConnectionId() + " completed successfully. Removed subscription");
-            }
-            removeFeedJointsPostPipelineTermination(cInfo.getConnectionId());
-        }
-
-        if (removeJobHistory) {
-            connectJobInfos.remove(connectionId);
-            jobInfos.remove(cInfo.getJobId());
-            feedIntakeProgressTrackers.remove(cInfo.getConnectionId());
-        }
-        deregisterFeedActivity(cInfo);
-
-        // notify event listeners 
-        FeedLifecycleEvent event = failure ? FeedLifecycleEvent.FEED_COLLECT_FAILURE : FeedLifecycleEvent.FEED_ENDED;
-        notifyFeedEventSubscribers(cInfo, event);
-    }
-
-    private void registerFeedActivity(FeedConnectJobInfo cInfo) {
-        Map<String, String> feedActivityDetails = new HashMap<String, String>();
-
-        if (cInfo.getCollectLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS,
-                    StringUtils.join(cInfo.getCollectLocations().iterator(), ','));
-        }
-
-        if (cInfo.getComputeLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS,
-                    StringUtils.join(cInfo.getComputeLocations().iterator(), ','));
-        }
-
-        if (cInfo.getStorageLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS,
-                    StringUtils.join(cInfo.getStorageLocations().iterator(), ','));
-        }
-
-        String policyName = cInfo.getFeedPolicy().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
-        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
-
-        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, (new Date()).toString());
-        try {
-            FeedActivity feedActivity = new FeedActivity(cInfo.getConnectionId().getFeedId().getDataverse(), cInfo
-                    .getConnectionId().getFeedId().getFeedName(), cInfo.getConnectionId().getDatasetName(),
-                    feedActivityDetails);
-            CentralFeedManager.getInstance().getFeedLoadManager()
-                    .reportFeedActivity(cInfo.getConnectionId(), feedActivity);
-
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to register feed activity for " + cInfo + " " + e.getMessage());
-            }
-
-        }
-
-    }
-
-    public void deregisterFeedActivity(FeedConnectJobInfo cInfo) {
-        try {
-            CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(cInfo.getConnectionId());
-        } catch (Exception e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to deregister feed activity for " + cInfo + " " + e.getMessage());
-            }
-        }
-    }
-
-    public void removeFeedJointsPostPipelineTermination(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        List<IFeedJoint> feedJoints = feedPipeline.get(connectionId.getFeedId());
-
-        IFeedJoint sourceJoint = cInfo.getSourceFeedJoint();
-        List<FeedConnectionId> all = sourceJoint.getReceivers();
-        boolean removeSourceJoint = all.size() < 2;
-        if (removeSourceJoint) {
-            feedJoints.remove(sourceJoint);
-        }
-
-        IFeedJoint computeJoint = cInfo.getComputeFeedJoint();
-        if (computeJoint != null && computeJoint.getReceivers().size() < 2) {
-            feedJoints.remove(computeJoint);
-        }
-    }
-
-    public boolean isRegisteredFeedJob(JobId jobId) {
-        return jobInfos.get(jobId) != null;
-    }
-
-    public List<String> getFeedComputeLocations(FeedId feedId) {
-        List<IFeedJoint> feedJoints = feedPipeline.get(feedId);
-        for (IFeedJoint joint : feedJoints) {
-            if (joint.getFeedJointKey().getFeedId().equals(feedId)) {
-                return connectJobInfos.get(joint.getProvider()).getComputeLocations();
-            }
-        }
-        return null;
-    }
-
-    public List<String> getFeedStorageLocations(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getStorageLocations();
-    }
-
-    public List<String> getFeedCollectLocations(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getCollectLocations();
-    }
-
-    public List<String> getFeedIntakeLocations(FeedId feedId) {
-        return intakeJobInfos.get(feedId).getIntakeLocation();
-    }
-
-    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getJobId();
-    }
-
-    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
-        if (subscribers == null) {
-            subscribers = new ArrayList<IFeedLifecycleEventSubscriber>();
-            eventSubscribers.put(connectionId, subscribers);
-        }
-        subscribers.add(subscriber);
-    }
-
-    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
-        if (subscribers != null) {
-            subscribers.remove(subscriber);
-        }
-    }
-
-    //============================
-
-    public boolean isFeedPointAvailable(FeedJointKey feedJointKey) {
-        List<IFeedJoint> joints = feedPipeline.get(feedJointKey.getFeedId());
-        if (joints != null && !joints.isEmpty()) {
-            for (IFeedJoint joint : joints) {
-                if (joint.getFeedJointKey().equals(feedJointKey)) {
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-
-    public Collection<IFeedJoint> getFeedIntakeJoints() {
-        List<IFeedJoint> intakeFeedPoints = new ArrayList<IFeedJoint>();
-        for (FeedIntakeInfo info : intakeJobInfos.values()) {
-            intakeFeedPoints.add(info.getIntakeFeedJoint());
-        }
-        return intakeFeedPoints;
-    }
-
-    public IFeedJoint getFeedJoint(FeedJointKey feedPointKey) {
-        List<IFeedJoint> joints = feedPipeline.get(feedPointKey.getFeedId());
-        if (joints != null && !joints.isEmpty()) {
-            for (IFeedJoint joint : joints) {
-                if (joint.getFeedJointKey().equals(feedPointKey)) {
-                    return joint;
-                }
-            }
-        }
-        return null;
-    }
-
-    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
-        IFeedJoint feedJoint = getFeedJoint(feedJointKey);
-        if (feedJoint != null) {
-            return feedJoint;
-        } else {
-            String jointKeyString = feedJointKey.getStringRep();
-            List<IFeedJoint> jointsOnPipeline = feedPipeline.get(feedJointKey.getFeedId());
-            IFeedJoint candidateJoint = null;
-            if (jointsOnPipeline != null) {
-                for (IFeedJoint joint : jointsOnPipeline) {
-                    if (jointKeyString.contains(joint.getFeedJointKey().getStringRep())) {
-                        if (candidateJoint == null) {
-                            candidateJoint = joint;
-                        } else if (joint.getFeedJointKey().getStringRep()
-                                .contains(candidateJoint.getFeedJointKey().getStringRep())) { // found feed point is a super set of the earlier find
-                            candidateJoint = joint;
-                        }
-                    }
-                }
-            }
-            return candidateJoint;
-        }
-    }
-
-    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getSpec();
-    }
-
-    public IFeedJoint getFeedPoint(FeedId sourceFeedId, IFeedJoint.FeedJointType type) {
-        List<IFeedJoint> joints = feedPipeline.get(sourceFeedId);
-        for (IFeedJoint joint : joints) {
-            if (joint.getType().equals(type)) {
-                return joint;
-            }
-        }
-        return null;
-    }
-
-    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId);
-    }
-
-    private void setLocations(FeedConnectJobInfo cInfo) {
-        JobSpecification jobSpec = cInfo.getSpec();
-
-        List<OperatorDescriptorId> collectOperatorIds = new ArrayList<OperatorDescriptorId>();
-        List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
-        List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
-
-        Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
-        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
-            IOperatorDescriptor opDesc = entry.getValue();
-            IOperatorDescriptor actualOp = null;
-            if (opDesc instanceof FeedMetaOperatorDescriptor) {
-                actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
-            } else {
-                actualOp = opDesc;
-            }
-
-            if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
-                AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
-                IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
-                boolean computeOp = false;
-                for (IPushRuntimeFactory rf : runtimeFactories) {
-                    if (rf instanceof AssignRuntimeFactory) {
-                        IConnectorDescriptor connDesc = jobSpec.getOperatorInputMap().get(op.getOperatorId()).get(0);
-                        IOperatorDescriptor sourceOp = jobSpec.getConnectorOperatorMap().get(connDesc.getConnectorId())
-                                .getLeft().getLeft();
-                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
-                            computeOp = true;
-                            break;
-                        }
-                    }
-                }
-                if (computeOp) {
-                    computeOperatorIds.add(entry.getKey());
-                }
-            } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
-                storageOperatorIds.add(entry.getKey());
-            } else if (actualOp instanceof FeedCollectOperatorDescriptor) {
-                collectOperatorIds.add(entry.getKey());
-            }
-        }
-
-        try {
-            IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-            JobInfo info = hcc.getJobInfo(cInfo.getJobId());
-            List<String> collectLocations = new ArrayList<String>();
-            for (OperatorDescriptorId collectOpId : collectOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(collectOpId);
-                int nOperatorInstances = operatorLocations.size();
-                for (int i = 0; i < nOperatorInstances; i++) {
-                    collectLocations.add(operatorLocations.get(i));
-                }
-            }
-
-            List<String> computeLocations = new ArrayList<String>();
-            for (OperatorDescriptorId computeOpId : computeOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
-                if (operatorLocations != null) {
-                    int nOperatorInstances = operatorLocations.size();
-                    for (int i = 0; i < nOperatorInstances; i++) {
-                        computeLocations.add(operatorLocations.get(i));
-                    }
-                } else {
-                    computeLocations.clear();
-                    computeLocations.addAll(collectLocations);
-                }
-            }
-
-            List<String> storageLocations = new ArrayList<String>();
-            for (OperatorDescriptorId storageOpId : storageOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
-                if (operatorLocations == null) {
-                    continue;
-                }
-                int nOperatorInstances = operatorLocations.size();
-                for (int i = 0; i < nOperatorInstances; i++) {
-                    storageLocations.add(operatorLocations.get(i));
-                }
-            }
-            cInfo.setCollectLocations(collectLocations);
-            cInfo.setComputeLocations(computeLocations);
-            cInfo.setStorageLocations(storageLocations);
-
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-    }
-}
\ No newline at end of file
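
Structurally, the FeedJobNotificationHandler removed above is a single consumer thread: it drains a LinkedBlockingQueue of job-start/job-finish messages and dispatches on the message kind, so the registration maps (jobInfos, intakeJobInfos, connectJobInfos) are only ever touched from that one thread. Below is a stripped-down sketch of the same inbox/dispatch idiom; the class, the nested event type, and the comments marking what each branch would do are placeholders, not the removed implementation:

    import java.util.concurrent.LinkedBlockingQueue;

    public class JobEventPump implements Runnable {
        enum Kind { JOB_START, JOB_FINISH }

        public static class Event {
            final Kind kind;
            public Event(Kind kind) { this.kind = kind; }
        }

        private final LinkedBlockingQueue<Event> inbox;

        public JobEventPump(LinkedBlockingQueue<Event> inbox) {
            this.inbox = inbox;
        }

        @Override
        public void run() {
            while (true) {
                try {
                    Event e = inbox.take();      // blocks until the lifecycle listener enqueues an event
                    switch (e.kind) {
                        case JOB_START:
                            // resolve operator locations, mark feed joints ACTIVE, notify subscribers
                            break;
                        case JOB_FINISH:
                            // deregister job info, prune feed joints, notify subscribers
                            break;
                    }
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                    return;                      // stop the pump if the thread is interrupted
                }
            }
        }
    }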

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJoint.java
deleted file mode 100644
index a0411ce..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedJoint.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
-import edu.uci.ics.asterix.common.feeds.FeedId;
-import edu.uci.ics.asterix.common.feeds.FeedJointKey;
-import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
-
-public class FeedJoint implements IFeedJoint {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJoint.class.getName());
-
-    /** A unique key associated with the feed point **/
-    private final FeedJointKey key;
-
-    /** The state associated with the FeedJoint **/
-    private State state;
-
-    /** A list of subscribers that receive data from this FeedJoint **/
-    private final List<FeedConnectionId> receivers;
-
-    /** The feedId on which the feedPoint resides **/
-    private final FeedId ownerFeedId;
-
-    /** A list of feed subscription requests submitted for subscribing to the FeedPoint's data **/
-    private final List<FeedConnectionRequest> connectionRequests;
-
-    private final ConnectionLocation connectionLocation;
-
-    private final FeedJointType type;
-
-    private FeedConnectionId provider;
-
-    public FeedJoint(FeedJointKey key, FeedId ownerFeedId, ConnectionLocation subscriptionLocation, FeedJointType type,
-            FeedConnectionId provider) {
-        this.key = key;
-        this.ownerFeedId = ownerFeedId;
-        this.type = type;
-        this.receivers = new ArrayList<FeedConnectionId>();
-        this.state = State.CREATED;
-        this.connectionLocation = subscriptionLocation;
-        this.connectionRequests = new ArrayList<FeedConnectionRequest>();
-        this.provider = provider;
-    }
-
-    @Override
-    public int hashCode() {
-        return key.hashCode();
-    }
-
-    public void addReceiver(FeedConnectionId connectionId) {
-        receivers.add(connectionId);
-    }
-
-    public void removeReceiver(FeedConnectionId connectionId) {
-        receivers.remove(connectionId);
-    }
-
-    public synchronized void addConnectionRequest(FeedConnectionRequest request) {
-        connectionRequests.add(request);
-        if (state.equals(State.ACTIVE)) {
-            handlePendingConnectionRequest();
-        }
-    }
-
-    public synchronized void setState(State state) {
-        if (this.state.equals(state)) {
-            return;
-        }
-        this.state = state;
-        if (this.state.equals(State.ACTIVE)) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Feed joint " + this + " is now " + State.ACTIVE);
-            }
-            handlePendingConnectionRequest();
-        }
-    }
-
-    private void handlePendingConnectionRequest() {
-        for (FeedConnectionRequest connectionRequest : connectionRequests) {
-            FeedConnectionId connectionId = new FeedConnectionId(connectionRequest.getReceivingFeedId(),
-                    connectionRequest.getTargetDataset());
-            try {
-                FeedLifecycleListener.INSTANCE.submitFeedConnectionRequest(this, connectionRequest);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Submitted feed connection request " + connectionRequest + " at feed joint " + this);
-                }
-                addReceiver(connectionId);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unsuccessful attempt at submitting connection request " + connectionRequest
-                            + " at feed joint " + this + ". Message " + e.getMessage());
-                }
-                e.printStackTrace();
-            }
-        }
-        connectionRequests.clear();
-    }
-
-    public FeedConnectionId getReceiver(FeedConnectionId connectionId) {
-        for (FeedConnectionId cid : receivers) {
-            if (cid.equals(connectionId)) {
-                return cid;
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public String toString() {
-        return key.toString() + " [" + connectionLocation + "]" + "[" + state + "]";
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == null) {
-            return false;
-        }
-        if (o == this) {
-            return true;
-        }
-        if (!(o instanceof FeedJoint)) {
-            return false;
-        }
-        return ((FeedJoint) o).getFeedJointKey().equals(this.key);
-    }
-
-    public FeedId getOwnerFeedId() {
-        return ownerFeedId;
-    }
-
-    public List<FeedConnectionRequest> getConnectionRequests() {
-        return connectionRequests;
-    }
-
-    public ConnectionLocation getConnectionLocation() {
-        return connectionLocation;
-    }
-
-    public FeedJointType getType() {
-        return type;
-    }
-
-    @Override
-    public FeedConnectionId getProvider() {
-        return provider;
-    }
-
-    public List<FeedConnectionId> getReceivers() {
-        return receivers;
-    }
-
-    public FeedJointKey getKey() {
-        return key;
-    }
-
-    public synchronized State getState() {
-        return state;
-    }
-
-    @Override
-    public FeedJointKey getFeedJointKey() {
-        return key;
-    }
-
-}
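
The notable behaviour in FeedJoint above is that connection requests arriving before the joint is ACTIVE are parked in connectionRequests and replayed when setState(State.ACTIVE) fires. A stripped-down, generic sketch of that buffer-then-drain idiom follows; the interface and class names are invented and this is not the removed API:

    interface RequestHandler<R> {
        void handle(R request);
    }

    class PendingRequestBuffer<R> {
        private final java.util.List<R> pending = new java.util.ArrayList<R>();
        private boolean active = false;

        synchronized void submit(R request, RequestHandler<R> handler) {
            if (active) {
                handler.handle(request);   // already ACTIVE: act on the request immediately
            } else {
                pending.add(request);      // still CREATED: park the request until activation
            }
        }

        synchronized void activate(RequestHandler<R> handler) {
            active = true;
            for (R request : pending) {
                handler.handle(request);   // replay everything buffered while inactive
            }
            pending.clear();
        }
    }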

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLifecycleListener.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLifecycleListener.java
deleted file mode 100644
index 432a56d..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLifecycleListener.java
+++ /dev/null
@@ -1,486 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.DataverseDecl;
-import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.common.api.IClusterManagementWork;
-import edu.uci.ics.asterix.common.api.IClusterManagementWork.ClusterState;
-import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
-import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
-import edu.uci.ics.asterix.common.feeds.FeedId;
-import edu.uci.ics.asterix.common.feeds.FeedIntakeInfo;
-import edu.uci.ics.asterix.common.feeds.FeedJobInfo;
-import edu.uci.ics.asterix.common.feeds.FeedJobInfo.FeedJobState;
-import edu.uci.ics.asterix.common.feeds.FeedJointKey;
-import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener;
-import edu.uci.ics.asterix.common.feeds.api.IIntakeProgressTracker;
-import edu.uci.ics.asterix.common.feeds.message.StorageReportFeedMessage;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
-import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
-import edu.uci.ics.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-/**
- * A listener that subscribes to events associated with cluster membership
- * (nodes joining/leaving the cluster) and job lifecycle (start/end of a job).
- * Subscription to such events allows keeping track of feed ingestion jobs and
- * taking any corrective action that may be required when a node involved in a
- * feed leaves the cluster.
- */
-public class FeedLifecycleListener implements IFeedLifecycleListener {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedLifecycleListener.class.getName());
-
-    public static FeedLifecycleListener INSTANCE = new FeedLifecycleListener();
-
-    private final LinkedBlockingQueue<Message> jobEventInbox;
-    private final LinkedBlockingQueue<IClusterManagementWorkResponse> responseInbox;
-    private final Map<FeedCollectInfo, List<String>> dependentFeeds = new HashMap<FeedCollectInfo, List<String>>();
-    private final Map<FeedConnectionId, LinkedBlockingQueue<String>> feedReportQueue;
-    private final FeedJobNotificationHandler feedJobNotificationHandler;
-    private final FeedWorkRequestResponseHandler feedWorkRequestResponseHandler;
-    private final ExecutorService executorService;
-
-    private ClusterState state;
-
-    private FeedLifecycleListener() {
-        this.jobEventInbox = new LinkedBlockingQueue<Message>();
-        this.feedJobNotificationHandler = new FeedJobNotificationHandler(jobEventInbox);
-        this.responseInbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
-        this.feedWorkRequestResponseHandler = new FeedWorkRequestResponseHandler(responseInbox);
-        this.feedReportQueue = new HashMap<FeedConnectionId, LinkedBlockingQueue<String>>();
-        this.executorService = Executors.newCachedThreadPool();
-        this.executorService.execute(feedJobNotificationHandler);
-        this.executorService.execute(feedWorkRequestResponseHandler);
-        ClusterManager.INSTANCE.registerSubscriber(this);
-        this.state = AsterixClusterProperties.INSTANCE.getState();
-    }
-
-    @Override
-    public void notifyJobStart(JobId jobId) throws HyracksException {
-        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
-            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_START));
-        }
-    }
-
-    @Override
-    public void notifyJobFinish(JobId jobId) throws HyracksException {
-        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
-            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_FINISH));
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("NO NEED TO NOTIFY JOB FINISH!");
-            }
-        }
-    }
-
-    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedConnectJobInfo(connectionId);
-    }
-
-    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
-            IIntakeProgressTracker feedIntakeProgressTracker) {
-        feedJobNotificationHandler.registerFeedIntakeProgressTracker(connectionId, feedIntakeProgressTracker);
-    }
-
-    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
-        feedJobNotificationHandler.deregisterFeedIntakeProgressTracker(connectionId);
-    }
-
-    public void updateTrackingInformation(StorageReportFeedMessage srm) {
-        feedJobNotificationHandler.updateTrackingInformation(srm);
-    }
-
-    /*
-     * Traverse job specification to categorize job as a feed intake job or a feed collection job 
-     */
-    @Override
-    public void notifyJobCreation(JobId jobId, IActivityClusterGraphGeneratorFactory acggf) throws HyracksException {
-        JobSpecification spec = acggf.getJobSpecification();
-        FeedConnectionId feedConnectionId = null;
-        Map<String, String> feedPolicy = null;
-        for (IOperatorDescriptor opDesc : spec.getOperatorMap().values()) {
-            if (opDesc instanceof FeedCollectOperatorDescriptor) {
-                feedConnectionId = ((FeedCollectOperatorDescriptor) opDesc).getFeedConnectionId();
-                feedPolicy = ((FeedCollectOperatorDescriptor) opDesc).getFeedPolicyProperties();
-                feedJobNotificationHandler.registerFeedCollectionJob(
-                        ((FeedCollectOperatorDescriptor) opDesc).getSourceFeedId(), feedConnectionId, jobId, spec,
-                        feedPolicy);
-                break;
-            } else if (opDesc instanceof FeedIntakeOperatorDescriptor) {
-                feedJobNotificationHandler.registerFeedIntakeJob(((FeedIntakeOperatorDescriptor) opDesc).getFeedId(),
-                        jobId, spec);
-                break;
-            }
-        }
-    }
-
-    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
-        feedJobNotificationHandler.setJobState(connectionId, jobState);
-    }
-
-    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedJobState(connectionId);
-    }
-
-    public static class Message {
-        public JobId jobId;
-
-        public enum MessageKind {
-            JOB_START,
-            JOB_FINISH
-        }
-
-        public MessageKind messageKind;
-
-        public Message(JobId jobId, MessageKind msgKind) {
-            this.jobId = jobId;
-            this.messageKind = msgKind;
-        }
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
-        Set<IClusterManagementWork> workToBeDone = new HashSet<IClusterManagementWork>();
-
-        Collection<FeedIntakeInfo> intakeInfos = feedJobNotificationHandler.getFeedIntakeInfos();
-        Collection<FeedConnectJobInfo> connectJobInfos = feedJobNotificationHandler.getFeedConnectInfos();
-
-        Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<String, List<FeedJobInfo>>();
-
-        for (String deadNode : deadNodeIds) {
-            for (FeedIntakeInfo intakeInfo : intakeInfos) {
-                if (intakeInfo.getIntakeLocation().contains(deadNode)) {
-                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
-                    if (infos == null) {
-                        infos = new ArrayList<FeedJobInfo>();
-                        impactedJobs.put(deadNode, infos);
-                    }
-                    infos.add(intakeInfo);
-                    intakeInfo.setState(FeedJobState.UNDER_RECOVERY);
-                }
-            }
-
-            for (FeedConnectJobInfo connectInfo : connectJobInfos) {
-                if (connectInfo.getStorageLocations().contains(deadNode)) {
-                    continue;
-                }
-                if (connectInfo.getComputeLocations().contains(deadNode)
-                        || connectInfo.getCollectLocations().contains(deadNode)) {
-                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
-                    if (infos == null) {
-                        infos = new ArrayList<FeedJobInfo>();
-                        impactedJobs.put(deadNode, infos);
-                    }
-                    infos.add(connectInfo);
-                    connectInfo.setState(FeedJobState.UNDER_RECOVERY);
-                    feedJobNotificationHandler.deregisterFeedActivity(connectInfo);
-                }
-            }
-
-        }
-
-        if (impactedJobs.size() > 0) {
-            AddNodeWork addNodeWork = new AddNodeWork(deadNodeIds, deadNodeIds.size(), this);
-            feedWorkRequestResponseHandler.registerFeedWork(addNodeWork.getWorkId(), impactedJobs);
-            workToBeDone.add(addNodeWork);
-        }
-        return workToBeDone;
-
-    }
-
-    public static class FailureReport {
-
-        private final List<Pair<FeedConnectJobInfo, List<String>>> recoverableConnectJobs;
-        private final Map<IFeedJoint, List<String>> recoverableIntakeFeedIds;
-
-        public FailureReport(Map<IFeedJoint, List<String>> recoverableIntakeFeedIds,
-                List<Pair<FeedConnectJobInfo, List<String>>> recoverableSubscribers) {
-            this.recoverableConnectJobs = recoverableSubscribers;
-            this.recoverableIntakeFeedIds = recoverableIntakeFeedIds;
-        }
-
-        public List<Pair<FeedConnectJobInfo, List<String>>> getRecoverableSubscribers() {
-            return recoverableConnectJobs;
-        }
-
-        public Map<IFeedJoint, List<String>> getRecoverableIntakeFeedIds() {
-            return recoverableIntakeFeedIds;
-        }
-
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
-        ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(joinedNodeId + " joined the cluster. " + "Asterix state: " + newState);
-        }
-
-        boolean needToReActivateFeeds = !newState.equals(state) && (newState == ClusterState.ACTIVE);
-        if (needToReActivateFeeds) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(joinedNodeId + " Resuming loser feeds (if any)");
-            }
-            try {
-                FeedsActivator activator = new FeedsActivator();
-                (new Thread(activator)).start();
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Exception in resuming feeds" + e.getMessage());
-                }
-            }
-            state = newState;
-        } else {
-            List<FeedCollectInfo> feedsThatCanBeRevived = new ArrayList<FeedCollectInfo>();
-            for (Entry<FeedCollectInfo, List<String>> entry : dependentFeeds.entrySet()) {
-                List<String> requiredNodeIds = entry.getValue();
-                if (requiredNodeIds.contains(joinedNodeId)) {
-                    requiredNodeIds.remove(joinedNodeId);
-                    if (requiredNodeIds.isEmpty()) {
-                        feedsThatCanBeRevived.add(entry.getKey());
-                    }
-                }
-            }
-            if (!feedsThatCanBeRevived.isEmpty()) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Resuming feeds after rejoining of node " + joinedNodeId);
-                }
-                FeedsActivator activator = new FeedsActivator(feedsThatCanBeRevived);
-                (new Thread(activator)).start();
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
-        try {
-            responseInbox.put(response);
-        } catch (InterruptedException e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Interrupted exception");
-            }
-        }
-    }
-
-    @Override
-    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
-        switch (newState) {
-            case ACTIVE:
-                if (previousState.equals(ClusterState.UNUSABLE)) {
-                    try {
-                        FeedsActivator activator = new FeedsActivator();
-                        // (new Thread(activator)).start();
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Exception in resuming feeds" + e.getMessage());
-                        }
-                    }
-                }
-                break;
-        }
-
-    }
-
-    public static class FeedsDeActivator implements Runnable {
-
-        private List<FeedConnectJobInfo> failedConnectjobs;
-
-        public FeedsDeActivator(List<FeedConnectJobInfo> failedConnectjobs) {
-            this.failedConnectjobs = failedConnectjobs;
-        }
-
-        @Override
-        public void run() {
-            for (FeedConnectJobInfo failedConnectJob : failedConnectjobs) {
-                endFeed(failedConnectJob);
-            }
-        }
-
-        private void endFeed(FeedConnectJobInfo cInfo) {
-            MetadataTransactionContext ctx = null;
-            PrintWriter writer = new PrintWriter(System.out, true);
-            SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-
-            try {
-                ctx = MetadataManager.INSTANCE.beginTransaction();
-                FeedId feedId = cInfo.getConnectionId().getFeedId();
-                DisconnectFeedStatement stmt = new DisconnectFeedStatement(new Identifier(feedId.getDataverse()),
-                        new Identifier(feedId.getFeedName()), new Identifier(cInfo.getConnectionId().getDatasetName()));
-                List<Statement> statements = new ArrayList<Statement>();
-                DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(feedId.getDataverse()));
-                statements.add(dataverseDecl);
-                statements.add(stmt);
-                AqlTranslator translator = new AqlTranslator(statements, pc);
-                translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                        AqlTranslator.ResultDelivery.SYNC);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("End irrecoverable feed: " + cInfo.getConnectionId());
-                }
-                MetadataManager.INSTANCE.commitTransaction(ctx);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Exception in ending loser feed: " + cInfo.getConnectionId() + " Exception "
-                            + e.getMessage());
-                }
-                e.printStackTrace();
-                try {
-                    MetadataManager.INSTANCE.abortTransaction(ctx);
-                } catch (Exception e2) {
-                    e2.addSuppressed(e);
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe("Exception in aborting transaction! System is in inconsistent state");
-                    }
-                }
-
-            }
-
-        }
-    }
-
-    public void submitFeedConnectionRequest(IFeedJoint feedPoint, FeedConnectionRequest subscriptionRequest)
-            throws Exception {
-        feedJobNotificationHandler.submitFeedConnectionRequest(feedPoint, subscriptionRequest);
-    }
-
-    @Override
-    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId) {
-        List<FeedConnectionId> connections = new ArrayList<FeedConnectionId>();
-        Collection<FeedConnectionId> activeConnections = feedJobNotificationHandler.getActiveFeedConnections();
-        if (feedId != null) {
-            for (FeedConnectionId connectionId : activeConnections) {
-                if (connectionId.getFeedId().equals(feedId)) {
-                    connections.add(connectionId);
-                }
-            }
-        } else {
-            connections.addAll(activeConnections);
-        }
-        return connections;
-    }
-
-    @Override
-    public List<String> getComputeLocations(FeedId feedId) {
-        return feedJobNotificationHandler.getFeedComputeLocations(feedId);
-    }
-
-    @Override
-    public List<String> getIntakeLocations(FeedId feedId) {
-        return feedJobNotificationHandler.getFeedIntakeLocations(feedId);
-    }
-
-    @Override
-    public List<String> getStoreLocations(FeedConnectionId feedConnectionId) {
-        return feedJobNotificationHandler.getFeedStorageLocations(feedConnectionId);
-    }
-
-    @Override
-    public List<String> getCollectLocations(FeedConnectionId feedConnectionId) {
-        return feedJobNotificationHandler.getFeedCollectLocations(feedConnectionId);
-    }
-
-    @Override
-    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.isFeedConnectionActive(connectionId);
-    }
-
-    public void reportPartialDisconnection(FeedConnectionId connectionId) {
-        feedJobNotificationHandler.removeFeedJointsPostPipelineTermination(connectionId);
-    }
-
-    public void registerFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
-        feedReportQueue.put(feedId, queue);
-    }
-
-    public void deregisterFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
-        feedReportQueue.remove(feedId);
-    }
-
-    public LinkedBlockingQueue<String> getFeedReportQueue(FeedConnectionId feedId) {
-        return feedReportQueue.get(feedId);
-    }
-
-    @Override
-    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.getAvailableFeedJoint(feedJointKey);
-    }
-
-    @Override
-    public boolean isFeedJointAvailable(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.isFeedPointAvailable(feedJointKey);
-    }
-
-    public void registerFeedJoint(IFeedJoint feedJoint) {
-        feedJobNotificationHandler.registerFeedJoint(feedJoint);
-    }
-
-    public IFeedJoint getFeedJoint(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.getFeedJoint(feedJointKey);
-    }
-
-    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        feedJobNotificationHandler.registerFeedEventSubscriber(connectionId, subscriber);
-    }
-
-    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        feedJobNotificationHandler.deregisterFeedEventSubscriber(connectionId, subscriber);
-
-    }
-
-    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getCollectJobSpecification(connectionId);
-    }
-
-    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedCollectJobId(connectionId);
-    }
-
-}
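A side note for readers of the diff above: notifyNodeFailure() groups the impacted intake and
connect jobs per dead node with a get-then-put-if-null pattern on a HashMap. Below is a minimal,
self-contained sketch of the same grouping idiom, not part of the commit, assuming Java 8's
Map.computeIfAbsent is acceptable (the surrounding code predates that idiom) and using a generic
type J in place of FeedJobInfo:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;
    import java.util.function.BiPredicate;

    final class ImpactedJobGrouping {
        // Groups jobs by the dead node that affects them, mirroring notifyNodeFailure() above.
        static <J> Map<String, List<J>> group(Set<String> deadNodeIds, List<J> jobs,
                BiPredicate<J, String> affects) {
            Map<String, List<J>> impacted = new HashMap<>();
            for (String deadNode : deadNodeIds) {
                for (J job : jobs) {
                    if (affects.test(job, deadNode)) {
                        impacted.computeIfAbsent(deadNode, k -> new ArrayList<>()).add(job);
                    }
                }
            }
            return impacted;
        }
    }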


[27/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java
new file mode 100644
index 0000000..73377b9
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodUtils.java
@@ -0,0 +1,609 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.operators.physical.ExternalDataLookupPOperator;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.external.IndexingConstants;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+
+/**
+ * Static helper functions for rewriting plans using indexes.
+ */
+public class AccessMethodUtils {
+
+    public static void appendPrimaryIndexTypes(Dataset dataset, IAType itemType, List<Object> target)
+            throws IOException, AlgebricksException {
+        ARecordType recordType = (ARecordType) itemType;
+        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+        for (List<String> partitioningKey : partitioningKeys) {
+            target.add(recordType.getSubFieldType(partitioningKey));
+        }
+        target.add(itemType);
+    }
+
+    public static ConstantExpression createStringConstant(String str) {
+        return new ConstantExpression(new AsterixConstantValue(new AString(str)));
+    }
+
+    public static ConstantExpression createInt32Constant(int i) {
+        return new ConstantExpression(new AsterixConstantValue(new AInt32(i)));
+    }
+
+    public static ConstantExpression createBooleanConstant(boolean b) {
+        if (b) {
+            return new ConstantExpression(new AsterixConstantValue(ABoolean.TRUE));
+        } else {
+            return new ConstantExpression(new AsterixConstantValue(ABoolean.FALSE));
+        }
+    }
+
+    public static String getStringConstant(Mutable<ILogicalExpression> expr) {
+        IAObject obj = ((AsterixConstantValue) ((ConstantExpression) expr.getValue()).getValue()).getObject();
+        return ((AString) obj).getStringValue();
+    }
+
+    public static int getInt32Constant(Mutable<ILogicalExpression> expr) {
+        IAObject obj = ((AsterixConstantValue) ((ConstantExpression) expr.getValue()).getValue()).getObject();
+        return ((AInt32) obj).getIntegerValue();
+    }
+
+    public static boolean getBooleanConstant(Mutable<ILogicalExpression> expr) {
+        IAObject obj = ((AsterixConstantValue) ((ConstantExpression) expr.getValue()).getValue()).getObject();
+        return ((ABoolean) obj).getBoolean();
+    }
+
+    public static boolean analyzeFuncExprArgsForOneConstAndVar(AbstractFunctionCallExpression funcExpr,
+            AccessMethodAnalysisContext analysisCtx) {
+        IAlgebricksConstantValue constFilterVal = null;
+        LogicalVariable fieldVar = null;
+        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+        // One of the args must be a constant, and the other arg must be a variable.
+        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT
+                && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            // The arguments of contains() are asymmetrical: the index is usable only when the field is the first argument.
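+            // For example, contains($text, "phrase") can use an index on $text's field, whereas
+            // contains("phrase", $text) cannot, so the constant-first form is rejected here.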
+            if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
+                return false;
+            }
+            ConstantExpression constExpr = (ConstantExpression) arg1;
+            constFilterVal = constExpr.getValue();
+            VariableReferenceExpression varExpr = (VariableReferenceExpression) arg2;
+            fieldVar = varExpr.getVariableReference();
+        } else if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
+                && arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+            ConstantExpression constExpr = (ConstantExpression) arg2;
+            constFilterVal = constExpr.getValue();
+            VariableReferenceExpression varExpr = (VariableReferenceExpression) arg1;
+            fieldVar = varExpr.getVariableReference();
+        } else {
+            return false;
+        }
+        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, fieldVar, constFilterVal);
+        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
+            //avoid additional optFuncExpressions in case of a join
+            if (optFuncExpr.getFuncExpr().equals(funcExpr))
+                return true;
+        }
+        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
+        return true;
+    }
+
+    public static boolean analyzeFuncExprArgsForTwoVars(AbstractFunctionCallExpression funcExpr,
+            AccessMethodAnalysisContext analysisCtx) {
+        LogicalVariable fieldVar1 = null;
+        LogicalVariable fieldVar2 = null;
+        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+        if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
+                && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            fieldVar1 = ((VariableReferenceExpression) arg1).getVariableReference();
+            fieldVar2 = ((VariableReferenceExpression) arg2).getVariableReference();
+        } else {
+            return false;
+        }
+        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, new LogicalVariable[] { fieldVar1,
+                fieldVar2 }, null);
+        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
+            //avoid additional optFuncExpressions in case of a join
+            if (optFuncExpr.getFuncExpr().equals(funcExpr))
+                return true;
+        }
+        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
+        return true;
+    }
+
+    public static int getNumSecondaryKeys(Index index, ARecordType recordType) throws AlgebricksException {
+        switch (index.getIndexType()) {
+            case BTREE:
+            case SINGLE_PARTITION_WORD_INVIX:
+            case SINGLE_PARTITION_NGRAM_INVIX:
+            case LENGTH_PARTITIONED_WORD_INVIX:
+            case LENGTH_PARTITIONED_NGRAM_INVIX: {
+                return index.getKeyFieldNames().size();
+            }
+            case RTREE: {
+                Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
+                        index.getKeyFieldNames().get(0), recordType);
+                IAType keyType = keyPairType.first;
+                int numDimensions = NonTaggedFormatUtil.getNumDimensions(keyType.getTypeTag());
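+                // For example, a 2-d POINT key yields numDimensions * 2 = 4 secondary key fields (the MBR coordinates).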
+                return numDimensions * 2;
+            }
+            default: {
+                throw new AlgebricksException("Unknown index kind: " + index.getIndexType());
+            }
+        }
+    }
+
+    /**
+     * Appends the types of the fields produced by the given secondary index to dest.
+     */
+    public static void appendSecondaryIndexTypes(Dataset dataset, ARecordType recordType, Index index,
+            boolean primaryKeysOnly, List<Object> dest) throws AlgebricksException {
+        if (!primaryKeysOnly) {
+            switch (index.getIndexType()) {
+                case BTREE:
+                case SINGLE_PARTITION_WORD_INVIX:
+                case SINGLE_PARTITION_NGRAM_INVIX: {
+                    for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
+                        Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes()
+                                .get(i), index.getKeyFieldNames().get(i), recordType);
+                        dest.add(keyPairType.first);
+                    }
+                    break;
+                }
+                case RTREE: {
+                    Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(
+                            index.getKeyFieldTypes().get(0), index.getKeyFieldNames().get(0), recordType);
+                    IAType keyType = keyPairType.first;
+                    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(keyType.getTypeTag());
+                    int numKeys = getNumSecondaryKeys(index, recordType);
+                    for (int i = 0; i < numKeys; i++) {
+                        dest.add(nestedKeyType);
+                    }
+                    break;
+                }
+                case LENGTH_PARTITIONED_NGRAM_INVIX:
+                case LENGTH_PARTITIONED_WORD_INVIX:
+                default:
+                    break;
+            }
+        }
+        // Primary keys.
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            //add primary keys
+            try {
+                appendExternalRecPrimaryKeys(dataset, dest);
+            } catch (AsterixException e) {
+                throw new AlgebricksException(e);
+            }
+        } else {
+            List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+            for (List<String> partitioningKey : partitioningKeys) {
+                try {
+                    dest.add(recordType.getSubFieldType(partitioningKey));
+                } catch (IOException e) {
+                    throw new AlgebricksException(e);
+                }
+            }
+        }
+    }
+
+    public static void appendSecondaryIndexOutputVars(Dataset dataset, ARecordType recordType, Index index,
+            boolean primaryKeysOnly, IOptimizationContext context, List<LogicalVariable> dest)
+            throws AlgebricksException {
+        int numPrimaryKeys = 0;
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            numPrimaryKeys = IndexingConstants.getRIDSize(dataset);
+        } else {
+            numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+        }
+        int numSecondaryKeys = getNumSecondaryKeys(index, recordType);
+        int numVars = (primaryKeysOnly) ? numPrimaryKeys : numPrimaryKeys + numSecondaryKeys;
+        for (int i = 0; i < numVars; i++) {
+            dest.add(context.newVar());
+        }
+    }
+
+    public static List<LogicalVariable> getPrimaryKeyVarsFromSecondaryUnnestMap(Dataset dataset,
+            ILogicalOperator unnestMapOp) {
+        int numPrimaryKeys;
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            numPrimaryKeys = IndexingConstants.getRIDSize(dataset);
+        } else {
+            numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+        }
+        List<LogicalVariable> primaryKeyVars = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> sourceVars = ((UnnestMapOperator) unnestMapOp).getVariables();
+        // Assumes the primary keys are located at the end.
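+        // (The secondary unnest-map output is laid out as [secondary keys..., primary keys...];
+        // see appendSecondaryIndexTypes/appendSecondaryIndexOutputVars above.)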
+        int start = sourceVars.size() - numPrimaryKeys;
+        int stop = sourceVars.size();
+        for (int i = start; i < stop; i++) {
+            primaryKeyVars.add(sourceVars.get(i));
+        }
+        return primaryKeyVars;
+    }
+
+    public static List<LogicalVariable> getPrimaryKeyVarsFromPrimaryUnnestMap(Dataset dataset,
+            ILogicalOperator unnestMapOp) {
+        int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+        List<LogicalVariable> primaryKeyVars = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> sourceVars = ((UnnestMapOperator) unnestMapOp).getVariables();
+        // Assumes the primary keys are located at the beginning.
+        for (int i = 0; i < numPrimaryKeys; i++) {
+            primaryKeyVars.add(sourceVars.get(i));
+        }
+        return primaryKeyVars;
+    }
+
+    /**
+     * Returns the search key expression which feeds a secondary-index search. If we are optimizing a selection query, this method returns
+     * a ConstantExpression built from the first constant value in the optimizable function expression.
+     * If we are optimizing a join, then this method returns the VariableReferenceExpression that should feed the secondary index probe.
+     *
+     * @throws AlgebricksException
+     */
+    public static Pair<ILogicalExpression, Boolean> createSearchKeyExpr(IOptimizableFuncExpr optFuncExpr,
+            OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree)
+            throws AlgebricksException {
+        if (probeSubTree == null) {
+            // We are optimizing a selection query. Search key is a constant.
+            // Type checking and type promotion are done here.
+            IAType fieldType = optFuncExpr.getFieldType(0);
+            IAObject constantObj = ((AsterixConstantValue) optFuncExpr.getConstantVal(0)).getObject();
+            ATypeTag constantValueTag = constantObj.getType().getTypeTag();
+            // type casting applied?
+            boolean typeCastingApplied = false;
+            // type casting happened from real (FLOAT, DOUBLE) value -> INT value?
+            boolean realTypeConvertedToIntegerType = false;
+            AsterixConstantValue replacedConstantValue = null;
+
+            // If the constant type and the target type do not match, we do a type conversion.
+            if (constantValueTag != fieldType.getTypeTag()) {
+                replacedConstantValue = ATypeHierarchy.getAsterixConstantValueFromNumericTypeObject(constantObj,
+                        fieldType.getTypeTag());
+                if (replacedConstantValue != null) {
+                    typeCastingApplied = true;
+                }
+
+                // Check whether the constant is a REAL value and the target field is of an INT type.
+                // In that case the search parameter must be adjusted; refer to the caller for details.
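+                // For example, a DOUBLE constant compared against an INT32 field is promoted to an integer,
+                // and the caller compensates for the rounding when the comparison is LT(<) or GT(>)
+                // (see the realTypeConvertedToIntegerType handling in BTreeAccessMethod).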
+                switch (constantValueTag) {
+                    case DOUBLE:
+                    case FLOAT:
+                        switch (fieldType.getTypeTag()) {
+                            case INT8:
+                            case INT16:
+                            case INT32:
+                            case INT64:
+                                realTypeConvertedToIntegerType = true;
+                                break;
+                            default:
+                                break;
+                        }
+                    default:
+                        break;
+                }
+            }
+
+            if (typeCastingApplied) {
+                return new Pair<ILogicalExpression, Boolean>(new ConstantExpression(replacedConstantValue),
+                        realTypeConvertedToIntegerType);
+            } else {
+                return new Pair<ILogicalExpression, Boolean>(new ConstantExpression(optFuncExpr.getConstantVal(0)),
+                        false);
+            }
+        } else {
+            // We are optimizing a join query. Determine which variable feeds the secondary index.
+            if (optFuncExpr.getOperatorSubTree(0) == null || optFuncExpr.getOperatorSubTree(0) == probeSubTree) {
+                return new Pair<ILogicalExpression, Boolean>(new VariableReferenceExpression(
+                        optFuncExpr.getLogicalVar(0)), false);
+            } else {
+                return new Pair<ILogicalExpression, Boolean>(new VariableReferenceExpression(
+                        optFuncExpr.getLogicalVar(1)), false);
+            }
+        }
+    }
+
+    /**
+     * Returns the first expr optimizable by this index.
+     */
+    public static IOptimizableFuncExpr chooseFirstOptFuncExpr(Index chosenIndex, AccessMethodAnalysisContext analysisCtx) {
+        List<Pair<Integer, Integer>> indexExprs = analysisCtx.getIndexExprs(chosenIndex);
+        int firstExprIndex = indexExprs.get(0).first;
+        return analysisCtx.matchedFuncExprs.get(firstExprIndex);
+    }
+
+    public static int chooseFirstOptFuncVar(Index chosenIndex, AccessMethodAnalysisContext analysisCtx) {
+        List<Pair<Integer, Integer>> indexExprs = analysisCtx.getIndexExprs(chosenIndex);
+        return indexExprs.get(0).second;
+    }
+
+    public static UnnestMapOperator createSecondaryIndexUnnestMap(Dataset dataset, ARecordType recordType, Index index,
+            ILogicalOperator inputOp, AccessMethodJobGenParams jobGenParams, IOptimizationContext context,
+            boolean outputPrimaryKeysOnly, boolean retainInput) throws AlgebricksException {
+        // The job gen parameters are transferred to the actual job gen via the UnnestMapOperator's function arguments.
+        ArrayList<Mutable<ILogicalExpression>> secondaryIndexFuncArgs = new ArrayList<Mutable<ILogicalExpression>>();
+        jobGenParams.writeToFuncArgs(secondaryIndexFuncArgs);
+        // Variables and types coming out of the secondary-index search.
+        List<LogicalVariable> secondaryIndexUnnestVars = new ArrayList<LogicalVariable>();
+        List<Object> secondaryIndexOutputTypes = new ArrayList<Object>();
+        // Append output variables/types generated by the secondary-index search (not forwarded from input).
+        appendSecondaryIndexOutputVars(dataset, recordType, index, outputPrimaryKeysOnly, context,
+                secondaryIndexUnnestVars);
+        appendSecondaryIndexTypes(dataset, recordType, index, outputPrimaryKeysOnly, secondaryIndexOutputTypes);
+        // An index search is expressed as an unnest over an index-search function.
+        IFunctionInfo secondaryIndexSearch = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.INDEX_SEARCH);
+        UnnestingFunctionCallExpression secondaryIndexSearchFunc = new UnnestingFunctionCallExpression(
+                secondaryIndexSearch, secondaryIndexFuncArgs);
+        secondaryIndexSearchFunc.setReturnsUniqueValues(true);
+        // This is the operator that jobgen will be looking for. It contains an unnest function that has all necessary arguments to determine
+        // which index to use, which variables contain the index-search keys, what is the original dataset, etc.
+        UnnestMapOperator secondaryIndexUnnestOp = new UnnestMapOperator(secondaryIndexUnnestVars,
+                new MutableObject<ILogicalExpression>(secondaryIndexSearchFunc), secondaryIndexOutputTypes, retainInput);
+        secondaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+        context.computeAndSetTypeEnvironmentForOperator(secondaryIndexUnnestOp);
+        secondaryIndexUnnestOp.setExecutionMode(ExecutionMode.PARTITIONED);
+        return secondaryIndexUnnestOp;
+    }
+
+    public static UnnestMapOperator createPrimaryIndexUnnestMap(AbstractDataSourceOperator dataSourceOp,
+            Dataset dataset, ARecordType recordType, ILogicalOperator inputOp, IOptimizationContext context,
+            boolean sortPrimaryKeys, boolean retainInput, boolean retainNull, boolean requiresBroadcast)
+            throws AlgebricksException {
+        List<LogicalVariable> primaryKeyVars = AccessMethodUtils.getPrimaryKeyVarsFromSecondaryUnnestMap(dataset,
+                inputOp);
+        // Optionally add a sort on the primary-index keys before searching the primary index.
+        OrderOperator order = null;
+        if (sortPrimaryKeys) {
+            order = new OrderOperator();
+            for (LogicalVariable pkVar : primaryKeyVars) {
+                Mutable<ILogicalExpression> vRef = new MutableObject<ILogicalExpression>(
+                        new VariableReferenceExpression(pkVar));
+                order.getOrderExpressions().add(
+                        new Pair<IOrder, Mutable<ILogicalExpression>>(OrderOperator.ASC_ORDER, vRef));
+            }
+            // The secondary-index search feeds into the sort.
+            order.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+            order.setExecutionMode(ExecutionMode.LOCAL);
+            context.computeAndSetTypeEnvironmentForOperator(order);
+        }
+        // The job gen parameters are transferred to the actual job gen via the UnnestMapOperator's function arguments.
+        List<Mutable<ILogicalExpression>> primaryIndexFuncArgs = new ArrayList<Mutable<ILogicalExpression>>();
+        BTreeJobGenParams jobGenParams = new BTreeJobGenParams(dataset.getDatasetName(), IndexType.BTREE,
+                dataset.getDataverseName(), dataset.getDatasetName(), retainInput, retainNull, requiresBroadcast);
+        // Set low/high inclusive to true for a point lookup.
+        jobGenParams.setLowKeyInclusive(true);
+        jobGenParams.setHighKeyInclusive(true);
+        jobGenParams.setLowKeyVarList(primaryKeyVars, 0, primaryKeyVars.size());
+        jobGenParams.setHighKeyVarList(primaryKeyVars, 0, primaryKeyVars.size());
+        jobGenParams.setIsEqCondition(true);
+        jobGenParams.writeToFuncArgs(primaryIndexFuncArgs);
+        // Variables and types coming out of the primary-index search.
+        List<LogicalVariable> primaryIndexUnnestVars = new ArrayList<LogicalVariable>();
+        List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
+        // Append output variables/types generated by the primary-index search (not forwarded from input).
+        primaryIndexUnnestVars.addAll(dataSourceOp.getVariables());
+        try {
+            appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+        // An index search is expressed as an unnest over an index-search function.
+        IFunctionInfo primaryIndexSearch = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.INDEX_SEARCH);
+        AbstractFunctionCallExpression primaryIndexSearchFunc = new ScalarFunctionCallExpression(primaryIndexSearch,
+                primaryIndexFuncArgs);
+        // This is the operator that jobgen will be looking for. It contains an unnest function that has all necessary arguments to determine
+        // which index to use, which variables contain the index-search keys, what is the original dataset, etc.
+        UnnestMapOperator primaryIndexUnnestOp = new UnnestMapOperator(primaryIndexUnnestVars,
+                new MutableObject<ILogicalExpression>(primaryIndexSearchFunc), primaryIndexOutputTypes, retainInput);
+        // Fed by the order operator or the secondaryIndexUnnestOp.
+        if (sortPrimaryKeys) {
+            primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(order));
+        } else {
+            primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+        }
+        context.computeAndSetTypeEnvironmentForOperator(primaryIndexUnnestOp);
+        primaryIndexUnnestOp.setExecutionMode(ExecutionMode.PARTITIONED);
+        return primaryIndexUnnestOp;
+    }
+
+    public static ScalarFunctionCallExpression findLOJIsNullFuncInGroupBy(GroupByOperator lojGroupbyOp)
+            throws AlgebricksException {
+        // Find the IS_NULL function whose argument is the null-placeholder variable in the nested plan of the group-by.
+        ALogicalPlanImpl subPlan = (ALogicalPlanImpl) lojGroupbyOp.getNestedPlans().get(0);
+        Mutable<ILogicalOperator> subPlanRootOpRef = subPlan.getRoots().get(0);
+        AbstractLogicalOperator subPlanRootOp = (AbstractLogicalOperator) subPlanRootOpRef.getValue();
+        boolean foundSelectNonNull = false;
+        ScalarFunctionCallExpression isNullFuncExpr = null;
+        AbstractLogicalOperator inputOp = subPlanRootOp;
+        while (inputOp != null) {
+            if (inputOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
+                SelectOperator selectOp = (SelectOperator) inputOp;
+                if (selectOp.getCondition().getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    if (((AbstractFunctionCallExpression) selectOp.getCondition().getValue()).getFunctionIdentifier()
+                            .equals(AlgebricksBuiltinFunctions.NOT)) {
+                        ScalarFunctionCallExpression notFuncExpr = (ScalarFunctionCallExpression) selectOp
+                                .getCondition().getValue();
+                        if (notFuncExpr.getArguments().get(0).getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                            if (((AbstractFunctionCallExpression) notFuncExpr.getArguments().get(0).getValue())
+                                    .getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.IS_NULL)) {
+                                isNullFuncExpr = (ScalarFunctionCallExpression) notFuncExpr.getArguments().get(0)
+                                        .getValue();
+                                if (isNullFuncExpr.getArguments().get(0).getValue().getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                                    foundSelectNonNull = true;
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+            inputOp = inputOp.getInputs().size() > 0 ? (AbstractLogicalOperator) inputOp.getInputs().get(0).getValue()
+                    : null;
+        }
+
+        if (!foundSelectNonNull) {
+            throw new AlgebricksException(
+                    "Could not find the non-null select operator in GroupByOperator for LEFTOUTERJOIN plan optimization.");
+        }
+        return isNullFuncExpr;
+    }
+
+    public static void resetLOJNullPlaceholderVariableInGroupByOp(AccessMethodAnalysisContext analysisCtx,
+            LogicalVariable newNullPlaceholderVaraible, IOptimizationContext context) throws AlgebricksException {
+
+        //reset the null placeholder variable in groupby operator
+        ScalarFunctionCallExpression isNullFuncExpr = analysisCtx.getLOJIsNullFuncInGroupBy();
+        isNullFuncExpr.getArguments().clear();
+        isNullFuncExpr.getArguments().add(
+                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newNullPlaceholderVaraible)));
+
+        //recompute type environment.
+        OperatorPropertiesUtil.typeOpRec(analysisCtx.getLOJGroupbyOpRef(), context);
+    }
+
+    // Helpers added for external dataset indexing.
+    private static void appendExternalRecTypes(Dataset dataset, IAType itemType, List<Object> target) {
+        target.add(itemType);
+    }
+
+    private static void appendExternalRecPrimaryKeys(Dataset dataset, List<Object> target) throws AsterixException {
+        int numPrimaryKeys = IndexingConstants.getRIDSize(dataset);
+        for (int i = 0; i < numPrimaryKeys; i++) {
+            target.add(IndexingConstants.getFieldType(i));
+        }
+    }
+
+    private static void writeVarList(List<LogicalVariable> varList, List<Mutable<ILogicalExpression>> funcArgs) {
+        Mutable<ILogicalExpression> numKeysRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
+                new AsterixConstantValue(new AInt32(varList.size()))));
+        funcArgs.add(numKeysRef);
+        for (LogicalVariable keyVar : varList) {
+            Mutable<ILogicalExpression> keyVarRef = new MutableObject<ILogicalExpression>(
+                    new VariableReferenceExpression(keyVar));
+            funcArgs.add(keyVarRef);
+        }
+    }
+
+    private static void addStringArg(String argument, List<Mutable<ILogicalExpression>> funcArgs) {
+        Mutable<ILogicalExpression> stringRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
+                new AsterixConstantValue(new AString(argument))));
+        funcArgs.add(stringRef);
+    }
+
+    public static ExternalDataLookupOperator createExternalDataLookupUnnestMap(AbstractDataSourceOperator dataSourceOp,
+            Dataset dataset, ARecordType recordType, ILogicalOperator inputOp, IOptimizationContext context,
+            Index secondaryIndex, boolean retainInput, boolean retainNull) throws AlgebricksException {
+        List<LogicalVariable> primaryKeyVars = AccessMethodUtils.getPrimaryKeyVarsFromSecondaryUnnestMap(dataset,
+                inputOp);
+
+        // add a sort on the RID fields before fetching external data.
+        OrderOperator order = new OrderOperator();
+        for (LogicalVariable pkVar : primaryKeyVars) {
+            Mutable<ILogicalExpression> vRef = new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                    pkVar));
+            order.getOrderExpressions().add(
+                    new Pair<IOrder, Mutable<ILogicalExpression>>(OrderOperator.ASC_ORDER, vRef));
+        }
+        // The secondary-index search feeds into the sort.
+        order.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+        order.setExecutionMode(ExecutionMode.LOCAL);
+        context.computeAndSetTypeEnvironmentForOperator(order);
+        List<Mutable<ILogicalExpression>> externalRIDAccessFuncArgs = new ArrayList<Mutable<ILogicalExpression>>();
+        //Add dataverse and dataset to the arguments
+        AccessMethodUtils.addStringArg(dataset.getDataverseName(), externalRIDAccessFuncArgs);
+        AccessMethodUtils.addStringArg(dataset.getDatasetName(), externalRIDAccessFuncArgs);
+        AccessMethodUtils.writeVarList(primaryKeyVars, externalRIDAccessFuncArgs);
+
+        // Variables and types coming out of the external access.
+        List<LogicalVariable> externalAccessByRIDVars = new ArrayList<LogicalVariable>();
+        List<Object> externalAccessOutputTypes = new ArrayList<Object>();
+        // Append output variables/types generated by the data scan (not forwarded from input).
+        externalAccessByRIDVars.addAll(dataSourceOp.getVariables());
+        appendExternalRecTypes(dataset, recordType, externalAccessOutputTypes);
+
+        IFunctionInfo externalAccessByRID = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EXTERNAL_LOOKUP);
+        AbstractFunctionCallExpression externalAccessFunc = new ScalarFunctionCallExpression(externalAccessByRID,
+                externalRIDAccessFuncArgs);
+
+        ExternalDataLookupOperator externalLookupOp = new ExternalDataLookupOperator(externalAccessByRIDVars,
+                new MutableObject<ILogicalExpression>(externalAccessFunc), externalAccessOutputTypes, retainInput,
+                dataSourceOp.getDataSource());
+        // Fed by the order operator or the secondaryIndexUnnestOp.
+        externalLookupOp.getInputs().add(new MutableObject<ILogicalOperator>(order));
+
+        context.computeAndSetTypeEnvironmentForOperator(externalLookupOp);
+        externalLookupOp.setExecutionMode(ExecutionMode.PARTITIONED);
+
+        //set the physical operator
+        AqlSourceId dataSourceId = new AqlSourceId(dataset.getDataverseName(), dataset.getDatasetName());
+        externalLookupOp.setPhysicalOperator(new ExternalDataLookupPOperator(dataSourceId, dataset, recordType,
+                secondaryIndex, primaryKeyVars, false, retainInput, retainNull));
+        return externalLookupOp;
+    }
+}
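A quick usage sketch for the constant helpers above (a reader-side illustration, not part of the
commit; it only assumes the classes in this file and their Algebricks/commons-lang dependencies are
on the classpath): createInt32Constant() and getInt32Constant() round-trip an int through the
ConstantExpression/AsterixConstantValue/AInt32 wrapping used throughout these rules.

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodUtils;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;

    public class ConstantHelperRoundTrip {
        public static void main(String[] args) {
            // Wrap an int the way the access-method rules do ...
            Mutable<ILogicalExpression> ref =
                    new MutableObject<ILogicalExpression>(AccessMethodUtils.createInt32Constant(42));
            // ... and read it back through the matching accessor.
            System.out.println(AccessMethodUtils.getInt32Constant(ref)); // prints 42
        }
    }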

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java
new file mode 100644
index 0000000..22f3b80
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java
@@ -0,0 +1,660 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.optimizer.rules.util.EquivalenceClassUtils;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions.ComparisonKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+
+/**
+ * Class for helping rewrite rules to choose and apply BTree indexes.
+ */
+public class BTreeAccessMethod implements IAccessMethod {
+
+    // Describes whether a search predicate is an open/closed interval.
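+    // With the indexed field on the left-hand side: x >= c maps to LOW_INCLUSIVE, x > c to LOW_EXCLUSIVE,
+    // x <= c to HIGH_INCLUSIVE, x < c to HIGH_EXCLUSIVE, and x = c to EQUAL.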
+    private enum LimitType {
+        LOW_INCLUSIVE,
+        LOW_EXCLUSIVE,
+        HIGH_INCLUSIVE,
+        HIGH_EXCLUSIVE,
+        EQUAL
+    }
+
+    // TODO: There is some redundancy here, since these are listed in AlgebricksBuiltinFunctions as well.
+    private static List<FunctionIdentifier> funcIdents = new ArrayList<FunctionIdentifier>();
+    static {
+        funcIdents.add(AlgebricksBuiltinFunctions.EQ);
+        funcIdents.add(AlgebricksBuiltinFunctions.LE);
+        funcIdents.add(AlgebricksBuiltinFunctions.GE);
+        funcIdents.add(AlgebricksBuiltinFunctions.LT);
+        funcIdents.add(AlgebricksBuiltinFunctions.GT);
+    }
+
+    public static BTreeAccessMethod INSTANCE = new BTreeAccessMethod();
+
+    @Override
+    public List<FunctionIdentifier> getOptimizableFunctions() {
+        return funcIdents;
+    }
+
+    @Override
+    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
+        boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
+        if (!matches) {
+            matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVars(funcExpr, analysisCtx);
+        }
+        return matches;
+    }
+
+    @Override
+    public boolean matchAllIndexExprs() {
+        return false;
+    }
+
+    @Override
+    public boolean matchPrefixIndexExprs() {
+        return true;
+    }
+
+    @Override
+    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
+            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
+            IOptimizationContext context) throws AlgebricksException {
+        SelectOperator select = (SelectOperator) selectRef.getValue();
+        Mutable<ILogicalExpression> conditionRef = select.getCondition();
+        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(selectRef, conditionRef, subTree, null,
+                chosenIndex, analysisCtx, false, false, false, context);
+        if (primaryIndexUnnestOp == null) {
+            return false;
+        }
+        Mutable<ILogicalOperator> opRef = (subTree.assignsAndUnnestsRefs.isEmpty()) ? null
+                : subTree.assignsAndUnnestsRefs.get(0);
+        ILogicalOperator op = null;
+        if (opRef != null) {
+            op = opRef.getValue();
+        }
+        // Generate new select using the new condition.
+        if (conditionRef.getValue() != null) {
+            select.getInputs().clear();
+            if (op != null) {
+                subTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+                select.getInputs().add(new MutableObject<ILogicalOperator>(op));
+            } else {
+                select.getInputs().add(new MutableObject<ILogicalOperator>(primaryIndexUnnestOp));
+            }
+        } else {
+            ((AbstractLogicalOperator) primaryIndexUnnestOp).setExecutionMode(ExecutionMode.PARTITIONED);
+            if (op != null) {
+                subTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+                selectRef.setValue(op);
+            } else {
+                selectRef.setValue(primaryIndexUnnestOp);
+            }
+        }
+        return true;
+    }
+
+    @Override
+    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
+            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
+            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
+            boolean hasGroupBy) throws AlgebricksException {
+        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
+        Mutable<ILogicalExpression> conditionRef = joinOp.getCondition();
+        // Determine if the index is applicable on the left or right side (if both, we arbitrarily prefer the left side).
+        Dataset dataset = analysisCtx.indexDatasetMap.get(chosenIndex);
+        // Determine probe and index subtrees based on chosen index.
+        OptimizableOperatorSubTree indexSubTree = null;
+        OptimizableOperatorSubTree probeSubTree = null;
+        if (!isLeftOuterJoin && leftSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(leftSubTree.dataset.getDatasetName())) {
+            indexSubTree = leftSubTree;
+            probeSubTree = rightSubTree;
+        } else if (rightSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
+            indexSubTree = rightSubTree;
+            probeSubTree = leftSubTree;
+        }
+        if (indexSubTree == null) {
+            // This may happen in the left-outer-join case.
+            return false;
+        }
+
+        LogicalVariable newNullPlaceHolderVar = null;
+        if (isLeftOuterJoin) {
+            //get a new null place holder variable that is the first field variable of the primary key
+            //from the indexSubTree's datasourceScanOp
+            newNullPlaceHolderVar = indexSubTree.getDataSourceVariables().get(0);
+        }
+
+        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(joinRef, conditionRef, indexSubTree,
+                probeSubTree, chosenIndex, analysisCtx, true, isLeftOuterJoin, true, context);
+        if (primaryIndexUnnestOp == null) {
+            return false;
+        }
+
+        if (isLeftOuterJoin && hasGroupBy) {
+            //reset the null place holder variable
+            AccessMethodUtils.resetLOJNullPlaceholderVariableInGroupByOp(analysisCtx, newNullPlaceHolderVar, context);
+        }
+
+        // If there are conditions left, add a new select operator on top.
+        indexSubTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+        if (conditionRef.getValue() != null) {
+            SelectOperator topSelect = new SelectOperator(conditionRef, isLeftOuterJoin, newNullPlaceHolderVar);
+            topSelect.getInputs().add(indexSubTree.rootRef);
+            topSelect.setExecutionMode(ExecutionMode.LOCAL);
+            context.computeAndSetTypeEnvironmentForOperator(topSelect);
+            // Replace the original join with the new subtree rooted at the select op.
+            joinRef.setValue(topSelect);
+        } else {
+            joinRef.setValue(indexSubTree.rootRef.getValue());
+        }
+        return true;
+    }
+
+    private ILogicalOperator createSecondaryToPrimaryPlan(Mutable<ILogicalOperator> topOpRef,
+            Mutable<ILogicalExpression> conditionRef, OptimizableOperatorSubTree indexSubTree,
+            OptimizableOperatorSubTree probeSubTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
+            boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context)
+            throws AlgebricksException {
+        Dataset dataset = indexSubTree.dataset;
+        ARecordType recordType = indexSubTree.recordType;
+        // We made sure indexSubTree has a datasource scan.
+        AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.dataSourceRef.getValue();
+        List<Pair<Integer, Integer>> exprAndVarList = analysisCtx.indexExprsAndVars.get(chosenIndex);
+        List<IOptimizableFuncExpr> matchedFuncExprs = analysisCtx.matchedFuncExprs;
+        int numSecondaryKeys = analysisCtx.indexNumMatchedKeys.get(chosenIndex);
+        // List of function expressions that will be replaced by the secondary-index search.
+        // These func exprs will be removed from the select condition at the very end of this method.
+        Set<ILogicalExpression> replacedFuncExprs = new HashSet<ILogicalExpression>();
+
+        // Info on high and low keys for the BTree search predicate.
+        ILogicalExpression[] lowKeyExprs = new ILogicalExpression[numSecondaryKeys];
+        ILogicalExpression[] highKeyExprs = new ILogicalExpression[numSecondaryKeys];
+        LimitType[] lowKeyLimits = new LimitType[numSecondaryKeys];
+        LimitType[] highKeyLimits = new LimitType[numSecondaryKeys];
+        boolean[] lowKeyInclusive = new boolean[numSecondaryKeys];
+        boolean[] highKeyInclusive = new boolean[numSecondaryKeys];
+
+        // TODO: For now we don't do any sophisticated analysis of the func exprs to come up with "the best" range predicate.
+        // If we can't figure out how to integrate a certain funcExpr into the current predicate, we just bail by setting this flag.
+        boolean couldntFigureOut = false;
+        boolean doneWithExprs = false;
+        boolean isEqCondition = false;
+        // TODO: For now don't consider prefix searches.
+        BitSet setLowKeys = new BitSet(numSecondaryKeys);
+        BitSet setHighKeys = new BitSet(numSecondaryKeys);
+        // Go through the func exprs listed as optimizable by the chosen index,
+        // and formulate a range predicate on the secondary-index keys.
+
+        // Checks whether a type cast happened from a real (FLOAT, DOUBLE) value to an INT value,
+        // since we have rounding issues when dealing with the LT(<) or GT(>) operators.
+        boolean realTypeConvertedToIntegerType = false;
+
+        for (Pair<Integer, Integer> exprIndex : exprAndVarList) {
+            // Position of the field of matchedFuncExprs.get(exprIndex) in the chosen index's indexed exprs.
+            IOptimizableFuncExpr optFuncExpr = matchedFuncExprs.get(exprIndex.first);
+            int keyPos = indexOf(optFuncExpr.getFieldName(0), chosenIndex.getKeyFieldNames());
+            if (keyPos < 0) {
+                if (optFuncExpr.getNumLogicalVars() > 1) {
+                    // If we are optimizing a join, the matching field may be the second field name.
+                    keyPos = indexOf(optFuncExpr.getFieldName(1), chosenIndex.getKeyFieldNames());
+                }
+            }
+            if (keyPos < 0) {
+                throw new AlgebricksException(
+                        "Could not match optimizable function expression to any index field name.");
+            }
+            Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(
+                    optFuncExpr, indexSubTree, probeSubTree);
+            ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
+            realTypeConvertedToIntegerType = returnedSearchKeyExpr.second;
+
+            LimitType limit = getLimitType(optFuncExpr, probeSubTree);
+
+            // If a DOUBLE or FLOAT constant is converted to an INT type value,
+            // we need to check a corner case where two real values are located between an INT value.
+            // For example, for the following query,
+            //
+            // for $emp in dataset empDataset
+            // where $emp.age > double("2.3") and $emp.age < double("3.3")
+            // return $emp.id;
+            //
+            // It should generate a result if there is a tuple that satisfies the condition, which is 3.
+            // However, it does not generate the desired result, since finding candidates
+            // fails after truncating the fraction part (there is no INT value that is greater than 2 and less than 3).
+            //
+            // Therefore, we convert LT(<) to LE(<=) and GT(>) to GE(>=) to find candidates.
+            // This does not change the result of an actual comparison since this conversion is only applied
+            // for finding candidates from an index.
+            //
+            if (realTypeConvertedToIntegerType) {
+                if (limit == LimitType.HIGH_EXCLUSIVE) {
+                    limit = LimitType.HIGH_INCLUSIVE;
+                } else if (limit == LimitType.LOW_EXCLUSIVE) {
+                    limit = LimitType.LOW_INCLUSIVE;
+                }
+            }
+
+            switch (limit) {
+                case EQUAL: {
+                    if (lowKeyLimits[keyPos] == null && highKeyLimits[keyPos] == null) {
+                        lowKeyLimits[keyPos] = highKeyLimits[keyPos] = limit;
+                        lowKeyInclusive[keyPos] = highKeyInclusive[keyPos] = true;
+                        lowKeyExprs[keyPos] = highKeyExprs[keyPos] = searchKeyExpr;
+                        setLowKeys.set(keyPos);
+                        setHighKeys.set(keyPos);
+                        isEqCondition = true;
+                    } else {
+                        // Has already been set to the identical values. When optimizing a join we may encounter the
+                        // same optimizable expression twice (once from analyzing each side of the join).
+                        if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true
+                                && lowKeyExprs[keyPos].equals(searchKeyExpr) && highKeyLimits[keyPos] == limit
+                                && highKeyInclusive[keyPos] == true && highKeyExprs[keyPos].equals(searchKeyExpr)) {
+                            isEqCondition = true;
+                            break;
+                        }
+                        couldntFigureOut = true;
+                    }
+                    // TODO: For now don't consider prefix searches.
+                    // If high and low keys are set, we exit for now.
+                    if (setLowKeys.cardinality() == numSecondaryKeys && setHighKeys.cardinality() == numSecondaryKeys) {
+                        doneWithExprs = true;
+                    }
+                    break;
+                }
+                case HIGH_EXCLUSIVE: {
+                    if (highKeyLimits[keyPos] == null || (highKeyLimits[keyPos] != null && highKeyInclusive[keyPos])) {
+                        highKeyLimits[keyPos] = limit;
+                        highKeyExprs[keyPos] = searchKeyExpr;
+                        highKeyInclusive[keyPos] = false;
+                    } else {
+                        // Has already been set to the identical values. When optimizing a join we may encounter the
+                        // same optimizable expression twice (once from analyzing each side of the join).
+                        if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == false
+                                && highKeyExprs[keyPos].equals(searchKeyExpr)) {
+                            break;
+                        }
+                        couldntFigureOut = true;
+                        doneWithExprs = true;
+                    }
+                    break;
+                }
+                case HIGH_INCLUSIVE: {
+                    if (highKeyLimits[keyPos] == null) {
+                        highKeyLimits[keyPos] = limit;
+                        highKeyExprs[keyPos] = searchKeyExpr;
+                        highKeyInclusive[keyPos] = true;
+                    } else {
+                        // Has already been set to the identical values. When optimizing a join we may encounter the
+                        // same optimizable expression twice (once from analyzing each side of the join).
+                        if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == true
+                                && highKeyExprs[keyPos].equals(searchKeyExpr)) {
+                            break;
+                        }
+                        couldntFigureOut = true;
+                        doneWithExprs = true;
+                    }
+                    break;
+                }
+                case LOW_EXCLUSIVE: {
+                    if (lowKeyLimits[keyPos] == null || (lowKeyLimits[keyPos] != null && lowKeyInclusive[keyPos])) {
+                        lowKeyLimits[keyPos] = limit;
+                        lowKeyExprs[keyPos] = searchKeyExpr;
+                        lowKeyInclusive[keyPos] = false;
+                    } else {
+                        // Has already been set to the identical values. When optimizing a join we may encounter the
+                        // same optimizable expression twice (once from analyzing each side of the join).
+                        if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == false
+                                && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
+                            break;
+                        }
+                        couldntFigureOut = true;
+                        doneWithExprs = true;
+                    }
+                    break;
+                }
+                case LOW_INCLUSIVE: {
+                    if (lowKeyLimits[keyPos] == null) {
+                        lowKeyLimits[keyPos] = limit;
+                        lowKeyExprs[keyPos] = searchKeyExpr;
+                        lowKeyInclusive[keyPos] = true;
+                    } else {
+                        // Has already been set to the identical values. When optimizing a join we may encounter the
+                        // same optimizable expression twice (once from analyzing each side of the join).
+                        if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true
+                                && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
+                            break;
+                        }
+                        couldntFigureOut = true;
+                        doneWithExprs = true;
+                    }
+                    break;
+                }
+                default: {
+                    throw new IllegalStateException();
+                }
+            }
+            if (!couldntFigureOut) {
+                // Remember to remove this funcExpr later.
+                replacedFuncExprs.add(matchedFuncExprs.get(exprIndex.first).getFuncExpr());
+            }
+            if (doneWithExprs) {
+                break;
+            }
+        }
+        if (couldntFigureOut) {
+            return null;
+        }
+
+        // If the select condition contains mixed open/closed intervals on multiple keys, then we make all intervals closed to obtain a superset of answers and leave the original selection in place.
+        boolean primaryIndexPostProccessingIsNeeded = false;
+        for (int i = 1; i < numSecondaryKeys; ++i) {
+            if (lowKeyInclusive[i] != lowKeyInclusive[0]) {
+                Arrays.fill(lowKeyInclusive, true);
+                primaryIndexPostProccessingIsNeeded = true;
+                break;
+            }
+        }
+        for (int i = 1; i < numSecondaryKeys; ++i) {
+            if (highKeyInclusive[i] != highKeyInclusive[0]) {
+                Arrays.fill(highKeyInclusive, true);
+                primaryIndexPostProccessingIsNeeded = true;
+                break;
+            }
+        }
+
+        // determine cases when prefix search could be applied
+        for (int i = 1; i < lowKeyExprs.length; i++) {
+            if (lowKeyLimits[0] == null && lowKeyLimits[i] != null || lowKeyLimits[0] != null
+                    && lowKeyLimits[i] == null || highKeyLimits[0] == null && highKeyLimits[i] != null
+                    || highKeyLimits[0] != null && highKeyLimits[i] == null) {
+                numSecondaryKeys--;
+                primaryIndexPostProccessingIsNeeded = true;
+            }
+        }
+        if (lowKeyLimits[0] == null) {
+            lowKeyInclusive[0] = true;
+        }
+        if (highKeyLimits[0] == null) {
+            highKeyInclusive[0] = true;
+        }
+
+        // Here we generate vars and funcs for assigning the secondary-index keys to be fed into the secondary-index search.
+        // List of variables for the assign.
+        ArrayList<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
+        // List of variables and expressions for the assign.
+        ArrayList<LogicalVariable> assignKeyVarList = new ArrayList<LogicalVariable>();
+        ArrayList<Mutable<ILogicalExpression>> assignKeyExprList = new ArrayList<Mutable<ILogicalExpression>>();
+        int numLowKeys = createKeyVarsAndExprs(numSecondaryKeys, lowKeyLimits, lowKeyExprs, assignKeyVarList,
+                assignKeyExprList, keyVarList, context);
+        int numHighKeys = createKeyVarsAndExprs(numSecondaryKeys, highKeyLimits, highKeyExprs, assignKeyVarList,
+                assignKeyExprList, keyVarList, context);
+
+        BTreeJobGenParams jobGenParams = new BTreeJobGenParams(chosenIndex.getIndexName(), IndexType.BTREE,
+                dataset.getDataverseName(), dataset.getDatasetName(), retainInput, retainNull, requiresBroadcast);
+        jobGenParams.setLowKeyInclusive(lowKeyInclusive[0]);
+        jobGenParams.setHighKeyInclusive(highKeyInclusive[0]);
+        jobGenParams.setIsEqCondition(isEqCondition);
+        jobGenParams.setLowKeyVarList(keyVarList, 0, numLowKeys);
+        jobGenParams.setHighKeyVarList(keyVarList, numLowKeys, numHighKeys);
+
+        ILogicalOperator inputOp = null;
+        if (!assignKeyVarList.isEmpty()) {
+            // Assign operator that sets the constant secondary-index search-key fields if necessary.
+            AssignOperator assignConstantSearchKeys = new AssignOperator(assignKeyVarList, assignKeyExprList);
+            // Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
+            assignConstantSearchKeys.getInputs().add(dataSourceOp.getInputs().get(0));
+            assignConstantSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
+            inputOp = assignConstantSearchKeys;
+        } else {
+            // All index search keys are variables.
+            inputOp = probeSubTree.root;
+        }
+
+        UnnestMapOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
+                chosenIndex, inputOp, jobGenParams, context, false, retainInput);
+
+        // Generate the rest of the upstream plan which feeds the search results into the primary index.
+        UnnestMapOperator primaryIndexUnnestOp = null;
+        boolean isPrimaryIndex = chosenIndex.isPrimaryIndex();
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            // External dataset
+            ExternalDataLookupOperator externalDataAccessOp = AccessMethodUtils.createExternalDataLookupUnnestMap(
+                    dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, chosenIndex, retainInput,
+                    retainNull);
+            indexSubTree.dataSourceRef.setValue(externalDataAccessOp);
+            return externalDataAccessOp;
+        } else if (!isPrimaryIndex) {
+            primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp, dataset, recordType,
+                    secondaryIndexUnnestOp, context, true, retainInput, retainNull, false);
+
+            // Replace the datasource scan with the new plan rooted at
+            // primaryIndexUnnestMap.
+            indexSubTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+        } else {
+            List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
+            try {
+                AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
+            List<LogicalVariable> scanVariables = dataSourceOp.getVariables();
+            primaryIndexUnnestOp = new UnnestMapOperator(scanVariables, secondaryIndexUnnestOp.getExpressionRef(),
+                    primaryIndexOutputTypes, retainInput);
+            primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+
+            if (!primaryIndexPostProccessingIsNeeded) {
+                List<Mutable<ILogicalExpression>> remainingFuncExprs = new ArrayList<Mutable<ILogicalExpression>>();
+                getNewConditionExprs(conditionRef, replacedFuncExprs, remainingFuncExprs);
+                // Generate new condition.
+                if (!remainingFuncExprs.isEmpty()) {
+                    ILogicalExpression pulledCond = createSelectCondition(remainingFuncExprs);
+                    conditionRef.setValue(pulledCond);
+                } else {
+                    conditionRef.setValue(null);
+                }
+            }
+
+            // Adds equivalence classes --- one equivalence class between a primary key
+            // variable and a record field-access expression.
+            EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(primaryIndexUnnestOp, scanVariables,
+                    recordType, dataset, context);
+        }
+
+        return primaryIndexUnnestOp;
+    }
+
+    private int createKeyVarsAndExprs(int numKeys, LimitType[] keyLimits, ILogicalExpression[] searchKeyExprs,
+            ArrayList<LogicalVariable> assignKeyVarList, ArrayList<Mutable<ILogicalExpression>> assignKeyExprList,
+            ArrayList<LogicalVariable> keyVarList, IOptimizationContext context) {
+        if (keyLimits[0] == null) {
+            return 0;
+        }
+        for (int i = 0; i < numKeys; i++) {
+            ILogicalExpression searchKeyExpr = searchKeyExprs[i];
+            LogicalVariable keyVar = null;
+            if (searchKeyExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+                keyVar = context.newVar();
+                assignKeyExprList.add(new MutableObject<ILogicalExpression>(searchKeyExpr));
+                assignKeyVarList.add(keyVar);
+            } else {
+                keyVar = ((VariableReferenceExpression) searchKeyExpr).getVariableReference();
+            }
+            keyVarList.add(keyVar);
+        }
+        return numKeys;
+    }
+
+    private void getNewConditionExprs(Mutable<ILogicalExpression> conditionRef,
+            Set<ILogicalExpression> replacedFuncExprs, List<Mutable<ILogicalExpression>> remainingFuncExprs) {
+        remainingFuncExprs.clear();
+        if (replacedFuncExprs.isEmpty()) {
+            return;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) conditionRef.getValue();
+        if (replacedFuncExprs.size() == 1) {
+            Iterator<ILogicalExpression> it = replacedFuncExprs.iterator();
+            if (!it.hasNext()) {
+                return;
+            }
+            if (funcExpr == it.next()) {
+                // There are no remaining function exprs.
+                return;
+            }
+        }
+        // The original select cond must be an AND. Check it just to be sure.
+        if (funcExpr.getFunctionIdentifier() != AlgebricksBuiltinFunctions.AND) {
+            throw new IllegalStateException();
+        }
+        // Clean the conjuncts.
+        for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
+            ILogicalExpression argExpr = arg.getValue();
+            if (argExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                continue;
+            }
+            // If the function expression was not replaced by the new index
+            // plan, then add it to the list of remaining function expressions.
+            if (!replacedFuncExprs.contains(argExpr)) {
+                remainingFuncExprs.add(arg);
+            }
+        }
+    }
+
+    private <T> int indexOf(T value, List<T> coll) {
+        int i = 0;
+        for (T member : coll) {
+            if (member.equals(value)) {
+                return i;
+            }
+            i++;
+        }
+        return -1;
+    }
+
+    private LimitType getLimitType(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree probeSubTree) {
+        ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(optFuncExpr.getFuncExpr()
+                .getFunctionIdentifier());
+        LimitType limit = null;
+        switch (ck) {
+            case EQ: {
+                limit = LimitType.EQUAL;
+                break;
+            }
+            case GE: {
+                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.HIGH_INCLUSIVE : LimitType.LOW_INCLUSIVE;
+                break;
+            }
+            case GT: {
+                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.HIGH_EXCLUSIVE : LimitType.LOW_EXCLUSIVE;
+                break;
+            }
+            case LE: {
+                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.LOW_INCLUSIVE : LimitType.HIGH_INCLUSIVE;
+                break;
+            }
+            case LT: {
+                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.LOW_EXCLUSIVE : LimitType.HIGH_EXCLUSIVE;
+                break;
+            }
+            case NEQ: {
+                limit = null;
+                break;
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+        return limit;
+    }
+
+    private boolean probeIsOnLhs(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree probeSubTree) {
+        if (probeSubTree == null) {
+            // We are optimizing a selection query. Search key is a constant. Return true if constant is on lhs.
+            return optFuncExpr.getFuncExpr().getArguments().get(0) == optFuncExpr.getConstantVal(0);
+        } else {
+            // We are optimizing a join query. Determine whether the feeding variable is on the lhs.
+            return (optFuncExpr.getOperatorSubTree(0) == null || optFuncExpr.getOperatorSubTree(0) == probeSubTree);
+        }
+    }
+
+    private ILogicalExpression createSelectCondition(List<Mutable<ILogicalExpression>> predList) {
+        if (predList.size() > 1) {
+            IFunctionInfo finfo = FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.AND);
+            return new ScalarFunctionCallExpression(finfo, predList);
+        }
+        return predList.get(0).getValue();
+    }
+
+    @Override
+    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        // If we are optimizing a join, check for the indexed nested-loop join hint.
+        if (optFuncExpr.getNumLogicalVars() == 2) {
+            if (!optFuncExpr.getFuncExpr().getAnnotations().containsKey(IndexedNLJoinExpressionAnnotation.INSTANCE)) {
+                return false;
+            }
+        }
+        if (!index.isPrimaryIndex()
+                && optFuncExpr.getFuncExpr().getAnnotations()
+                        .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
+            return false;
+        }
+        // No additional analysis required for BTrees.
+        return true;
+    }
+}
\ No newline at end of file
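The bound-relaxation corner case documented in the comments of createSecondaryToPrimaryPlan above is easiest to see with concrete numbers. The following self-contained sketch uses only the JDK and hypothetical data (it is not part of this commit and does not touch any Asterix/Algebricks classes); it shows why an exclusive INT range obtained by truncating DOUBLE constants can miss qualifying tuples, while the relaxed inclusive range plus the retained original predicate does not.

    import java.util.ArrayList;
    import java.util.List;

    /**
     * Self-contained sketch (plain JDK, hypothetical data) of the corner case described above:
     * when DOUBLE/FLOAT search keys are truncated to INT index keys, exclusive bounds must be
     * relaxed to inclusive ones or qualifying tuples are missed by the index search.
     */
    public class TruncatedBoundsSketch {

        // Pretend these are the INT keys stored in a secondary index on "age".
        private static final int[] INDEXED_AGES = { 1, 2, 3, 4 };

        private static List<Integer> indexScan(int low, boolean lowInclusive, int high, boolean highInclusive) {
            List<Integer> candidates = new ArrayList<Integer>();
            for (int key : INDEXED_AGES) {
                boolean aboveLow = lowInclusive ? key >= low : key > low;
                boolean belowHigh = highInclusive ? key <= high : key < high;
                if (aboveLow && belowHigh) {
                    candidates.add(key);
                }
            }
            return candidates;
        }

        public static void main(String[] args) {
            // Original predicate: age > 2.3 and age < 3.3, so only age = 3 qualifies.
            double low = 2.3, high = 3.3;
            int lowInt = (int) low, highInt = (int) high; // truncated to 2 and 3

            // Keeping the bounds exclusive after truncation: the INT interval (2, 3) is empty, so age = 3 is lost.
            System.out.println("exclusive: " + indexScan(lowInt, false, highInt, false)); // prints []

            // Relaxing to inclusive bounds, as the rule does, returns the superset {2, 3};
            // the original predicate, which stays in the plan, then filters out age = 2.
            System.out.println("inclusive: " + indexScan(lowInt, true, highInt, true)); // prints [2, 3]
        }
    }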

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java
new file mode 100644
index 0000000..a3df9d3
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeJobGenParams.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+
+/**
+ * Helper class for reading and writing job-gen parameters for BTree access methods to
+ * and from a list of function arguments, typically of an unnest-map.
+ */
+public class BTreeJobGenParams extends AccessMethodJobGenParams {
+
+    protected List<LogicalVariable> lowKeyVarList;
+    protected List<LogicalVariable> highKeyVarList;
+
+    protected boolean lowKeyInclusive;
+    protected boolean highKeyInclusive;
+    protected boolean isEqCondition;
+
+    public BTreeJobGenParams() {
+        super();
+    }
+
+    public BTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
+        super(indexName, indexType, dataverseName, datasetName, retainInput, retainNull, requiresBroadcast);
+    }
+
+    public void setLowKeyVarList(List<LogicalVariable> keyVarList, int startIndex, int numKeys) {
+        lowKeyVarList = new ArrayList<LogicalVariable>(numKeys);
+        setKeyVarList(keyVarList, lowKeyVarList, startIndex, numKeys);
+    }
+
+    public void setHighKeyVarList(List<LogicalVariable> keyVarList, int startIndex, int numKeys) {
+        highKeyVarList = new ArrayList<LogicalVariable>(numKeys);
+        setKeyVarList(keyVarList, highKeyVarList, startIndex, numKeys);
+    }
+
+    private void setKeyVarList(List<LogicalVariable> src, List<LogicalVariable> dest, int startIndex, int numKeys) {
+        for (int i = 0; i < numKeys; i++) {
+            dest.add(src.get(startIndex + i));
+        }
+    }
+
+    public void setLowKeyInclusive(boolean lowKeyInclusive) {
+        this.lowKeyInclusive = lowKeyInclusive;
+    }
+
+    public void setHighKeyInclusive(boolean highKeyInclusive) {
+        this.highKeyInclusive = highKeyInclusive;
+    }
+
+    public void setIsEqCondition(boolean isEqCondition) {
+        this.isEqCondition = isEqCondition;
+    }
+
+    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.writeToFuncArgs(funcArgs);
+        writeVarList(lowKeyVarList, funcArgs);
+        writeVarList(highKeyVarList, funcArgs);
+        writeBoolean(lowKeyInclusive, funcArgs);
+        writeBoolean(highKeyInclusive, funcArgs);
+        writeBoolean(isEqCondition, funcArgs);
+    }
+
+    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.readFromFuncArgs(funcArgs);
+        int index = super.getNumParams();
+        lowKeyVarList = new ArrayList<LogicalVariable>();
+        highKeyVarList = new ArrayList<LogicalVariable>();
+        int nextIndex = readVarList(funcArgs, index, lowKeyVarList);
+        nextIndex = readVarList(funcArgs, nextIndex, highKeyVarList);
+        nextIndex = readKeyInclusives(funcArgs, nextIndex);
+        readIsEqCondition(funcArgs, nextIndex);
+    }
+
+    private int readKeyInclusives(List<Mutable<ILogicalExpression>> funcArgs, int index) {
+        lowKeyInclusive = ((ConstantExpression) funcArgs.get(index).getValue()).getValue().isTrue();
+        // Read the next function argument at index + 1.
+        highKeyInclusive = ((ConstantExpression) funcArgs.get(index + 1).getValue()).getValue().isTrue();
+        // We have read two of the function arguments, so the next index is at index + 2.
+        return index + 2;
+    }
+
+    private void readIsEqCondition(List<Mutable<ILogicalExpression>> funcArgs, int index) {
+        isEqCondition = ((ConstantExpression) funcArgs.get(index).getValue()).getValue().isTrue();
+    }
+
+    private void writeBoolean(boolean val, List<Mutable<ILogicalExpression>> funcArgs) {
+        ILogicalExpression keyExpr = val ? ConstantExpression.TRUE : ConstantExpression.FALSE;
+        funcArgs.add(new MutableObject<ILogicalExpression>(keyExpr));
+    }
+
+    public List<LogicalVariable> getLowKeyVarList() {
+        return lowKeyVarList;
+    }
+
+    public List<LogicalVariable> getHighKeyVarList() {
+        return highKeyVarList;
+    }
+
+    public boolean isEqCondition() {
+        return isEqCondition;
+    }
+
+    public boolean isLowKeyInclusive() {
+        return lowKeyInclusive;
+    }
+
+    public boolean isHighKeyInclusive() {
+        return highKeyInclusive;
+    }
+}
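As a rough illustration of the write/read symmetry that BTreeJobGenParams depends on (parameters are appended to a flat argument list in a fixed order and consumed back by position, with each reader returning the next unread index), here is a simplified, self-contained sketch. It uses plain Java objects instead of ILogicalExpression function arguments, and the class and method names (FlatParamsSketch, writeTo, readFrom) are hypothetical stand-ins, not part of this commit.

    import java.util.ArrayList;
    import java.util.List;

    /** Simplified stand-in (hypothetical, plain JDK) for positional job-gen parameter encoding. */
    public class FlatParamsSketch {
        boolean lowKeyInclusive;
        boolean highKeyInclusive;
        boolean isEqCondition;

        /** Append the flags in a fixed order, as writeToFuncArgs does with constant expressions. */
        void writeTo(List<Object> args) {
            args.add(lowKeyInclusive);
            args.add(highKeyInclusive);
            args.add(isEqCondition);
        }

        /** Read the flags back from the same positions, as readKeyInclusives/readIsEqCondition do. */
        int readFrom(List<Object> args, int index) {
            lowKeyInclusive = (Boolean) args.get(index);
            highKeyInclusive = (Boolean) args.get(index + 1);
            isEqCondition = (Boolean) args.get(index + 2);
            return index + 3; // next unread position
        }

        public static void main(String[] args) {
            FlatParamsSketch written = new FlatParamsSketch();
            written.lowKeyInclusive = true;
            written.isEqCondition = true;

            List<Object> funcArgs = new ArrayList<Object>();
            written.writeTo(funcArgs);

            FlatParamsSketch read = new FlatParamsSketch();
            read.readFrom(funcArgs, 0);
            // Prints: true false true
            System.out.println(read.lowKeyInclusive + " " + read.highKeyInclusive + " " + read.isEqCondition);
        }
    }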

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IAccessMethod.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IAccessMethod.java
new file mode 100644
index 0000000..d03d1a0
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IAccessMethod.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+
+/**
+ * Interface that an access method should implement to work with the rewrite
+ * rules to apply it for join and/or selection queries. This interface provides
+ * methods for analyzing a select/join condition, and for rewriting the plan
+ * with a given index.
+ */
+public interface IAccessMethod {
+
+    /**
+     * @return A list of function identifiers that are optimizable by this
+     *         access method.
+     */
+    public List<FunctionIdentifier> getOptimizableFunctions();
+
+    /**
+     * Analyzes the arguments of a given optimizable funcExpr to see if this
+     * access method is applicable (e.g., one arg is a constant and one is a
+     * var). We assume that the funcExpr has already been determined to be
+     * optimizable by this access method based on its function identifier. If
+     * funcExpr has been found to be optimizable, this method adds an entry for it
+     * to analysisCtx.matchedFuncExprs for further analysis.
+     *
+     * @return true if funcExpr is optimizable by this access method, false
+     *         otherwise
+     */
+    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx);
+
+    /**
+     * Indicates whether all index expressions must be matched in order for this
+     * index to be applicable.
+     *
+     * @return boolean
+     */
+    public boolean matchAllIndexExprs();
+
+    /**
+     * Indicates whether this index is applicable if only a prefix of the index
+     * expressions is matched.
+     *
+     * @return boolean
+     */
+    public boolean matchPrefixIndexExprs();
+
+    /**
+     * Applies the plan transformation to use chosenIndex to optimize a selection query.
+     */
+    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
+            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
+            IOptimizationContext context) throws AlgebricksException;
+
+    /**
+     * Applies the plan transformation to use chosenIndex to optimize a join query.
+     * In the case of a left outer join, there may or may not be a group-by operation that is needed;
+     * if there is, we will need to include it in the transformation.
+     */
+    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
+            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
+            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
+            boolean hasGroupBy) throws AlgebricksException;
+
+    /**
+     * Analyzes expr to see whether it is optimizable by the given concrete index.
+     * 
+     * @throws AlgebricksException
+     */
+    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) throws AlgebricksException;
+}
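For readers skimming the interface above, the following minimal no-op skeleton shows the shape of an IAccessMethod implementation: it advertises no optimizable functions and never rewrites a plan, so the rewrite rules would simply skip it. The class name NoOpAccessMethod is hypothetical and this sketch is not part of the commit; compare it with BTreeAccessMethod and RTreeAccessMethod elsewhere in this patch for real implementations.

    package edu.uci.ics.asterix.optimizer.rules.am;

    import java.util.Collections;
    import java.util.List;

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.asterix.metadata.entities.Index;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;

    /** Hypothetical no-op access method: claims no optimizable functions and never rewrites a plan. */
    public class NoOpAccessMethod implements IAccessMethod {

        public static final NoOpAccessMethod INSTANCE = new NoOpAccessMethod();

        @Override
        public List<FunctionIdentifier> getOptimizableFunctions() {
            return Collections.emptyList();
        }

        @Override
        public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
                List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
            return false; // nothing to match
        }

        @Override
        public boolean matchAllIndexExprs() {
            return false;
        }

        @Override
        public boolean matchPrefixIndexExprs() {
            return false;
        }

        @Override
        public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
                OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
                IOptimizationContext context) throws AlgebricksException {
            return false; // leave the plan unchanged
        }

        @Override
        public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
                OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
                AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
                boolean hasGroupBy) throws AlgebricksException {
            return false; // leave the plan unchanged
        }

        @Override
        public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) throws AlgebricksException {
            return false;
        }
    }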

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IOptimizableFuncExpr.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
new file mode 100644
index 0000000..326d38b
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+
+/**
+ * Describes a function expression that is optimizable by an access method.
+ * Provides convenient methods for accessing arguments (constants, variables)
+ * and metadata of such a function.
+ */
+public interface IOptimizableFuncExpr {
+    public AbstractFunctionCallExpression getFuncExpr();
+
+    public int getNumLogicalVars();
+
+    public int getNumConstantVals();
+
+    public LogicalVariable getLogicalVar(int index);
+
+    public void setLogicalExpr(int index, ILogicalExpression logExpr);
+
+    public ILogicalExpression getLogicalExpr(int index);
+
+    public void setFieldName(int index, List<String> fieldName);
+
+    public List<String> getFieldName(int index);
+
+    public void setFieldType(int index, IAType fieldName);
+
+    public IAType getFieldType(int index);
+
+    public void setOptimizableSubTree(int index, OptimizableOperatorSubTree subTree);
+
+    public OptimizableOperatorSubTree getOperatorSubTree(int index);
+
+    public IAlgebricksConstantValue getConstantVal(int index);
+
+    public int findLogicalVar(LogicalVariable var);
+
+    public int findFieldName(List<String> fieldName);
+
+    public void substituteVar(LogicalVariable original, LogicalVariable substitution);
+
+    public void setPartialField(boolean partialField);
+
+    public boolean containsPartialField();
+
+    public void setSourceVar(int index, LogicalVariable var);
+
+    public LogicalVariable getSourceVar(int index);
+}


[39/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
deleted file mode 100644
index aa7a6ac..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
+++ /dev/null
@@ -1,242 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-
-/**
- * Operator subtree that matches the following patterns, and provides convenient access to its nodes:
- * (select)? <-- (assign | unnest)* <-- (datasource scan | unnest-map)
- */
-public class OptimizableOperatorSubTree {
-
-    public static enum DataSourceType {
-        DATASOURCE_SCAN,
-        EXTERNAL_SCAN,
-        PRIMARY_INDEX_LOOKUP,
-        COLLECTION_SCAN,
-        NO_DATASOURCE
-    }
-
-    public ILogicalOperator root = null;
-    public Mutable<ILogicalOperator> rootRef = null;
-    public final List<Mutable<ILogicalOperator>> assignsAndUnnestsRefs = new ArrayList<Mutable<ILogicalOperator>>();
-    public final List<AbstractLogicalOperator> assignsAndUnnests = new ArrayList<AbstractLogicalOperator>();
-    public Mutable<ILogicalOperator> dataSourceRef = null;
-    public DataSourceType dataSourceType = DataSourceType.NO_DATASOURCE;
-    // Dataset and type metadata. Set in setDatasetAndTypeMetadata().
-    public Dataset dataset = null;
-    public ARecordType recordType = null;
-
-    public boolean initFromSubTree(Mutable<ILogicalOperator> subTreeOpRef) {
-        reset();
-        rootRef = subTreeOpRef;
-        root = subTreeOpRef.getValue();
-        // Examine the op's children to match the expected patterns.
-        AbstractLogicalOperator subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
-        do {
-            // Skip select operator.
-            if (subTreeOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
-                subTreeOpRef = subTreeOp.getInputs().get(0);
-                subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
-            }
-            // Check primary-index pattern.
-            if (subTreeOp.getOperatorTag() != LogicalOperatorTag.ASSIGN
-                    && subTreeOp.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-                // Pattern may still match if we are looking for primary index matches as well.
-                return initializeDataSource(subTreeOpRef);
-            }
-            // Match (assign | unnest)+.
-            while (subTreeOp.getOperatorTag() == LogicalOperatorTag.ASSIGN
-                    || subTreeOp.getOperatorTag() == LogicalOperatorTag.UNNEST) {
-                assignsAndUnnestsRefs.add(subTreeOpRef);
-                assignsAndUnnests.add(subTreeOp);
-
-                subTreeOpRef = subTreeOp.getInputs().get(0);
-                subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
-            };
-        } while (subTreeOp.getOperatorTag() == LogicalOperatorTag.SELECT);
-
-        // Match data source (datasource scan or primary index search).
-        return initializeDataSource(subTreeOpRef);
-    }
-
-    private boolean initializeDataSource(Mutable<ILogicalOperator> subTreeOpRef) {
-        AbstractLogicalOperator subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
-        if (subTreeOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-            dataSourceType = DataSourceType.DATASOURCE_SCAN;
-            dataSourceRef = subTreeOpRef;
-            return true;
-        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.EXTERNAL_LOOKUP) {
-            dataSourceType = DataSourceType.EXTERNAL_SCAN;
-            dataSourceRef = subTreeOpRef;
-            return true;
-        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE) {
-            dataSourceType = DataSourceType.COLLECTION_SCAN;
-            dataSourceRef = subTreeOpRef;
-            return true;
-        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
-            UnnestMapOperator unnestMapOp = (UnnestMapOperator) subTreeOp;
-            ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-            if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                    jobGenParams.readFromFuncArgs(f.getArguments());
-                    if (jobGenParams.isPrimaryIndex()) {
-                        dataSourceType = DataSourceType.PRIMARY_INDEX_LOOKUP;
-                        dataSourceRef = subTreeOpRef;
-                        return true;
-                    }
-                }
-            }
-        }
-
-        return false;
-    }
-
-    /**
-     * Find the dataset corresponding to the datasource scan in the metadata.
-     * Also sets recordType to be the type of that dataset.
-     */
-    public boolean setDatasetAndTypeMetadata(AqlMetadataProvider metadataProvider) throws AlgebricksException {
-        String dataverseName = null;
-        String datasetName = null;
-        switch (dataSourceType) {
-            case DATASOURCE_SCAN:
-                DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) dataSourceRef.getValue();
-                Pair<String, String> datasetInfo = AnalysisUtil.getDatasetInfo(dataSourceScan);
-                dataverseName = datasetInfo.first;
-                datasetName = datasetInfo.second;
-                break;
-            case PRIMARY_INDEX_LOOKUP:
-                AbstractUnnestOperator unnestMapOp = (AbstractUnnestOperator) dataSourceRef.getValue();
-                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                jobGenParams.readFromFuncArgs(f.getArguments());
-                datasetName = jobGenParams.getDatasetName();
-                dataverseName = jobGenParams.getDataverseName();
-                break;
-            case EXTERNAL_SCAN:
-                ExternalDataLookupOperator externalScan = (ExternalDataLookupOperator) dataSourceRef.getValue();
-                datasetInfo = AnalysisUtil.getDatasetInfo(externalScan);
-                dataverseName = datasetInfo.first;
-                datasetName = datasetInfo.second;
-                break;
-            case COLLECTION_SCAN:
-                return true;
-            case NO_DATASOURCE:
-            default:
-                return false;
-        }
-        if (dataverseName == null || datasetName == null) {
-            return false;
-        }
-        // Find the dataset corresponding to the datasource in the metadata.
-        dataset = metadataProvider.findDataset(dataverseName, datasetName);
-        if (dataset == null) {
-            throw new AlgebricksException("No metadata for dataset " + datasetName);
-        }
-        // Get the record type for that dataset.
-        IAType itemType = metadataProvider.findType(dataverseName, dataset.getItemTypeName());
-        if (itemType.getTypeTag() != ATypeTag.RECORD) {
-            return false;
-        }
-        recordType = (ARecordType) itemType;
-        return true;
-    }
-
-    public boolean hasDataSource() {
-        return dataSourceType != DataSourceType.NO_DATASOURCE;
-    }
-
-    public boolean hasDataSourceScan() {
-        return dataSourceType == DataSourceType.DATASOURCE_SCAN;
-    }
-
-    public void reset() {
-        root = null;
-        rootRef = null;
-        assignsAndUnnestsRefs.clear();
-        assignsAndUnnests.clear();
-        dataSourceRef = null;
-        dataSourceType = DataSourceType.NO_DATASOURCE;
-        dataset = null;
-        recordType = null;
-    }
-
-    public void getPrimaryKeyVars(List<LogicalVariable> target) throws AlgebricksException {
-        switch (dataSourceType) {
-            case DATASOURCE_SCAN:
-                DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) dataSourceRef.getValue();
-                int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
-                for (int i = 0; i < numPrimaryKeys; i++) {
-                    target.add(dataSourceScan.getVariables().get(i));
-                }
-                break;
-            case PRIMARY_INDEX_LOOKUP:
-                UnnestMapOperator unnestMapOp = (UnnestMapOperator) dataSourceRef.getValue();
-                List<LogicalVariable> primaryKeys = null;
-                primaryKeys = AccessMethodUtils.getPrimaryKeyVarsFromPrimaryUnnestMap(dataset, unnestMapOp);
-                target.addAll(primaryKeys);
-                break;
-            case NO_DATASOURCE:
-            default:
-                throw new AlgebricksException("The subtree does not have any data source.");
-        }
-    }
-
-    public List<LogicalVariable> getDataSourceVariables() throws AlgebricksException {
-        switch (dataSourceType) {
-            case DATASOURCE_SCAN:
-            case EXTERNAL_SCAN:
-            case PRIMARY_INDEX_LOOKUP:
-                AbstractScanOperator scanOp = (AbstractScanOperator) dataSourceRef.getValue();
-                return scanOp.getVariables();
-            case COLLECTION_SCAN:
-                return new ArrayList<LogicalVariable>();
-            case NO_DATASOURCE:
-            default:
-                throw new AlgebricksException("The subtree does not have any data source.");
-        }
-    }
-}
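The pattern documented in the class comment of OptimizableOperatorSubTree, (select)? <-- (assign | unnest)* <-- (datasource scan | unnest-map), can be illustrated with a simplified, self-contained walker over plain operator tags. The types and names below are hypothetical (no Algebricks classes are used), only the single-select case is modeled, and a plain datasource scan is treated as the only accepted source; the sketch mirrors the loop structure of initFromSubTree without the metadata handling.

    import java.util.Arrays;
    import java.util.Iterator;
    import java.util.List;

    /** Simplified sketch of the (select)? <- (assign | unnest)* <- scan pattern match (hypothetical tags). */
    public class SubTreePatternSketch {

        enum Tag { SELECT, ASSIGN, UNNEST, DATASOURCE_SCAN, GROUP_BY }

        /** Returns true if the operator chain, listed top-down, matches the optimizable pattern. */
        static boolean matches(List<Tag> chainTopDown) {
            Iterator<Tag> it = chainTopDown.iterator();
            if (!it.hasNext()) {
                return false;
            }
            Tag current = it.next();
            if (current == Tag.SELECT) { // optional select on top
                if (!it.hasNext()) {
                    return false;
                }
                current = it.next();
            }
            while (current == Tag.ASSIGN || current == Tag.UNNEST) { // any number of assigns/unnests
                if (!it.hasNext()) {
                    return false;
                }
                current = it.next();
            }
            return current == Tag.DATASOURCE_SCAN; // must bottom out in a data source
        }

        public static void main(String[] args) {
            System.out.println(matches(Arrays.asList(Tag.SELECT, Tag.ASSIGN, Tag.DATASOURCE_SCAN))); // true
            System.out.println(matches(Arrays.asList(Tag.GROUP_BY, Tag.DATASOURCE_SCAN)));           // false
        }
    }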

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java
deleted file mode 100644
index e2dffd1..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeAccessMethod.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-
-/**
- * Class for helping rewrite rules to choose and apply RTree indexes.
- */
-public class RTreeAccessMethod implements IAccessMethod {
-
-    private static List<FunctionIdentifier> funcIdents = new ArrayList<FunctionIdentifier>();
-    static {
-        funcIdents.add(AsterixBuiltinFunctions.SPATIAL_INTERSECT);
-    }
-
-    public static RTreeAccessMethod INSTANCE = new RTreeAccessMethod();
-
-    @Override
-    public List<FunctionIdentifier> getOptimizableFunctions() {
-        return funcIdents;
-    }
-
-    @Override
-    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
-        boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
-        if (!matches) {
-            matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVars(funcExpr, analysisCtx);
-        }
-        return matches;
-    }
-
-    @Override
-    public boolean matchAllIndexExprs() {
-        return true;
-    }
-
-    @Override
-    public boolean matchPrefixIndexExprs() {
-        return false;
-    }
-
-    @Override
-    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
-            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
-            IOptimizationContext context) throws AlgebricksException {
-        // TODO: We can probably do something smarter here based on selectivity or MBR area.
-        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
-        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(subTree, null, chosenIndex, optFuncExpr,
-                analysisCtx, false, false, false, context);
-        if (primaryIndexUnnestOp == null) {
-            return false;
-        }
-        // Replace the datasource scan with the new plan rooted at primaryIndexUnnestMap.
-        subTree.dataSourceRef.setValue(primaryIndexUnnestOp);
-        return true;
-    }
-
-    @Override
-    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
-            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
-            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
-            boolean hasGroupBy) throws AlgebricksException {
-        // Determine if the index is applicable on the left or right side (if both, we arbitrarily prefer the left side).
-        Dataset dataset = analysisCtx.indexDatasetMap.get(chosenIndex);
-        // Determine probe and index subtrees based on chosen index.
-        OptimizableOperatorSubTree indexSubTree = null;
-        OptimizableOperatorSubTree probeSubTree = null;
-        if (!isLeftOuterJoin && leftSubTree.hasDataSourceScan()
-                && dataset.getDatasetName().equals(leftSubTree.dataset.getDatasetName())) {
-            indexSubTree = leftSubTree;
-            probeSubTree = rightSubTree;
-        } else if (rightSubTree.hasDataSourceScan()
-                && dataset.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
-            indexSubTree = rightSubTree;
-            probeSubTree = leftSubTree;
-        }
-        if (indexSubTree == null) {
-            // This may happen in the left outer join case.
-            return false;
-        }
-
-        LogicalVariable newNullPlaceHolderVar = null;
-        if (isLeftOuterJoin) {
-            //get a new null place holder variable that is the first field variable of the primary key
-            //from the indexSubTree's datasourceScanOp
-            newNullPlaceHolderVar = indexSubTree.getDataSourceVariables().get(0);
-        }
-
-        // TODO: We can probably do something smarter here based on selectivity or MBR area.
-        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
-        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(indexSubTree, probeSubTree, chosenIndex,
-                optFuncExpr, analysisCtx, true, isLeftOuterJoin, true, context);
-        if (primaryIndexUnnestOp == null) {
-            return false;
-        }
-
-        if (isLeftOuterJoin && hasGroupBy) {
-            //reset the null place holder variable
-            AccessMethodUtils.resetLOJNullPlaceholderVariableInGroupByOp(analysisCtx, newNullPlaceHolderVar, context);
-        }
-
-        indexSubTree.dataSourceRef.setValue(primaryIndexUnnestOp);
-        // Change join into a select with the same condition.
-        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
-        SelectOperator topSelect = new SelectOperator(joinOp.getCondition(), isLeftOuterJoin, newNullPlaceHolderVar);
-        topSelect.getInputs().add(indexSubTree.rootRef);
-        topSelect.setExecutionMode(ExecutionMode.LOCAL);
-        context.computeAndSetTypeEnvironmentForOperator(topSelect);
-        // Replace the original join with the new subtree rooted at the select op.
-        joinRef.setValue(topSelect);
-        return true;
-    }
-
-    private ILogicalOperator createSecondaryToPrimaryPlan(OptimizableOperatorSubTree indexSubTree,
-            OptimizableOperatorSubTree probeSubTree, Index chosenIndex, IOptimizableFuncExpr optFuncExpr,
-            AccessMethodAnalysisContext analysisCtx, boolean retainInput, boolean retainNull,
-            boolean requiresBroadcast, IOptimizationContext context) throws AlgebricksException {
-        Dataset dataset = indexSubTree.dataset;
-        ARecordType recordType = indexSubTree.recordType;
-
-        int optFieldIdx = AccessMethodUtils.chooseFirstOptFuncVar(chosenIndex, analysisCtx);
-        Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(optFuncExpr.getFieldType(optFieldIdx),
-                optFuncExpr.getFieldName(optFieldIdx), recordType);
-        if (keyPairType == null) {
-            return null;
-        }
-
-        // Get the number of dimensions corresponding to the field indexed by chosenIndex.
-        IAType spatialType = keyPairType.first;
-        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
-        int numSecondaryKeys = numDimensions * 2;
-        // We already made sure that indexSubTree has a datasource scan.
-        AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.dataSourceRef.getValue();
-        RTreeJobGenParams jobGenParams = new RTreeJobGenParams(chosenIndex.getIndexName(), IndexType.RTREE,
-                dataset.getDataverseName(), dataset.getDatasetName(), retainInput, retainNull, requiresBroadcast);
-        // A spatial object is serialized in the constant of the func expr we are optimizing.
-        // The R-Tree expects as input an MBR represented with 1 field per dimension.
-        // Here we generate vars and funcs for extracting MBR fields from the constant into fields of a tuple (as the R-Tree expects them).
-        // List of variables for the assign.
-        ArrayList<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
-        // List of expressions for the assign.
-        ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
-        Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr,
-                indexSubTree, probeSubTree);
-        ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
-
-        for (int i = 0; i < numSecondaryKeys; i++) {
-            // The create MBR function "extracts" one field of an MBR around the given spatial object.
-            AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_MBR));
-            // Spatial object is the constant from the func expr we are optimizing.
-            createMBR.getArguments().add(new MutableObject<ILogicalExpression>(searchKeyExpr));
-            // The number of dimensions.
-            createMBR.getArguments().add(
-                    new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(
-                            numDimensions)))));
-            // Which part of the MBR to extract.
-            createMBR.getArguments().add(
-                    new MutableObject<ILogicalExpression>(new ConstantExpression(
-                            new AsterixConstantValue(new AInt32(i)))));
-            // Add a variable and its expr to the lists which will be passed into an assign op.
-            LogicalVariable keyVar = context.newVar();
-            keyVarList.add(keyVar);
-            keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
-        }
-        jobGenParams.setKeyVarList(keyVarList);
-
-        // Assign operator that "extracts" the MBR fields from the func-expr constant into a tuple.
-        AssignOperator assignSearchKeys = new AssignOperator(keyVarList, keyExprList);
-        if (probeSubTree == null) {
-            // We are optimizing a selection query.
-            // Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
-            assignSearchKeys.getInputs().add(dataSourceOp.getInputs().get(0));
-            assignSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
-        } else {
-            // We are optimizing a join; place the assign op on top of the probe subtree.
-            assignSearchKeys.getInputs().add(probeSubTree.rootRef);
-        }
-
-        UnnestMapOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
-                chosenIndex, assignSearchKeys, jobGenParams, context, false, retainInput);
-
-        // Generate the rest of the upstream plan which feeds the search results into the primary index.
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            ExternalDataLookupOperator externalDataAccessOp = AccessMethodUtils.createExternalDataLookupUnnestMap(
-                    dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, chosenIndex, retainInput,
-                    retainNull);
-            return externalDataAccessOp;
-        } else {
-            UnnestMapOperator primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp,
-                    dataset, recordType, secondaryIndexUnnestOp, context, true, retainInput, false, false);
-
-            return primaryIndexUnnestOp;
-        }
-    }
-
-    @Override
-    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
-        if (optFuncExpr.getFuncExpr().getAnnotations()
-                .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
-            return false;
-        }
-        // No additional analysis required.
-        return true;
-    }
-}
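
For readers tracing the removed rule above: the core of createSecondaryToPrimaryPlan is the expansion of one spatial search key into 2 * numDimensions MBR fields via create-mbr (AsterixBuiltinFunctions.CREATE_MBR); the resulting variables feed the R-tree secondary-index unnest-map and then the primary index (or the external data lookup for external datasets). The following minimal, standalone sketch shows just that expansion; the class, method, and variable names are invented for illustration and the Algebricks/Asterix classes are deliberately not used.

    import java.util.ArrayList;
    import java.util.List;

    public class MbrKeyExpansionSketch {
        // Expands one spatial search key into 2 * numDimensions key expressions,
        // mirroring the create-mbr loop in createSecondaryToPrimaryPlan.
        public static List<String> expand(String searchKeyVar, int numDimensions) {
            int numSecondaryKeys = numDimensions * 2; // one min and one max per dimension
            List<String> keyExprs = new ArrayList<>();
            for (int i = 0; i < numSecondaryKeys; i++) {
                // create-mbr(<spatial object>, <number of dimensions>, <MBR field index>)
                keyExprs.add("create-mbr(" + searchKeyVar + ", " + numDimensions + ", " + i + ")");
            }
            return keyExprs;
        }

        public static void main(String[] args) {
            // A 2-D predicate yields four key expressions, one per MBR coordinate.
            System.out.println(expand("$searchKey", 2));
        }
    }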

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeJobGenParams.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeJobGenParams.java
deleted file mode 100644
index 9b6ff5f..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/RTreeJobGenParams.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-
-/**
- * Helper class for reading and writing job-gen parameters for RTree access methods to
- * and from a list of function arguments, typically of an unnest-map.
- */
-public class RTreeJobGenParams extends AccessMethodJobGenParams {
-
-    protected List<LogicalVariable> keyVarList;
-
-    public RTreeJobGenParams() {
-    }
-
-    public RTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
-            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
-        super(indexName, indexType, dataverseName, datasetName, retainInput, retainNull, requiresBroadcast);
-    }
-
-    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        super.writeToFuncArgs(funcArgs);
-        writeVarList(keyVarList, funcArgs);
-    }
-
-    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        super.readFromFuncArgs(funcArgs);
-        int index = super.getNumParams();
-        keyVarList = new ArrayList<LogicalVariable>();
-        readVarList(funcArgs, index, keyVarList);
-    }
-
-    public void setKeyVarList(List<LogicalVariable> keyVarList) {
-        this.keyVarList = keyVarList;
-    }
-
-    public List<LogicalVariable> getKeyVarList() {
-        return keyVarList;
-    }
-}
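
RTreeJobGenParams appends the key variables after the parameters written by the superclass and reads them back starting at super.getNumParams(). A rough standalone sketch of that append-then-skip round trip, using plain strings and an assumed count of seven fixed parameters (the real count comes from AccessMethodJobGenParams):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class JobGenParamsRoundTripSketch {
        // Assumption for illustration: the superclass writes this many fixed parameters.
        static final int NUM_FIXED_PARAMS = 7;

        static List<String> write(List<String> fixedParams, List<String> keyVars) {
            List<String> funcArgs = new ArrayList<>(fixedParams); // super.writeToFuncArgs(...)
            funcArgs.addAll(keyVars);                             // writeVarList(keyVarList, funcArgs)
            return funcArgs;
        }

        static List<String> readKeyVars(List<String> funcArgs) {
            // readFromFuncArgs skips the fixed parameters and reads the rest as key variables.
            return new ArrayList<>(funcArgs.subList(NUM_FIXED_PARAMS, funcArgs.size()));
        }

        public static void main(String[] args) {
            List<String> fixed = Arrays.asList("idx", "RTREE", "dv", "ds", "true", "false", "true");
            List<String> keys = Arrays.asList("$k0", "$k1", "$k2", "$k3");
            System.out.println(readKeyVars(write(fixed, keys))); // [$k0, $k1, $k2, $k3]
        }
    }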

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
deleted file mode 100644
index 567d85d..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.temporal;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Finds interval conditional expressions and converts them into interval start and end conditional statements.
- * The translation exposes the condition to the Algebricks optimizer.
- */
-public class TranslateIntervalExpressionRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-        SelectOperator selectOp = (SelectOperator) op;
-
-        Mutable<ILogicalExpression> exprRef = selectOp.getCondition();
-        boolean modified = false;
-        ILogicalExpression expr = exprRef.getValue();
-        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        if (funcExpr.getArguments().size() != 2) {
-            return false;
-        }
-        ILogicalExpression interval1 = funcExpr.getArguments().get(0).getValue();
-        ILogicalExpression interval2 = funcExpr.getArguments().get(1).getValue();
-        if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_MEETS)) {
-            exprRef.setValue(getEqualExpr(getIntervalEndExpr(interval1), getIntervalStartExpr(interval2)));
-            modified = true;
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_MET_BY)) {
-            exprRef.setValue(getEqualExpr(getIntervalStartExpr(interval1), getIntervalEndExpr(interval2)));
-            modified = true;
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_STARTS)) {
-            ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
-                    getIntervalStartExpr(interval2));
-            ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval1),
-                    getIntervalEndExpr(interval2));
-            exprRef.setValue(getAndExpr(startExpr, endExpr));
-            modified = true;
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_STARTED_BY)) {
-            ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
-                    getIntervalStartExpr(interval2));
-            ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval2),
-                    getIntervalEndExpr(interval1));
-            exprRef.setValue(getAndExpr(startExpr, endExpr));
-            modified = true;
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_ENDS)) {
-            ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
-            ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval1),
-                    getIntervalStartExpr(interval2));
-            exprRef.setValue(getAndExpr(startExpr, endExpr));
-            modified = true;
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_ENDED_BY)) {
-            ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
-            ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval2),
-                    getIntervalStartExpr(interval1));
-            exprRef.setValue(getAndExpr(startExpr, endExpr));
-            modified = true;
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_BEFORE)) {
-            // Requires a new strategy; there is no translation for this interval relation or for the remaining ones listed below.
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_AFTER)) {
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPS)) {
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPPED_BY)) {
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPPING)) {
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_COVERS)) {
-        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_COVERED_BY)) {
-        }
-
-        return modified;
-    }
-
-    private ILogicalExpression getAndExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
-        return getScalarExpr(AlgebricksBuiltinFunctions.AND, arg1, arg2);
-    }
-
-    private ILogicalExpression getEqualExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
-        return getScalarExpr(AlgebricksBuiltinFunctions.EQ, arg1, arg2);
-    }
-
-    private ILogicalExpression getLessThanOrEqualExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
-        return getScalarExpr(AlgebricksBuiltinFunctions.LE, arg1, arg2);
-    }
-
-    private ILogicalExpression getIntervalStartExpr(ILogicalExpression interval) {
-        return getScalarExpr(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START, interval);
-    }
-
-    private ILogicalExpression getIntervalEndExpr(ILogicalExpression interval) {
-        return getScalarExpr(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END, interval);
-    }
-
-    private ILogicalExpression getScalarExpr(FunctionIdentifier func, ILogicalExpression interval) {
-        List<Mutable<ILogicalExpression>> intervalArg = new ArrayList<Mutable<ILogicalExpression>>();
-        intervalArg.add(new MutableObject<ILogicalExpression>(interval));
-        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(func), intervalArg);
-    }
-
-    private ILogicalExpression getScalarExpr(FunctionIdentifier func, ILogicalExpression interval1,
-            ILogicalExpression interval2) {
-        List<Mutable<ILogicalExpression>> intervalArg = new ArrayList<Mutable<ILogicalExpression>>();
-        intervalArg.add(new MutableObject<ILogicalExpression>(interval1));
-        intervalArg.add(new MutableObject<ILogicalExpression>(interval2));
-        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(func), intervalArg);
-    }
-
-}
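
The rule above only rewrites the six interval relations that reduce cleanly to endpoint comparisons. Restated on plain endpoint values, mirroring exactly the expressions the rule builds (the class and method names below are invented for illustration):

    public class IntervalTranslationSketch {
        // interval-meets(i1, i2)      ->  end(i1) = start(i2)
        static boolean meets(long s1, long e1, long s2, long e2)     { return e1 == s2; }
        // interval-met-by(i1, i2)     ->  start(i1) = end(i2)
        static boolean metBy(long s1, long e1, long s2, long e2)     { return s1 == e2; }
        // interval-starts(i1, i2)     ->  start(i1) = start(i2) and end(i1) <= end(i2)
        static boolean starts(long s1, long e1, long s2, long e2)    { return s1 == s2 && e1 <= e2; }
        // interval-started-by(i1, i2) ->  start(i1) = start(i2) and end(i2) <= end(i1)
        static boolean startedBy(long s1, long e1, long s2, long e2) { return s1 == s2 && e2 <= e1; }
        // interval-ends(i1, i2)       ->  end(i1) = end(i2) and start(i1) <= start(i2)
        static boolean ends(long s1, long e1, long s2, long e2)      { return e1 == e2 && s1 <= s2; }
        // interval-ended-by(i1, i2)   ->  end(i1) = end(i2) and start(i2) <= start(i1)
        static boolean endedBy(long s1, long e1, long s2, long e2)   { return e1 == e2 && s2 <= s1; }

        public static void main(String[] args) {
            System.out.println(meets(1, 5, 5, 9)); // true: the first interval ends exactly where the second starts
        }
    }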

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
deleted file mode 100644
index f6bf3c3..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
+++ /dev/null
@@ -1,548 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules.typecast;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
-import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-
-/**
- * This class is a utility for type casting.
- * It offers two public methods:
- * 1. public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
- * IVariableTypeEnvironment env) throws AlgebricksException, which only enforces the list type recursively.
- * 2. public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
- * IVariableTypeEnvironment env) throws AlgebricksException, which enforces the list type and the record type recursively.
- *
- * @author yingyib
- */
-public class StaticTypeCastUtil {
-
-    /**
-     * This method is only called when funcExpr contains list constructor function calls.
-     * The List constructor is very special because a nested list is of type List<ANY>.
-     * However, the bottom-up type inference (InferTypeRule in algebricks) does not infer that, so we need this method to enforce the type.
-     * We do not want to break the generality of algebricks, so this method is called in an ASTERIX rule: {@link IntroduceEnforcedListTypeRule}.
-     * 
-     * @param funcExpr
-     *            a function expression that contains list constructor function calls
-     * @param reqType
-     *            the required type
-     * @param inputType
-     * @param env
-     *            type environment
-     * @throws AlgebricksException
-     */
-    public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
-            IVariableTypeEnvironment env) throws AlgebricksException {
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
-            if (reqType.equals(BuiltinType.ANY)) {
-                reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
-            }
-            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
-                    env);
-        } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
-            if (reqType.equals(BuiltinType.ANY)) {
-                reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
-            }
-            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
-                    env);
-        } else {
-            List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
-            boolean changed = false;
-            for (Mutable<ILogicalExpression> arg : args) {
-                ILogicalExpression argExpr = arg.getValue();
-                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
-                    IAType exprType = (IAType) env.getType(argFuncExpr);
-                    changed = rewriteListExpr(argFuncExpr, exprType, exprType, env) || changed;
-                }
-            }
-            return changed;
-        }
-    }
-
-    /**
-     * This method recursively enforces required types for the list type and the record type.
-     * The List constructor is very special because
-     * 1. a nested list in a list is of type List<ANY>;
-     * 2. a nested record in a list is of type Open_Record{}.
-     * The open record constructor is very special because
-     * 1. a nested list in the open part is of type List<ANY>;
-     * 2. a nested record in the open part is of type Open_Record{}.
-     * However, the bottom-up type inference (InferTypeRule in algebricks) does not infer that, so we need this method to enforce the type.
-     * We do not want to break the generality of algebricks, so this method is called in an ASTERIX rule: {@link IntroduceStaticTypeCastRule}.
-     * 
-     * @param funcExpr
-     *            the function expression whose type needs to be top-down enforced
-     * @param reqType
-     *            the required type inferred from parent operators/expressions
-     * @param inputType
-     *            the currently inferred type
-     * @param env
-     *            the type environment
-     * @return true if the type is cast; otherwise, false.
-     * @throws AlgebricksException
-     */
-    public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
-            IVariableTypeEnvironment env) throws AlgebricksException {
-        /**
-         * Sanity check: if there are list (ordered or unordered) or record variable expressions in the funcExpr, we will not do STATIC type casting
-         * because they are not "statically cast-able";
-         * instead, the record will be dynamically cast at runtime.
-         */
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
-            if (reqType.equals(BuiltinType.ANY)) {
-                reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
-            }
-            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
-                    env);
-        } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
-            if (reqType.equals(BuiltinType.ANY)) {
-                reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
-            }
-            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
-                    env);
-        } else if (inputType.getTypeTag().equals(ATypeTag.RECORD)) {
-            if (reqType.equals(BuiltinType.ANY)) {
-                reqType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
-            }
-            return rewriteRecordFuncExpr(funcExpr, (ARecordType) reqType, (ARecordType) inputType, env);
-        } else {
-            List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
-            boolean changed = false;
-            for (Mutable<ILogicalExpression> arg : args) {
-                ILogicalExpression argExpr = arg.getValue();
-                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
-                    IAType exprType = (IAType) env.getType(argFuncExpr);
-                    changed = changed || rewriteFuncExpr(argFuncExpr, exprType, exprType, env);
-                }
-            }
-            if (!compatible(reqType, inputType)) {
-                throw new AlgebricksException("type mismatch, required: " + reqType.toString() + " actual: "
-                        + inputType.toString());
-            }
-            return changed;
-        }
-    }
-
-    /**
-     * only called when funcExpr is a record constructor
-     * 
-     * @param funcExpr
-     *            record constructor function expression
-     * @param requiredRecordType
-     *            required record type
-     * @param inputRecordType
-     * @param env
-     *            type environment
-     * @throws AlgebricksException
-     */
-    private static boolean rewriteRecordFuncExpr(AbstractFunctionCallExpression funcExpr,
-            ARecordType requiredRecordType, ARecordType inputRecordType, IVariableTypeEnvironment env)
-            throws AlgebricksException {
-        // if already rewritten, the required type is not null
-        if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
-            return false;
-        boolean casted = staticRecordTypeCast(funcExpr, requiredRecordType, inputRecordType, env);
-        if (casted) {
-            //enforce the required type if it is statically casted
-            TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredRecordType, inputRecordType);
-        }
-        return casted;
-    }
-
-    /**
-     * only called when funcExpr is a list constructor
-     * 
-     * @param funcExpr
-     *            list constructor function expression
-     * @param requiredListType
-     *            required list type
-     * @param inputListType
-     * @param env
-     *            type environment
-     * @throws AlgebricksException
-     */
-    private static boolean rewriteListFuncExpr(AbstractFunctionCallExpression funcExpr,
-            AbstractCollectionType requiredListType, AbstractCollectionType inputListType, IVariableTypeEnvironment env)
-            throws AlgebricksException {
-        if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
-            return false;
-
-        TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredListType, inputListType);
-        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
-
-        IAType itemType = requiredListType.getItemType();
-        IAType inputItemType = inputListType.getItemType();
-        boolean changed = false;
-        for (int j = 0; j < args.size(); j++) {
-            ILogicalExpression arg = args.get(j).getValue();
-            IAType currentItemType = (inputItemType == null || inputItemType == BuiltinType.ANY) ? (IAType) env
-                    .getType(arg) : inputItemType;
-            switch (arg.getExpressionTag()) {
-                case FUNCTION_CALL:
-                    ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) arg;
-                    changed = rewriteFuncExpr(argFunc, itemType, currentItemType, env) || changed;
-                    break;
-                case VARIABLE:
-                    changed = injectCastToRelaxType(args.get(j), currentItemType, env) || changed;
-                    break;
-            }
-        }
-        return changed;
-    }
-
-    /**
-     * This method statically casts the type of records from their current type to the required type.
-     * 
-     * @param func
-     *            The record constructor expression.
-     * @param reqType
-     *            The required type.
-     * @param inputType
-     *            The current type.
-     * @param env
-     *            The type environment.
-     * @throws AlgebricksException
-     */
-    private static boolean staticRecordTypeCast(AbstractFunctionCallExpression func, ARecordType reqType,
-            ARecordType inputType, IVariableTypeEnvironment env) throws AlgebricksException {
-        if (!(func.getFunctionIdentifier() == AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR || func
-                .getFunctionIdentifier() == AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
-            return false;
-        }
-        IAType[] reqFieldTypes = reqType.getFieldTypes();
-        String[] reqFieldNames = reqType.getFieldNames();
-        IAType[] inputFieldTypes = inputType.getFieldTypes();
-        String[] inputFieldNames = inputType.getFieldNames();
-
-        int[] fieldPermutation = new int[reqFieldTypes.length];
-        boolean[] nullFields = new boolean[reqFieldTypes.length];
-        boolean[] openFields = new boolean[inputFieldTypes.length];
-
-        Arrays.fill(nullFields, false);
-        Arrays.fill(openFields, true);
-        Arrays.fill(fieldPermutation, -1);
-
-        // forward match: match from actual to required
-        boolean matched = false;
-        for (int i = 0; i < inputFieldNames.length; i++) {
-            String fieldName = inputFieldNames[i];
-            IAType fieldType = inputFieldTypes[i];
-
-            if (2 * i + 1 >= func.getArguments().size()) {
-                // it is not a record constructor function
-                return false;
-            }
-
-            // 2*i+1 is the index of field value expression
-            ILogicalExpression arg = func.getArguments().get(2 * i + 1).getValue();
-            matched = false;
-            for (int j = 0; j < reqFieldNames.length; j++) {
-                String reqFieldName = reqFieldNames[j];
-                IAType reqFieldType = reqFieldTypes[j];
-                if (fieldName.equals(reqFieldName)) {
-                    //type matched
-                    if (fieldType.equals(reqFieldType)) {
-                        fieldPermutation[j] = i;
-                        openFields[i] = false;
-                        matched = true;
-
-                        if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                            ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
-                            rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
-                        }
-                        break;
-                    }
-
-                    // match the optional field
-                    if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
-                        IAType itemType = ((AUnionType) reqFieldType).getNullableType();
-                        reqFieldType = itemType;
-                        if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
-                            fieldPermutation[j] = i;
-                            openFields[i] = false;
-                            matched = true;
-
-                            // rewrite record expr
-                            if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                                ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
-                                rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
-                            }
-                            break;
-                        }
-                    }
-
-                    // match the optional type input for a non-optional field
-                    // delay that to runtime by calling the not-null function
-                    if (NonTaggedFormatUtil.isOptional(fieldType)) {
-                        IAType itemType = ((AUnionType) fieldType).getNullableType();
-                        if (reqFieldType.equals(itemType)) {
-                            fieldPermutation[j] = i;
-                            openFields[i] = false;
-                            matched = true;
-
-                            ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(
-                                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
-                            notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
-                            // wrap the original expression with the not-null function
-                            func.getArguments().get(2 * i + 1).setValue(notNullFunc);
-                            break;
-                        }
-                    }
-
-                    // match the record field: need cast
-                    if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                        ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
-                        rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
-                        fieldPermutation[j] = i;
-                        openFields[i] = false;
-                        matched = true;
-                        break;
-                    }
-                }
-            }
-            // the input has extra fields
-            if (!matched && !reqType.isOpen()) {
-                throw new AlgebricksException("static type mismatch: the input record includes an extra closed field "
-                        + fieldName + ":" + fieldType + "! Please check the field name and type.");
-            }
-        }
-
-        // backward match: match from required to actual
-        for (int i = 0; i < reqFieldNames.length; i++) {
-            String reqFieldName = reqFieldNames[i];
-            IAType reqFieldType = reqFieldTypes[i];
-            matched = false;
-            for (int j = 0; j < inputFieldNames.length; j++) {
-                String fieldName = inputFieldNames[j];
-                IAType fieldType = inputFieldTypes[j];
-                if (!fieldName.equals(reqFieldName))
-                    continue;
-                // We should check the open-field flag here,
-                // because the number of entries in fieldPermutation is the
-                // number of required schema fields;
-                // here we want to check whether an input field is matched,
-                // and the entry index of fieldPermutation is the required field index.
-                if (!openFields[j]) {
-                    matched = true;
-                    break;
-                }
-
-                // match the optional field
-                if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
-                    IAType itemType = ((AUnionType) reqFieldType).getNullableType();
-                    if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
-                        matched = true;
-                        break;
-                    }
-                }
-            }
-            if (matched)
-                continue;
-
-            if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
-                // add a null field
-                nullFields[i] = true;
-            } else {
-                // no matched field in the input for a required closed field
-                if (inputType.isOpen()) {
-                    // If the input type is open, return false and leave it to the dynamic type cast to defer the error to runtime.
-                    return false;
-                } else {
-                    throw new AlgebricksException(
-                            "static type mismatch: the input record misses a required closed field " + reqFieldName
-                                    + ":" + reqFieldType + "! Please check the field name and type.");
-                }
-            }
-        }
-
-        List<Mutable<ILogicalExpression>> arguments = func.getArguments();
-        List<Mutable<ILogicalExpression>> originalArguments = new ArrayList<Mutable<ILogicalExpression>>();
-        originalArguments.addAll(arguments);
-        arguments.clear();
-        // re-order the closed part and fill in null fields
-        for (int i = 0; i < fieldPermutation.length; i++) {
-            int pos = fieldPermutation[i];
-            if (pos >= 0) {
-                arguments.add(originalArguments.get(2 * pos));
-                arguments.add(originalArguments.get(2 * pos + 1));
-            }
-            if (nullFields[i]) {
-                // add a null field
-                arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                        new AString(reqFieldNames[i])))));
-                arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                        ANull.NULL))));
-            }
-        }
-
-        // add the open part
-        for (int i = 0; i < openFields.length; i++) {
-            if (openFields[i]) {
-                arguments.add(originalArguments.get(2 * i));
-                Mutable<ILogicalExpression> expRef = originalArguments.get(2 * i + 1);
-                injectCastToRelaxType(expRef, inputFieldTypes[i], env);
-                arguments.add(expRef);
-            }
-        }
-        return true;
-    }
-
-    private static boolean injectCastToRelaxType(Mutable<ILogicalExpression> expRef, IAType inputFieldType,
-            IVariableTypeEnvironment env) throws AlgebricksException {
-        ILogicalExpression argExpr = expRef.getValue();
-        List<LogicalVariable> parameterVars = new ArrayList<LogicalVariable>();
-        argExpr.getUsedVariables(parameterVars);
-        // We need to handle open fields recursively by their default types:
-        // for a list, the item type is ANY;
-        // for a record, the default type is the fully open record type
-        // (see the switch below).
-        boolean castInjected = false;
-        if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL
-                || argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            IAType reqFieldType = inputFieldType;
-            FunctionIdentifier fi = null;
-            // do not enforce the nested type when the expression uses no variables
-            switch (inputFieldType.getTypeTag()) {
-                case RECORD:
-                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
-                    fi = AsterixBuiltinFunctions.CAST_RECORD;
-                    break;
-                case ORDEREDLIST:
-                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
-                    fi = AsterixBuiltinFunctions.CAST_LIST;
-                    break;
-                case UNORDEREDLIST:
-                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
-                    fi = AsterixBuiltinFunctions.CAST_LIST;
-            }
-            if (fi != null && !inputFieldType.equals(reqFieldType) && parameterVars.size() > 0) {
-                //inject dynamic type casting
-                injectCastFunction(FunctionUtils.getFunctionInfo(fi), reqFieldType, inputFieldType, expRef, argExpr);
-                castInjected = true;
-            }
-            if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                //recursively rewrite function arguments
-                if (TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) argExpr) == null
-                        && reqFieldType != null) {
-                    if (castInjected) {
-                        //rewrite the arg expression inside the dynamic cast
-                        ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
-                        rewriteFuncExpr(argFunc, inputFieldType, inputFieldType, env);
-                    } else {
-                        //rewrite arg
-                        ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
-                        rewriteFuncExpr(argFunc, reqFieldType, inputFieldType, env);
-                    }
-                }
-            }
-        }
-        return castInjected;
-    }
-
-    /**
-     * Inject a dynamic cast function wrapping an existing expression
-     * 
-     * @param funcInfo
-     *            the cast function
-     * @param reqType
-     *            the required type
-     * @param inputType
-     *            the original type
-     * @param exprRef
-     *            the expression reference
-     * @param argExpr
-     *            the original expression
-     */
-    private static void injectCastFunction(IFunctionInfo funcInfo, IAType reqType, IAType inputType,
-            Mutable<ILogicalExpression> exprRef, ILogicalExpression argExpr) {
-        ScalarFunctionCallExpression cast = new ScalarFunctionCallExpression(funcInfo);
-        cast.getArguments().add(new MutableObject<ILogicalExpression>(argExpr));
-        exprRef.setValue(cast);
-        TypeComputerUtilities.setRequiredAndInputTypes(cast, reqType, inputType);
-    }
-
-    /**
-     * Determine if two types are compatible
-     * 
-     * @param reqType
-     *            the required type
-     * @param inputType
-     *            the input type
-     * @return true if the two types are compatible; false otherwise
-     */
-    public static boolean compatible(IAType reqType, IAType inputType) {
-        if (reqType.getTypeTag() == ATypeTag.ANY || inputType.getTypeTag() == ATypeTag.ANY) {
-            return true;
-        }
-        if (reqType.getTypeTag() != ATypeTag.UNION && inputType.getTypeTag() != ATypeTag.UNION) {
-            if (reqType.equals(inputType)) {
-                return true;
-            } else {
-                return false;
-            }
-        }
-        Set<IAType> reqTypePossible = new HashSet<IAType>();
-        Set<IAType> inputTypePossible = new HashSet<IAType>();
-        if (reqType.getTypeTag() == ATypeTag.UNION) {
-            AUnionType unionType = (AUnionType) reqType;
-            reqTypePossible.addAll(unionType.getUnionList());
-        } else {
-            reqTypePossible.add(reqType);
-        }
-
-        if (inputType.getTypeTag() == ATypeTag.UNION) {
-            AUnionType unionType = (AUnionType) inputType;
-            inputTypePossible.addAll(unionType.getUnionList());
-        } else {
-            inputTypePossible.add(inputType);
-        }
-        return reqTypePossible.equals(inputTypePossible);
-    }
-}
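
The compatible() check at the end of the removed class boils down to set equality over union members, with ANY matching everything. A compressed standalone restatement that models each type as a set of member type names (a singleton set for non-union types); this is only a sketch, not the Asterix type API:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class TypeCompatibilitySketch {
        // ANY matches everything; otherwise the (possibly singleton) member sets must be equal.
        static boolean compatible(Set<String> reqType, Set<String> inputType) {
            if (reqType.contains("ANY") || inputType.contains("ANY")) {
                return true;
            }
            return reqType.equals(inputType);
        }

        public static void main(String[] args) {
            Set<String> nullableInt = new HashSet<>(Arrays.asList("INT32", "NULL"));
            System.out.println(compatible(nullableInt, new HashSet<>(Arrays.asList("NULL", "INT32")))); // true
            System.out.println(compatible(nullableInt, new HashSet<>(Arrays.asList("INT32"))));         // false
        }
    }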

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/util/EquivalenceClassUtils.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/util/EquivalenceClassUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/util/EquivalenceClassUtils.java
deleted file mode 100644
index 6c82c11..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/util/EquivalenceClassUtils.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules.util;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.MutableObject;
-import org.mortbay.util.SingletonList;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.EquivalenceClass;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-
-public class EquivalenceClassUtils {
-
-    /**
-     * Adds equivalence classes for primary index accesses, including unnest-map for
-     * primary index access and data source scan through primary index ---
-     * one equivalence class between a primary key variable and a record field-access expression.
-     * 
-     * @param operator
-     *            , the primary index access operator.
-     * @param indexSearchVars
-     *            , the returned variables from primary index access. The last variable
-     *            is the record variable.
-     * @param recordType
-     *            , the record type of an index payload record.
-     * @param dataset
-     *            , the accessed dataset.
-     * @param context
-     *            , the optimization context.
-     * @throws AlgebricksException
-     */
-    @SuppressWarnings("unchecked")
-    public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator,
-            List<LogicalVariable> indexSearchVars, ARecordType recordType, Dataset dataset, IOptimizationContext context)
-            throws AlgebricksException {
-        if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
-            return;
-        }
-        InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
-        List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
-        Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
-        String[] fieldNames = recordType.getFieldNames();
-        for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
-            fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
-        }
-
-        LogicalVariable recordVar = indexSearchVars.get(indexSearchVars.size() - 1);
-        for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
-            String pkFieldName = primaryKey.get(pkIndex).get(0);
-            int fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
-            LogicalVariable var = indexSearchVars.get(pkIndex);
-            ILogicalExpression expr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
-                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(recordVar)),
-                    new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(
-                            fieldIndexInRecord)))));
-            EquivalenceClass equivClass = new EquivalenceClass(SingletonList.newSingletonList(var), var,
-                    SingletonList.newSingletonList(expr));
-            Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
-            if (equivalenceMap == null) {
-                equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
-                context.putEquivalenceClassMap(operator, equivalenceMap);
-            }
-            equivalenceMap.put(var, equivClass);
-        }
-    }
-
-}
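
The utility above records, for each primary-key variable returned by a primary index access, that it is equivalent to a field-access-by-index over the record variable. A standalone sketch of that mapping follows; the names and the string encoding of the expression are invented for illustration:

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PrimaryKeyEquivalenceSketch {
        static Map<String, String> buildEquivalences(String recordVar, String[] pkVars,
                String[] pkFieldNames, String[] recordFieldNames) {
            // Map each record field name to its position, as addEquivalenceClassesForPrimaryIndexAccess does.
            Map<String, Integer> fieldNameToIndex = new HashMap<>();
            for (int i = 0; i < recordFieldNames.length; i++) {
                fieldNameToIndex.put(recordFieldNames[i], i);
            }
            // Pair each primary-key variable with a field access on the record variable.
            Map<String, String> equivalences = new LinkedHashMap<>();
            for (int pk = 0; pk < pkFieldNames.length; pk++) {
                int fieldIndex = fieldNameToIndex.get(pkFieldNames[pk]);
                equivalences.put(pkVars[pk], "field-access-by-index(" + recordVar + ", " + fieldIndex + ")");
            }
            return equivalences;
        }

        public static void main(String[] args) {
            System.out.println(buildEquivalences("$r", new String[] { "$pk" },
                    new String[] { "id" }, new String[] { "id", "name" }));
            // {$pk=field-access-by-index($r, 0)}
        }
    }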

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
deleted file mode 100644
index 1c31da0..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.DatasetDecl;
-import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
-import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DropStatement;
-import edu.uci.ics.asterix.aql.expression.InsertStatement;
-import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
-import edu.uci.ics.asterix.common.api.IClusterManagementWork.ClusterState;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-
-/**
- * Base class for AQL translators. Contains the common validation logic for AQL
- * statements.
- */
-public abstract class AbstractAqlTranslator {
-
-    protected static final Logger LOGGER = Logger.getLogger(AbstractAqlTranslator.class.getName());
-
-    protected static final Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
-
-    public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
-
-        if (!(AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE) && AsterixClusterProperties.INSTANCE
-                .isGlobalRecoveryCompleted())) {
-            int maxWaitCycles = AsterixAppContextInfo.getInstance().getExternalProperties().getMaxWaitClusterActive();
-            int waitCycleCount = 0;
-            try {
-                while (!AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE)
-                        && waitCycleCount < maxWaitCycles) {
-                    Thread.sleep(1000);
-                    waitCycleCount++;
-                }
-            } catch (InterruptedException e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Thread interrupted while waiting for cluster to be "
-                            + ClusterState.ACTIVE);
-                }
-            }
-            if (!AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
-                throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE
-                        + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
-                }
-            }
-        }
-
-        if (AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
-            throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE + " state."
-                    + "\n One or more Node Controllers have left.\n");
-        }
-
-        if (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted()) {
-            int maxWaitCycles = AsterixAppContextInfo.getInstance().getExternalProperties().getMaxWaitClusterActive();
-            int waitCycleCount = 0;
-            try {
-                while (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
-                    Thread.sleep(1000);
-                    waitCycleCount++;
-                }
-            } catch (InterruptedException e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
-                }
-            }
-            if (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted()) {
-                throw new AsterixException(" Asterix Cluster Global recovery is not yet complete and The system is in "
-                        + ClusterState.ACTIVE + " state");
-            }
-        }
-
-        boolean invalidOperation = false;
-        String message = null;
-        String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
-        switch (stmt.getKind()) {
-            case INSERT:
-                InsertStatement insertStmt = (InsertStatement) stmt;
-                if (insertStmt.getDataverseName() != null) {
-                    dataverse = insertStmt.getDataverseName().getValue();
-                }
-                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
-                if (invalidOperation) {
-                    message = "Insert operation is not permitted in dataverse "
-                            + MetadataConstants.METADATA_DATAVERSE_NAME;
-                }
-                break;
-
-            case DELETE:
-                DeleteStatement deleteStmt = (DeleteStatement) stmt;
-                if (deleteStmt.getDataverseName() != null) {
-                    dataverse = deleteStmt.getDataverseName().getValue();
-                }
-                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
-                if (invalidOperation) {
-                    message = "Delete operation is not permitted in dataverse "
-                            + MetadataConstants.METADATA_DATAVERSE_NAME;
-                }
-                break;
-
-            case NODEGROUP_DROP:
-                String nodegroupName = ((NodeGroupDropStatement) stmt).getNodeGroupName().getValue();
-                invalidOperation = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME.equals(nodegroupName);
-                if (invalidOperation) {
-                    message = "Cannot drop nodegroup:" + nodegroupName;
-                }
-                break;
-
-            case DATAVERSE_DROP:
-                DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
-                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName()
-                        .getValue());
-                if (invalidOperation) {
-                    message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
-                }
-                break;
-
-            case DATASET_DROP:
-                DropStatement dropStmt = (DropStatement) stmt;
-                if (dropStmt.getDataverseName() != null) {
-                    dataverse = dropStmt.getDataverseName().getValue();
-                }
-                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
-                if (invalidOperation) {
-                    message = "Cannot drop a dataset belonging to the dataverse:"
-                            + MetadataConstants.METADATA_DATAVERSE_NAME;
-                }
-                break;
-            case DATASET_DECL:
-                DatasetDecl datasetStmt = (DatasetDecl) stmt;
-                Map<String, String> hints = datasetStmt.getHints();
-                if (hints != null && !hints.isEmpty()) {
-                    Pair<Boolean, String> validationResult = null;
-                    StringBuffer errorMsgBuffer = new StringBuffer();
-                    for (Entry<String, String> hint : hints.entrySet()) {
-                        validationResult = DatasetHints.validate(hint.getKey(), hint.getValue());
-                        if (!validationResult.first) {
-                            errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
-                                    + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
-                            errorMsgBuffer.append(" \n");
-                        }
-                    }
-                    invalidOperation = errorMsgBuffer.length() > 0;
-                    if (invalidOperation) {
-                        message = errorMsgBuffer.toString();
-                    }
-                }
-                break;
-
-        }
-
-        if (invalidOperation) {
-            throw new AsterixException("Invalid operation - " + message);
-        }
-    }
-}
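
validateOperation in the removed translator above blocks until the cluster reports ACTIVE (and global recovery completes), giving up after a configured number of one-second wait cycles. A stripped-down sketch of that bounded polling pattern follows; ClusterStateProbe is a hypothetical stand-in for AsterixClusterProperties, and the interrupt handling is tightened to restore the thread's interrupt flag rather than swallow it.

    import java.util.concurrent.TimeUnit;

    public class BoundedClusterWait {

        // Hypothetical stand-in for AsterixClusterProperties.INSTANCE.getState() == ACTIVE.
        interface ClusterStateProbe {
            boolean isActive();
        }

        // Polls once per second, up to maxWaitCycles times, mirroring the loop in validateOperation.
        static boolean waitUntilActive(ClusterStateProbe probe, int maxWaitCycles) {
            int waitCycleCount = 0;
            try {
                while (!probe.isActive() && waitCycleCount < maxWaitCycles) {
                    TimeUnit.SECONDS.sleep(1);
                    waitCycleCount++;
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // preserve interrupt status instead of swallowing it
            }
            return probe.isActive();
        }

        public static void main(String[] args) {
            ClusterStateProbe alwaysActive = new ClusterStateProbe() {
                public boolean isActive() { return true; }
            };
            System.out.println(waitUntilActive(alwaysActive, 5)); // true
        }
    }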


[28/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
new file mode 100644
index 0000000..e2f2f4f
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
@@ -0,0 +1,296 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
+
+public class SweepIllegalNonfunctionalFunctions extends AbstractExtractExprRule implements IAlgebraicRewriteRule {
+
+    private final IllegalNonfunctionalFunctionSweeperOperatorVisitor visitor;
+
+    public SweepIllegalNonfunctionalFunctions() {
+        visitor = new IllegalNonfunctionalFunctionSweeperOperatorVisitor();
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        ILogicalOperator op = opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+
+        op.accept(visitor, null);
+        context.computeAndSetTypeEnvironmentForOperator(op);
+        context.addToDontApplySet(this, op);
+        return false;
+    }
+
+    private class IllegalNonfunctionalFunctionSweeperOperatorVisitor implements ILogicalOperatorVisitor<Void, Void> {
+
+        private void sweepExpression(ILogicalExpression expr, ILogicalOperator op) throws AlgebricksException {
+            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                if (!expr.isFunctional()) {
+                    AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+                    throw new AlgebricksException("Found non-functional function " + fce.getFunctionIdentifier()
+                            + " in op " + op);
+                }
+            }
+        }
+
+        @Override
+        public Void visitAggregateOperator(AggregateOperator op, Void arg) throws AlgebricksException {
+            for (Mutable<ILogicalExpression> me : op.getExpressions()) {
+                sweepExpression(me.getValue(), op);
+            }
+            List<Mutable<ILogicalExpression>> mergeExprs = op.getMergeExpressions();
+            if (mergeExprs != null) {
+                for (Mutable<ILogicalExpression> me : mergeExprs) {
+                    sweepExpression(me.getValue(), op);
+                }
+            }
+            return null;
+        }
+
+        @Override
+        public Void visitRunningAggregateOperator(RunningAggregateOperator op, Void arg) throws AlgebricksException {
+            for (Mutable<ILogicalExpression> me : op.getExpressions()) {
+                sweepExpression(me.getValue(), op);
+            }
+            return null;
+        }
+
+        @Override
+        public Void visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitGroupByOperator(GroupByOperator op, Void arg) throws AlgebricksException {
+            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : op.getGroupByList()) {
+                sweepExpression(p.second.getValue(), op);
+            }
+            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : op.getDecorList()) {
+                sweepExpression(p.second.getValue(), op);
+            }
+            return null;
+        }
+
+        @Override
+        public Void visitLimitOperator(LimitOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitInnerJoinOperator(InnerJoinOperator op, Void arg) throws AlgebricksException {
+            sweepExpression(op.getCondition().getValue(), op);
+            return null;
+        }
+
+        @Override
+        public Void visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Void arg) throws AlgebricksException {
+            sweepExpression(op.getCondition().getValue(), op);
+            return null;
+        }
+
+        @Override
+        public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitOrderOperator(OrderOperator op, Void arg) throws AlgebricksException {
+            for (Pair<IOrder, Mutable<ILogicalExpression>> p : op.getOrderExpressions()) {
+                sweepExpression(p.second.getValue(), op);
+            }
+            return null;
+        }
+
+        @Override
+        public Void visitAssignOperator(AssignOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitSelectOperator(SelectOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
+            for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
+                sweepExpression(expr.getValue(), op);
+            }
+            return null;
+        }
+
+        @Override
+        public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitMaterializeOperator(MaterializeOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitScriptOperator(ScriptOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitSinkOperator(SinkOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitUnionOperator(UnionAllOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitUnnestOperator(UnnestOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitUnnestMapOperator(UnnestMapOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitDataScanOperator(DataSourceScanOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitDistinctOperator(DistinctOperator op, Void arg) throws AlgebricksException {
+            for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
+                sweepExpression(expr.getValue(), op);
+            }
+            return null;
+        }
+
+        @Override
+        public Void visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitWriteOperator(WriteOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitInsertDeleteOperator(InsertDeleteOperator op, Void tag) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Void tag) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitTokenizeOperator(TokenizeOperator op, Void tag) throws AlgebricksException {
+            return null;
+        }
+
+        @Override
+        public Void visitExternalDataLookupOperator(ExternalDataLookupOperator op, Void arg) throws AlgebricksException {
+            return null;
+        }
+
+    }
+
+}
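
The new rule above visits every operator and throws when an expression that must remain functional contains a non-functional function call. A toy, self-contained version of that check over a simplified expression model is below; Expr and the function names used are hypothetical, and unlike the visitor above this toy version also recurses into argument expressions.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class NonfunctionalSweepSketch {

        // Hypothetical, simplified expression model: a call with a name,
        // a "functional" flag, and a list of argument expressions.
        static final class Expr {
            final String name;
            final boolean functional;
            final List<Expr> args;
            Expr(String name, boolean functional, List<Expr> args) {
                this.name = name;
                this.functional = functional;
                this.args = args;
            }
        }

        // Rejects any non-functional call, in the spirit of sweepExpression above.
        static void sweep(Expr expr, String operatorDescription) {
            if (!expr.functional) {
                throw new IllegalStateException("Found non-functional function " + expr.name
                        + " in op " + operatorDescription);
            }
            for (Expr arg : expr.args) {
                sweep(arg, operatorDescription);
            }
        }

        public static void main(String[] args) {
            Expr ok = new Expr("field-access", true, Collections.<Expr> emptyList());
            Expr bad = new Expr("current-time", false, Collections.<Expr> emptyList());
            sweep(new Expr("and", true, Arrays.asList(ok)), "order");          // passes silently
            try {
                sweep(new Expr("and", true, Arrays.asList(ok, bad)), "order"); // throws
            } catch (IllegalStateException e) {
                System.out.println(e.getMessage());
            }
        }
    }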

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
new file mode 100644
index 0000000..6defc07
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
@@ -0,0 +1,248 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.feeds.FeedActivity.FeedActivityDetails;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.declared.FeedDataSource;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.optimizer.rules.util.EquivalenceClassUtils;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+            return false;
+        }
+        UnnestOperator unnest = (UnnestOperator) op;
+        ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue();
+
+        if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+            FunctionIdentifier fid = f.getFunctionIdentifier();
+
+            if (fid.equals(AsterixBuiltinFunctions.DATASET)) {
+                if (unnest.getPositionalVariable() != null) {
+                    // TODO remove this after enabling the support of positional variables in data scan
+                    throw new AlgebricksException("No positional variables are allowed over datasets.");
+                }
+                ILogicalExpression expr = f.getArguments().get(0).getValue();
+                if (expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                    return false;
+                }
+                ConstantExpression ce = (ConstantExpression) expr;
+                IAlgebricksConstantValue acv = ce.getValue();
+                if (!(acv instanceof AsterixConstantValue)) {
+                    return false;
+                }
+                AsterixConstantValue acv2 = (AsterixConstantValue) acv;
+                if (acv2.getObject().getType().getTypeTag() != ATypeTag.STRING) {
+                    return false;
+                }
+                String datasetArg = ((AString) acv2.getObject()).getStringValue();
+
+                AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+                Pair<String, String> datasetReference = parseDatasetReference(metadataProvider, datasetArg);
+                String dataverseName = datasetReference.first;
+                String datasetName = datasetReference.second;
+                Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+                if (dataset == null) {
+                    throw new AlgebricksException("Could not find dataset " + datasetName + " in dataverse "
+                            + dataverseName);
+                }
+
+                AqlSourceId asid = new AqlSourceId(dataverseName, datasetName);
+
+                ArrayList<LogicalVariable> v = new ArrayList<LogicalVariable>();
+
+                if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+                    int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+                    for (int i = 0; i < numPrimaryKeys; i++) {
+                        v.add(context.newVar());
+                    }
+                }
+                v.add(unnest.getVariable());
+                AqlDataSource dataSource = metadataProvider.findDataSource(asid);
+                DataSourceScanOperator scan = new DataSourceScanOperator(v, dataSource);
+                List<Mutable<ILogicalOperator>> scanInpList = scan.getInputs();
+                scanInpList.addAll(unnest.getInputs());
+                opRef.setValue(scan);
+                addPrimaryKey(v, context);
+                context.computeAndSetTypeEnvironmentForOperator(scan);
+
+                // Adds equivalence classes --- one equivalence class between each primary
+                // key variable and its corresponding record field-access expression.
+                IAType[] schemaTypes = dataSource.getSchemaTypes();
+                ARecordType recordType = (ARecordType) schemaTypes[schemaTypes.length - 1];
+                EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(scan, v, recordType, dataset, context);
+                return true;
+            }
+
+            if (fid.equals(AsterixBuiltinFunctions.FEED_COLLECT)) {
+                if (unnest.getPositionalVariable() != null) {
+                    throw new AlgebricksException("No positional variables are allowed over datasets.");
+                }
+
+                String dataverse = getStringArgument(f, 0);
+                String sourceFeedName = getStringArgument(f, 1);
+                String getTargetFeed = getStringArgument(f, 2);
+                String subscriptionLocation = getStringArgument(f, 3);
+                String targetDataset = getStringArgument(f, 4);
+                String outputType = getStringArgument(f, 5);
+
+                AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+
+                AqlSourceId asid = new AqlSourceId(dataverse, getTargetFeed);
+                String policyName = metadataProvider.getConfig().get(FeedActivityDetails.FEED_POLICY_NAME);
+                FeedPolicy policy = metadataProvider.findFeedPolicy(dataverse, policyName);
+                if (policy == null) {
+                    policy = BuiltinFeedPolicies.getFeedPolicy(policyName);
+                    if (policy == null) {
+                        throw new AlgebricksException("Unknown feed policy:" + policyName);
+                    }
+                }
+
+                ArrayList<LogicalVariable> v = new ArrayList<LogicalVariable>();
+                v.add(unnest.getVariable());
+
+                String csLocations = metadataProvider.getConfig().get(FeedActivityDetails.COLLECT_LOCATIONS);
+                DataSourceScanOperator scan = new DataSourceScanOperator(v, createFeedDataSource(asid, targetDataset,
+                        sourceFeedName, subscriptionLocation, metadataProvider, policy, outputType, csLocations));
+
+                List<Mutable<ILogicalOperator>> scanInpList = scan.getInputs();
+                scanInpList.addAll(unnest.getInputs());
+                opRef.setValue(scan);
+                addPrimaryKey(v, context);
+                context.computeAndSetTypeEnvironmentForOperator(scan);
+
+                return true;
+            }
+
+        }
+
+        return false;
+    }
+
+    public void addPrimaryKey(List<LogicalVariable> scanVariables, IOptimizationContext context) {
+        int n = scanVariables.size();
+        List<LogicalVariable> head = new ArrayList<LogicalVariable>(scanVariables.subList(0, n - 1));
+        List<LogicalVariable> tail = new ArrayList<LogicalVariable>(1);
+        tail.add(scanVariables.get(n - 1));
+        FunctionalDependency pk = new FunctionalDependency(head, tail);
+        context.addPrimaryKey(pk);
+    }
+
+    private AqlDataSource createFeedDataSource(AqlSourceId aqlId, String targetDataset, String sourceFeedName,
+            String subscriptionLocation, AqlMetadataProvider metadataProvider, FeedPolicy feedPolicy,
+            String outputType, String locations) throws AlgebricksException {
+        if (!aqlId.getDataverseName().equals(
+                metadataProvider.getDefaultDataverse() == null ? null : metadataProvider.getDefaultDataverse()
+                        .getDataverseName())) {
+            return null;
+        }
+        IAType feedOutputType = metadataProvider.findType(aqlId.getDataverseName(), outputType);
+        Feed sourceFeed = metadataProvider.findFeed(aqlId.getDataverseName(), sourceFeedName);
+
+        FeedDataSource feedDataSource = new FeedDataSource(aqlId, targetDataset, feedOutputType,
+                AqlDataSource.AqlDataSourceType.FEED, sourceFeed.getFeedId(), sourceFeed.getFeedType(),
+                ConnectionLocation.valueOf(subscriptionLocation), locations.split(","));
+        feedDataSource.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy);
+        return feedDataSource;
+    }
+
+    private Pair<String, String> parseDatasetReference(AqlMetadataProvider metadataProvider, String datasetArg)
+            throws AlgebricksException {
+        String[] datasetNameComponents = datasetArg.split("\\.");
+        String dataverseName;
+        String datasetName;
+        if (datasetNameComponents.length == 1) {
+            Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
+            if (defaultDataverse == null) {
+                throw new AlgebricksException("Unresolved dataset " + datasetArg + " Dataverse not specified.");
+            }
+            dataverseName = defaultDataverse.getDataverseName();
+            datasetName = datasetNameComponents[0];
+        } else {
+            dataverseName = datasetNameComponents[0];
+            datasetName = datasetNameComponents[1];
+        }
+        return new Pair<String, String>(dataverseName, datasetName);
+    }
+
+    private String getStringArgument(AbstractFunctionCallExpression f, int index) {
+
+        ILogicalExpression expr = f.getArguments().get(index).getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            return null;
+        }
+        ConstantExpression ce = (ConstantExpression) expr;
+        IAlgebricksConstantValue acv = ce.getValue();
+        if (!(acv instanceof AsterixConstantValue)) {
+            return null;
+        }
+        AsterixConstantValue acv2 = (AsterixConstantValue) acv;
+        if (acv2.getObject().getType().getTypeTag() != ATypeTag.STRING) {
+            return null;
+        }
+        String argument = ((AString) acv2.getObject()).getStringValue();
+        return argument;
+    }
+
+}
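
parseDatasetReference in the new rule above resolves a dataset argument either as a qualified "dataverse.dataset" name or as a bare dataset name against the default dataverse. A self-contained sketch of that resolution using plain strings (no metadata provider) is below; the dataverse and dataset names in main are only examples.

    public class DatasetReferenceSketch {

        // Mirrors the split performed by parseDatasetReference: a qualified name wins,
        // otherwise the default dataverse (if any) supplies the dataverse component.
        static String[] parse(String datasetArg, String defaultDataverse) {
            String[] parts = datasetArg.split("\\.");
            if (parts.length == 1) {
                if (defaultDataverse == null) {
                    throw new IllegalArgumentException(
                            "Unresolved dataset " + datasetArg + ": dataverse not specified.");
                }
                return new String[] { defaultDataverse, parts[0] };
            }
            return new String[] { parts[0], parts[1] };
        }

        public static void main(String[] args) {
            String[] qualified = parse("TinySocial.FacebookUsers", null);
            System.out.println(qualified[0] + " / " + qualified[1]); // TinySocial / FacebookUsers
            String[] bare = parse("FacebookUsers", "TinySocial");
            System.out.println(bare[0] + " / " + bare[1]);           // TinySocial / FacebookUsers
        }
    }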

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
new file mode 100644
index 0000000..b6d07a3
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
@@ -0,0 +1,747 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AOrderedList;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
+import edu.uci.ics.asterix.optimizer.rules.am.OptimizableOperatorSubTree.DataSourceType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Class that embodies the commonalities between rewrite rules for access
+ * methods.
+ */
+public abstract class AbstractIntroduceAccessMethodRule implements IAlgebraicRewriteRule {
+
+    private AqlMetadataProvider metadataProvider;
+
+    public abstract Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods();
+
+    protected static void registerAccessMethod(IAccessMethod accessMethod,
+            Map<FunctionIdentifier, List<IAccessMethod>> accessMethods) {
+        List<FunctionIdentifier> funcs = accessMethod.getOptimizableFunctions();
+        for (FunctionIdentifier funcIdent : funcs) {
+            List<IAccessMethod> l = accessMethods.get(funcIdent);
+            if (l == null) {
+                l = new ArrayList<IAccessMethod>();
+                accessMethods.put(funcIdent, l);
+            }
+            l.add(accessMethod);
+        }
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    protected void setMetadataDeclarations(IOptimizationContext context) {
+        metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+    }
+
+    protected void fillSubTreeIndexExprs(OptimizableOperatorSubTree subTree,
+            Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs, IOptimizationContext context)
+            throws AlgebricksException {
+        Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
+        // Check applicability of indexes by access method type.
+        while (amIt.hasNext()) {
+            Map.Entry<IAccessMethod, AccessMethodAnalysisContext> entry = amIt.next();
+            AccessMethodAnalysisContext amCtx = entry.getValue();
+            // For the current access method type, map variables to applicable
+            // indexes.
+            fillAllIndexExprs(subTree, amCtx, context);
+        }
+    }
+
+    protected void pruneIndexCandidates(Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs)
+            throws AlgebricksException {
+        Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
+        // Check applicability of indexes by access method type.
+        while (amIt.hasNext()) {
+            Map.Entry<IAccessMethod, AccessMethodAnalysisContext> entry = amIt.next();
+            AccessMethodAnalysisContext amCtx = entry.getValue();
+            pruneIndexCandidates(entry.getKey(), amCtx);
+            // Remove access methods for which there are definitely no
+            // applicable indexes.
+            if (amCtx.indexExprsAndVars.isEmpty()) {
+                amIt.remove();
+            }
+        }
+    }
+
+    /**
+     * Simply picks the first index that it finds. TODO: Improve this decision
+     * process by making it more systematic.
+     */
+    protected Pair<IAccessMethod, Index> chooseIndex(Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs) {
+        Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
+        while (amIt.hasNext()) {
+            Map.Entry<IAccessMethod, AccessMethodAnalysisContext> amEntry = amIt.next();
+            AccessMethodAnalysisContext analysisCtx = amEntry.getValue();
+            Iterator<Map.Entry<Index, List<Pair<Integer, Integer>>>> indexIt = analysisCtx.indexExprsAndVars.entrySet()
+                    .iterator();
+            if (indexIt.hasNext()) {
+                Map.Entry<Index, List<Pair<Integer, Integer>>> indexEntry = indexIt.next();
+                // Avoid the case where the chosen access method and the chosen
+                // index type do not match.
+                // Allowed Case: [BTreeAccessMethod , IndexType.BTREE],
+                //               [RTreeAccessMethod , IndexType.RTREE],
+                //               [InvertedIndexAccessMethod,
+                //                 IndexType.SINGLE_PARTITION_WORD_INVIX ||
+                //                           SINGLE_PARTITION_NGRAM_INVIX ||
+                //                           LENGTH_PARTITIONED_WORD_INVIX ||
+                //                           LENGTH_PARTITIONED_NGRAM_INVIX]
+                IAccessMethod chosenAccessMethod = amEntry.getKey();
+                Index chosenIndex = indexEntry.getKey();
+                boolean isKeywordOrNgramIndexChosen = false;
+                if (chosenIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
+                        || chosenIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX
+                        || chosenIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
+                        || chosenIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX)
+                    isKeywordOrNgramIndexChosen = true;
+                if ((chosenAccessMethod == BTreeAccessMethod.INSTANCE && chosenIndex.getIndexType() != IndexType.BTREE)
+                        || (chosenAccessMethod == RTreeAccessMethod.INSTANCE && chosenIndex.getIndexType() != IndexType.RTREE)
+                        || (chosenAccessMethod == InvertedIndexAccessMethod.INSTANCE && !isKeywordOrNgramIndexChosen)) {
+                    continue;
+                }
+                return new Pair<IAccessMethod, Index>(chosenAccessMethod, chosenIndex);
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Removes irrelevant access method candidates, based on whether the
+     * expressions in the query match those in the index. For example, some
+     * index may require all its expressions to be matched, and some indexes may
+     * only require a match on a prefix of fields to be applicable. This method
+     * removes all index candidates in indexExprs that are definitely not
+     * applicable according to the expressions involved.
+     *
+     * @throws AlgebricksException
+     */
+    public void pruneIndexCandidates(IAccessMethod accessMethod, AccessMethodAnalysisContext analysisCtx)
+            throws AlgebricksException {
+        Iterator<Map.Entry<Index, List<Pair<Integer, Integer>>>> indexExprAndVarIt = analysisCtx.indexExprsAndVars
+                .entrySet().iterator();
+        // Used to keep track of matched expressions (added for prefix search)
+        int numMatchedKeys = 0;
+        ArrayList<Integer> matchedExpressions = new ArrayList<Integer>();
+        while (indexExprAndVarIt.hasNext()) {
+            Map.Entry<Index, List<Pair<Integer, Integer>>> indexExprAndVarEntry = indexExprAndVarIt.next();
+            Index index = indexExprAndVarEntry.getKey();
+            boolean allUsed = true;
+            int lastFieldMatched = -1;
+            boolean foundKeyField = false;
+            matchedExpressions.clear();
+            numMatchedKeys = 0;
+
+            // Remove the candidate if the dataset is a metadata dataset and the index is secondary
+            // TODO: fix the way secondary metadata indexes are implemented and remove this check
+            if (accessMethod.matchPrefixIndexExprs()) {
+                if (index.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)
+                        && !index.isPrimaryIndex()) {
+                    indexExprAndVarIt.remove();
+                    continue;
+                }
+            }
+            for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
+                List<String> keyField = index.getKeyFieldNames().get(i);
+                final IAType keyType = index.getKeyFieldTypes().get(i);
+                Iterator<Pair<Integer, Integer>> exprsAndVarIter = indexExprAndVarEntry.getValue().iterator();
+                while (exprsAndVarIter.hasNext()) {
+                    final Pair<Integer, Integer> exprAndVarIdx = exprsAndVarIter.next();
+                    final IOptimizableFuncExpr optFuncExpr = analysisCtx.matchedFuncExprs.get(exprAndVarIdx.first);
+                    // If expr is not optimizable by concrete index then remove
+                    // expr and continue.
+                    if (!accessMethod.exprIsOptimizable(index, optFuncExpr)) {
+                        exprsAndVarIter.remove();
+                        continue;
+                    }
+                    boolean typeMatch = true;
+                    //Prune indexes based on field types
+                    List<IAType> indexedTypes = new ArrayList<IAType>();
+                    //retrieve types of expressions joined/selected with an indexed field
+                    for (int j = 0; j < optFuncExpr.getNumLogicalVars(); j++)
+                        if (j != exprAndVarIdx.second)
+                            indexedTypes.add(optFuncExpr.getFieldType(j));
+                    //add constants in case of select
+                    if (indexedTypes.size() < 2 && optFuncExpr.getNumLogicalVars() == 1) {
+                        indexedTypes.add((IAType) AqlExpressionTypeComputer.INSTANCE.getType(new ConstantExpression(
+                                optFuncExpr.getConstantVal(0)), null, null));
+                    }
+                    //infer type of logicalExpr based on index keyType
+                    indexedTypes.add((IAType) AqlExpressionTypeComputer.INSTANCE.getType(
+                            optFuncExpr.getLogicalExpr(exprAndVarIdx.second), null, new IVariableTypeEnvironment() {
+
+                                @Override
+                                public Object getVarType(LogicalVariable var) throws AlgebricksException {
+                                    if (var.equals(optFuncExpr.getSourceVar(exprAndVarIdx.second)))
+                                        return keyType;
+                                    throw new IllegalArgumentException();
+                                }
+
+                                @Override
+                                public Object getVarType(LogicalVariable var, List<LogicalVariable> nonNullVariables,
+                                        List<List<LogicalVariable>> correlatedNullableVariableLists)
+                                        throws AlgebricksException {
+                                    if (var.equals(optFuncExpr.getSourceVar(exprAndVarIdx.second)))
+                                        return keyType;
+                                    throw new IllegalArgumentException();
+                                }
+
+                                @Override
+                                public void setVarType(LogicalVariable var, Object type) {
+                                    throw new IllegalArgumentException();
+                                }
+
+                                @Override
+                                public Object getType(ILogicalExpression expr) throws AlgebricksException {
+                                    return AqlExpressionTypeComputer.INSTANCE.getType(expr, null, this);
+                                }
+
+                                @Override
+                                public boolean substituteProducedVariable(LogicalVariable v1, LogicalVariable v2)
+                                        throws AlgebricksException {
+                                    throw new IllegalArgumentException();
+                                }
+                            }));
+
+                    //for the case when jaccard similarity is measured between ordered & unordered lists
+                    boolean jaccardSimilarity = optFuncExpr.getFuncExpr().getFunctionIdentifier().getName()
+                            .startsWith("similarity-jaccard-check");
+
+                    for (int j = 0; j < indexedTypes.size(); j++)
+                        for (int k = j + 1; k < indexedTypes.size(); k++)
+                            typeMatch &= isMatched(indexedTypes.get(j), indexedTypes.get(k), jaccardSimilarity);
+
+                    // Check if any field name in the optFuncExpr matches.
+                    if (optFuncExpr.findFieldName(keyField) != -1) {
+                        foundKeyField = typeMatch
+                                && optFuncExpr.getOperatorSubTree(exprAndVarIdx.second).hasDataSourceScan();
+                        if (foundKeyField) {
+                            matchedExpressions.add(exprAndVarIdx.first);
+                            numMatchedKeys++;
+                            if (lastFieldMatched == i - 1) {
+                                lastFieldMatched = i;
+                            }
+                            break;
+                        }
+                    }
+                }
+                if (!foundKeyField) {
+                    allUsed = false;
+                    // if any expression was matched, remove the non-matched expressions, otherwise the index is unusable
+                    if (lastFieldMatched >= 0) {
+                        exprsAndVarIter = indexExprAndVarEntry.getValue().iterator();
+                        while (exprsAndVarIter.hasNext()) {
+                            if (!matchedExpressions.contains(exprsAndVarIter.next().first)) {
+                                exprsAndVarIter.remove();
+                            }
+                        }
+                    }
+                    break;
+                }
+            }
+            // If the access method requires all exprs to be matched but they
+            // are not, remove this candidate.
+            if (!allUsed && accessMethod.matchAllIndexExprs()) {
+                indexExprAndVarIt.remove();
+                continue;
+            }
+            // A prefix of the index exprs may have been matched.
+            if (accessMethod.matchPrefixIndexExprs()) {
+                if (lastFieldMatched < 0) {
+                    indexExprAndVarIt.remove();
+                    continue;
+                }
+            }
+            analysisCtx.indexNumMatchedKeys.put(index, new Integer(numMatchedKeys));
+        }
+    }
+
+    private boolean isMatched(IAType type1, IAType type2, boolean useListDomain) throws AlgebricksException {
+        if (ATypeHierarchy.isSameTypeDomain(Index.getNonNullableType(type1).first.getTypeTag(),
+                Index.getNonNullableType(type2).first.getTypeTag(), useListDomain))
+            return true;
+        return ATypeHierarchy.canPromote(Index.getNonNullableType(type1).first.getTypeTag(),
+                Index.getNonNullableType(type2).first.getTypeTag());
+    }
+
+    /**
+     * Analyzes the given selection condition, filling analyzedAMs with
+     * applicable access method types. At this point we are not yet consulting
+     * the metadata whether an actual index exists or not.
+     */
+    protected boolean analyzeCondition(ILogicalExpression cond, List<AbstractLogicalOperator> assignsAndUnnests,
+            Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs) {
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) cond;
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+        // Don't consider optimizing a disjunctive condition with an index (too
+        // complicated for now).
+        if (funcIdent == AlgebricksBuiltinFunctions.OR) {
+            return false;
+        }
+        boolean found = analyzeFunctionExpr(funcExpr, assignsAndUnnests, analyzedAMs);
+        for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
+            ILogicalExpression argExpr = arg.getValue();
+            if (argExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                continue;
+            }
+            AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
+            boolean matchFound = analyzeFunctionExpr(argFuncExpr, assignsAndUnnests, analyzedAMs);
+            found = found || matchFound;
+        }
+        return found;
+    }
+
+    /**
+     * Finds applicable access methods for the given function expression based
+     * on the function identifier, and an analysis of the function's arguments.
+     * Updates the analyzedAMs accordingly.
+     */
+    protected boolean analyzeFunctionExpr(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs) {
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+        if (funcIdent == AlgebricksBuiltinFunctions.AND) {
+            return false;
+        }
+        // Retrieves the list of access methods that are relevant based on the
+        // funcIdent.
+        List<IAccessMethod> relevantAMs = getAccessMethods().get(funcIdent);
+        if (relevantAMs == null) {
+            return false;
+        }
+        boolean atLeastOneMatchFound = false;
+        // Place holder for a new analysis context in case we need one.
+        AccessMethodAnalysisContext newAnalysisCtx = new AccessMethodAnalysisContext();
+        for (IAccessMethod accessMethod : relevantAMs) {
+            AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(accessMethod);
+            // Use the current place holder.
+            if (analysisCtx == null) {
+                analysisCtx = newAnalysisCtx;
+            }
+            // Analyzes the funcExpr's arguments to see if the accessMethod is
+            // truly applicable.
+            boolean matchFound = accessMethod.analyzeFuncExprArgs(funcExpr, assignsAndUnnests, analysisCtx);
+            if (matchFound) {
+                // If we've used the current new context placeholder, replace it
+                // with a new one.
+                if (analysisCtx == newAnalysisCtx) {
+                    analyzedAMs.put(accessMethod, analysisCtx);
+                    newAnalysisCtx = new AccessMethodAnalysisContext();
+                }
+                atLeastOneMatchFound = true;
+            }
+        }
+        return atLeastOneMatchFound;
+    }
+
+    /**
+     * Finds secondary indexes whose keys include fieldName, and adds a mapping
+     * in analysisCtx.indexExprsAndVars from each such index to the corresponding
+     * optimizable function expression.
+     *
+     * @return true if at least one candidate index was added to analysisCtx,
+     *         false otherwise
+     * @throws AlgebricksException
+     */
+    protected boolean fillIndexExprs(List<Index> datasetIndexes, List<String> fieldName, IAType fieldType,
+            IOptimizableFuncExpr optFuncExpr, int matchedFuncExprIndex, int varIdx,
+            OptimizableOperatorSubTree matchedSubTree, AccessMethodAnalysisContext analysisCtx)
+            throws AlgebricksException {
+        List<Index> indexCandidates = new ArrayList<Index>();
+        // Add an index to the candidates if one of the indexed fields is
+        // fieldName
+        for (Index index : datasetIndexes) {
+            // Need to also verify the index is pending no op
+            if (index.getKeyFieldNames().contains(fieldName) && index.getPendingOp() == IMetadataEntity.PENDING_NO_OP) {
+                indexCandidates.add(index);
+                if (optFuncExpr.getFieldType(varIdx) == BuiltinType.ANULL
+                        || optFuncExpr.getFieldType(varIdx) == BuiltinType.ANY)
+                    optFuncExpr.setFieldType(varIdx,
+                            index.getKeyFieldTypes().get(index.getKeyFieldNames().indexOf(fieldName)));
+                analysisCtx.addIndexExpr(matchedSubTree.dataset, index, matchedFuncExprIndex, varIdx);
+            }
+        }
+        // True iff at least one index candidate was found for fieldName.
+        return !indexCandidates.isEmpty();
+    }
+
+    protected void fillAllIndexExprs(OptimizableOperatorSubTree subTree, AccessMethodAnalysisContext analysisCtx,
+            IOptimizationContext context) throws AlgebricksException {
+        int optFuncExprIndex = 0;
+        List<Index> datasetIndexes = new ArrayList<Index>();
+        if (subTree.dataSourceType != DataSourceType.COLLECTION_SCAN)
+            datasetIndexes = metadataProvider.getDatasetIndexes(subTree.dataset.getDataverseName(),
+                    subTree.dataset.getDatasetName());
+        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
+            // Try to match variables from optFuncExpr to assigns or unnests.
+            for (int assignOrUnnestIndex = 0; assignOrUnnestIndex < subTree.assignsAndUnnests.size(); assignOrUnnestIndex++) {
+                AbstractLogicalOperator op = subTree.assignsAndUnnests.get(assignOrUnnestIndex);
+                if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                    AssignOperator assignOp = (AssignOperator) op;
+                    List<LogicalVariable> varList = assignOp.getVariables();
+                    for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
+                        LogicalVariable var = varList.get(varIndex);
+                        int optVarIndex = optFuncExpr.findLogicalVar(var);
+                        // No matching var in optFuncExpr.
+                        if (optVarIndex == -1) {
+                            continue;
+                        }
+                        // At this point we have matched the optimizable func
+                        // expr at optFuncExprIndex to an assigned variable.
+                        // Remember matching subtree.
+                        optFuncExpr.setOptimizableSubTree(optVarIndex, subTree);
+                        List<String> fieldName = getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex,
+                                varIndex, subTree.recordType, optVarIndex, optFuncExpr.getFuncExpr().getArguments()
+                                        .get(optVarIndex).getValue());
+                        if (fieldName == null) {
+                            continue;
+                        }
+                        IAType fieldType = (IAType) context.getOutputTypeEnvironment(assignOp).getType(
+                                optFuncExpr.getLogicalExpr(optVarIndex));
+                        // Set the fieldName in the corresponding matched
+                        // function expression.
+                        optFuncExpr.setFieldName(optVarIndex, fieldName);
+                        optFuncExpr.setFieldType(optVarIndex, fieldType);
+
+                        setTypeTag(context, subTree, optFuncExpr, optVarIndex);
+                        if (subTree.hasDataSource()) {
+                            fillIndexExprs(datasetIndexes, fieldName, fieldType, optFuncExpr, optFuncExprIndex,
+                                    optVarIndex, subTree, analysisCtx);
+                        }
+                    }
+                } else {
+                    UnnestOperator unnestOp = (UnnestOperator) op;
+                    LogicalVariable var = unnestOp.getVariable();
+                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
+                    // No matching var in optFuncExpr.
+                    if (funcVarIndex == -1) {
+                        continue;
+                    }
+                    // At this point we have matched the optimizable func expr
+                    // at optFuncExprIndex to an unnest variable.
+                    // Remember matching subtree.
+                    optFuncExpr.setOptimizableSubTree(funcVarIndex, subTree);
+                    List<String> fieldName = null;
+                    if (subTree.dataSourceType == DataSourceType.COLLECTION_SCAN) {
+                        optFuncExpr.setLogicalExpr(funcVarIndex, new VariableReferenceExpression(var));
+                    } else {
+                        fieldName = getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex, 0,
+                                subTree.recordType, funcVarIndex,
+                                optFuncExpr.getFuncExpr().getArguments().get(funcVarIndex).getValue());
+                        if (fieldName == null) {
+                            continue;
+                        }
+                    }
+                    IAType fieldType = (IAType) context.getOutputTypeEnvironment(unnestOp).getType(
+                            optFuncExpr.getLogicalExpr(funcVarIndex));
+                    // Set the fieldName in the corresponding matched function
+                    // expression.
+                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
+                    optFuncExpr.setFieldType(funcVarIndex, fieldType);
+
+                    setTypeTag(context, subTree, optFuncExpr, funcVarIndex);
+                    if (subTree.hasDataSource()) {
+                        fillIndexExprs(datasetIndexes, fieldName, fieldType, optFuncExpr, optFuncExprIndex,
+                                funcVarIndex, subTree, analysisCtx);
+                    }
+                }
+            }
+
+            // Try to match variables from optFuncExpr to datasourcescan if not
+            // already matched in assigns.
+            List<LogicalVariable> dsVarList = subTree.getDataSourceVariables();
+            for (int varIndex = 0; varIndex < dsVarList.size(); varIndex++) {
+                LogicalVariable var = dsVarList.get(varIndex);
+                int funcVarIndex = optFuncExpr.findLogicalVar(var);
+                // No matching var in optFuncExpr.
+                if (funcVarIndex == -1) {
+                    continue;
+                }
+                // The variable value is one of the partitioning fields.
+                List<String> fieldName = DatasetUtils.getPartitioningKeys(subTree.dataset).get(varIndex);
+                IAType fieldType = (IAType) context.getOutputTypeEnvironment(subTree.dataSourceRef.getValue())
+                        .getVarType(var);
+                // Set the fieldName in the corresponding matched function
+                // expression, and remember matching subtree.
+                optFuncExpr.setFieldName(funcVarIndex, fieldName);
+                optFuncExpr.setOptimizableSubTree(funcVarIndex, subTree);
+                optFuncExpr.setSourceVar(funcVarIndex, var);
+                optFuncExpr.setLogicalExpr(funcVarIndex, new VariableReferenceExpression(var));
+                setTypeTag(context, subTree, optFuncExpr, funcVarIndex);
+                if (subTree.hasDataSourceScan()) {
+                    fillIndexExprs(datasetIndexes, fieldName, fieldType, optFuncExpr, optFuncExprIndex, funcVarIndex,
+                            subTree, analysisCtx);
+                }
+            }
+            optFuncExprIndex++;
+        }
+    }
+
+    private void setTypeTag(IOptimizationContext context, OptimizableOperatorSubTree subTree,
+            IOptimizableFuncExpr optFuncExpr, int funcVarIndex) throws AlgebricksException {
+        // Set the typeTag if the type is not null
+        IAType type = (IAType) context.getOutputTypeEnvironment(subTree.root).getVarType(
+                optFuncExpr.getLogicalVar(funcVarIndex));
+        optFuncExpr.setFieldType(funcVarIndex, type);
+    }
+
+    /**
+     * Returns the field name corresponding to the assigned variable at
+     * varIndex. Returns null if the expr at varIndex cannot be resolved to a
+     * field-access function after following a set of allowed functions.
+     *
+     * @throws AlgebricksException
+     */
+    protected List<String> getFieldNameFromSubTree(IOptimizableFuncExpr optFuncExpr,
+            OptimizableOperatorSubTree subTree, int opIndex, int assignVarIndex, ARecordType recordType,
+            int funcVarIndex, ILogicalExpression parentFuncExpr) throws AlgebricksException {
+        // Get expression corresponding to opVar at varIndex.
+        AbstractLogicalExpression expr = null;
+        AbstractFunctionCallExpression childFuncExpr = null;
+        AbstractLogicalOperator op = subTree.assignsAndUnnests.get(opIndex);
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            expr = (AbstractLogicalExpression) assignOp.getExpressions().get(assignVarIndex).getValue();
+            childFuncExpr = (AbstractFunctionCallExpression) expr;
+        } else {
+            UnnestOperator unnestOp = (UnnestOperator) op;
+            expr = (AbstractLogicalExpression) unnestOp.getExpressionRef().getValue();
+            if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                return null;
+            }
+            childFuncExpr = (AbstractFunctionCallExpression) expr;
+            if (childFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
+                return null;
+            }
+            expr = (AbstractLogicalExpression) childFuncExpr.getArguments().get(0).getValue();
+        }
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return null;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+
+        boolean isByName = false;
+        boolean isFieldAccess = false;
+        String fieldName = null;
+        List<String> nestedAccessFieldName = null;
+        int fieldIndex = -1;
+        if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME) {
+            ILogicalExpression nameArg = funcExpr.getArguments().get(1).getValue();
+            if (nameArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                return null;
+            }
+            ConstantExpression constExpr = (ConstantExpression) nameArg;
+            fieldName = ((AString) ((AsterixConstantValue) constExpr.getValue()).getObject()).getStringValue();
+            isFieldAccess = true;
+            isByName = true;
+        } else if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX) {
+            ILogicalExpression idxArg = funcExpr.getArguments().get(1).getValue();
+            if (idxArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                return null;
+            }
+            ConstantExpression constExpr = (ConstantExpression) idxArg;
+            fieldIndex = ((AInt32) ((AsterixConstantValue) constExpr.getValue()).getObject()).getIntegerValue();
+            isFieldAccess = true;
+        } else if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_NESTED) {
+            ILogicalExpression nameArg = funcExpr.getArguments().get(1).getValue();
+            if (nameArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                return null;
+            }
+            ConstantExpression constExpr = (ConstantExpression) nameArg;
+            AOrderedList orderedNestedFieldName = (AOrderedList) ((AsterixConstantValue) constExpr.getValue())
+                    .getObject();
+            nestedAccessFieldName = new ArrayList<String>();
+            for (int i = 0; i < orderedNestedFieldName.size(); i++) {
+                nestedAccessFieldName.add(((AString) orderedNestedFieldName.getItem(i)).getStringValue());
+            }
+            isFieldAccess = true;
+            isByName = true;
+        }
+        if (isFieldAccess) {
+            optFuncExpr.setLogicalExpr(funcVarIndex, parentFuncExpr);
+            int[] assignAndExpressionIndexes = null;
+
+            //go forward through nested assigns until you find the relevant one
+            for (int i = opIndex + 1; i < subTree.assignsAndUnnests.size(); i++) {
+                AbstractLogicalOperator subOp = subTree.assignsAndUnnests.get(i);
+                List<LogicalVariable> varList;
+
+                if (subOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                    //Nested was an assign
+                    varList = ((AssignOperator) subOp).getVariables();
+                } else if (subOp.getOperatorTag() == LogicalOperatorTag.UNNEST) {
+                    //Nested was an unnest
+                    varList = ((UnnestOperator) subOp).getVariables();
+                } else {
+                    break;
+                }
+
+                //Go through variables in assign to check for match
+                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
+                    LogicalVariable var = varList.get(varIndex);
+                    ArrayList<LogicalVariable> parentVars = new ArrayList<LogicalVariable>();
+                    expr.getUsedVariables(parentVars);
+
+                    if (parentVars.contains(var)) {
+                        //Found the variable we are looking for.
+                        //return assign and index of expression
+                        int[] returnValues = { i, varIndex };
+                        assignAndExpressionIndexes = returnValues;
+                    }
+                }
+            }
+            if (assignAndExpressionIndexes != null && assignAndExpressionIndexes[0] > -1) {
+                //We found the nested assign
+
+                //Recursive call on nested assign
+                List<String> parentFieldNames = getFieldNameFromSubTree(optFuncExpr, subTree,
+                        assignAndExpressionIndexes[0], assignAndExpressionIndexes[1], recordType, funcVarIndex,
+                        parentFuncExpr);
+
+                if (parentFieldNames == null) {
+                    //Nested assign was not a field access.
+                    //We will not use an index.
+                    return null;
+                }
+
+                if (!isByName) {
+                    try {
+                        fieldName = ((ARecordType) recordType.getSubFieldType(parentFieldNames)).getFieldNames()[fieldIndex];
+                    } catch (IOException e) {
+                        throw new AlgebricksException(e);
+                    }
+                }
+                optFuncExpr.setSourceVar(funcVarIndex, ((AssignOperator) op).getVariables().get(assignVarIndex));
+                //add fieldName to the nested fieldName, return
+                if (nestedAccessFieldName != null) {
+                    for (int i = 0; i < nestedAccessFieldName.size(); i++) {
+                        parentFieldNames.add(nestedAccessFieldName.get(i));
+                    }
+                } else {
+                    parentFieldNames.add(fieldName);
+                }
+                return (parentFieldNames);
+            }
+
+            optFuncExpr.setSourceVar(funcVarIndex, ((AssignOperator) op).getVariables().get(assignVarIndex));
+            //no nested assign, we are at the lowest level.
+            if (isByName) {
+                if (nestedAccessFieldName != null) {
+                    return nestedAccessFieldName;
+                }
+                return new ArrayList<String>(Arrays.asList(fieldName));
+            }
+            return new ArrayList<String>(Arrays.asList(recordType.getFieldNames()[fieldIndex]));
+
+        }
+
+        if (funcIdent != AsterixBuiltinFunctions.WORD_TOKENS && funcIdent != AsterixBuiltinFunctions.GRAM_TOKENS
+                && funcIdent != AsterixBuiltinFunctions.SUBSTRING
+                && funcIdent != AsterixBuiltinFunctions.SUBSTRING_BEFORE
+                && funcIdent != AsterixBuiltinFunctions.SUBSTRING_AFTER
+                && funcIdent != AsterixBuiltinFunctions.CREATE_POLYGON
+                && funcIdent != AsterixBuiltinFunctions.CREATE_MBR
+                && funcIdent != AsterixBuiltinFunctions.CREATE_RECTANGLE
+                && funcIdent != AsterixBuiltinFunctions.CREATE_CIRCLE
+                && funcIdent != AsterixBuiltinFunctions.CREATE_LINE
+                && funcIdent != AsterixBuiltinFunctions.CREATE_POINT) {
+            return null;
+        }
+        // We use a part of the field in edit distance computation
+        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK) {
+            optFuncExpr.setPartialField(true);
+        }
+        // We expect the function's argument to be a variable, otherwise we
+        // cannot apply an index.
+        ILogicalExpression argExpr = funcExpr.getArguments().get(0).getValue();
+        if (argExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return null;
+        }
+        LogicalVariable curVar = ((VariableReferenceExpression) argExpr).getVariableReference();
+        // We look for the assign or unnest operator that produces curVar below
+        // the current operator
+        for (int assignOrUnnestIndex = opIndex + 1; assignOrUnnestIndex < subTree.assignsAndUnnests.size(); assignOrUnnestIndex++) {
+            AbstractLogicalOperator curOp = subTree.assignsAndUnnests.get(assignOrUnnestIndex);
+            if (curOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                AssignOperator assignOp = (AssignOperator) curOp;
+                List<LogicalVariable> varList = assignOp.getVariables();
+                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
+                    LogicalVariable var = varList.get(varIndex);
+                    if (var.equals(curVar)) {
+                        optFuncExpr.setSourceVar(funcVarIndex, var);
+                        return getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex, varIndex, recordType,
+                                funcVarIndex, childFuncExpr);
+                    }
+                }
+            } else {
+                UnnestOperator unnestOp = (UnnestOperator) curOp;
+                LogicalVariable var = unnestOp.getVariable();
+                if (var.equals(curVar)) {
+                    getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex, 0, recordType, funcVarIndex,
+                            childFuncExpr);
+                }
+            }
+        }
+        return null;
+    }
+}
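
For readers skimming the diff, the conjunct-by-conjunct analysis done by analyzeCondition and analyzeFunctionExpr above can be summarized with a small, self-contained sketch: reject a top-level OR, skip the AND itself, and record, per relevant access method, the function expressions it matched. The classes and identifiers below are simplified stand-ins, not AsterixDB or Algebricks types.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ConditionAnalysisSketch {

    // Simplified stand-in for a function-call expression: an identifier plus nested arguments.
    static class FuncExpr {
        final String ident;
        final List<FuncExpr> args;

        FuncExpr(String ident, FuncExpr... children) {
            this.ident = ident;
            this.args = Arrays.asList(children);
        }
    }

    // Maps a function identifier to the access methods that consider it relevant.
    static final Map<String, List<String>> RELEVANT_AMS = new HashMap<>();
    static {
        RELEVANT_AMS.put("eq", Arrays.asList("BTreeAccessMethod"));
        RELEVANT_AMS.put("contains", Arrays.asList("InvertedIndexAccessMethod"));
    }

    // Mirrors analyzeCondition: give up on a disjunction, then analyze the top-level
    // function and each of its arguments, collecting matches per access method.
    static Map<String, List<FuncExpr>> analyzeCondition(FuncExpr cond) {
        Map<String, List<FuncExpr>> analyzed = new HashMap<>();
        if (cond.ident.equals("or")) {
            return analyzed; // disjunctions are not optimized with an index
        }
        analyzeFuncExpr(cond, analyzed);
        for (FuncExpr arg : cond.args) {
            analyzeFuncExpr(arg, analyzed);
        }
        return analyzed;
    }

    // Mirrors analyzeFunctionExpr: skip the AND itself, look up the access methods
    // relevant to the identifier, and record the expression for each of them.
    static void analyzeFuncExpr(FuncExpr expr, Map<String, List<FuncExpr>> analyzed) {
        if (expr.ident.equals("and")) {
            return;
        }
        List<String> ams = RELEVANT_AMS.get(expr.ident);
        if (ams == null) {
            return;
        }
        for (String am : ams) {
            analyzed.computeIfAbsent(am, k -> new ArrayList<>()).add(expr);
        }
    }

    public static void main(String[] args) {
        FuncExpr cond = new FuncExpr("and", new FuncExpr("eq"), new FuncExpr("contains"));
        // Prints the access methods that matched at least one conjunct.
        System.out.println(analyzeCondition(cond).keySet());
    }
}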

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java
new file mode 100644
index 0000000..cc24912
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+
+/**
+ * Context for analyzing the applicability of a single access method.
+ */
+public class AccessMethodAnalysisContext {
+
+    public List<IOptimizableFuncExpr> matchedFuncExprs = new ArrayList<IOptimizableFuncExpr>();
+
+    // Maps each candidate index to a list of (expression index, variable index) pairs:
+    // the first element points into matchedFuncExprs and the second identifies the
+    // matched variable inside that function expression.
+    public Map<Index, List<Pair<Integer, Integer>>> indexExprsAndVars = new TreeMap<Index, List<Pair<Integer, Integer>>>();
+
+    // Maps from index to the dataset it is indexing.
+    public Map<Index, Dataset> indexDatasetMap = new TreeMap<Index, Dataset>();
+    
+    // Maps from an index to the number of matched fields in the query plan (for performing prefix search)
+    public Map<Index, Integer> indexNumMatchedKeys = new TreeMap<Index, Integer>();
+
+    // variables for resetting null placeholder for left-outer-join
+    private Mutable<ILogicalOperator> lojGroupbyOpRef = null;
+    private ScalarFunctionCallExpression lojIsNullFuncInGroupBy = null;
+
+    public void addIndexExpr(Dataset dataset, Index index, Integer exprIndex, Integer varIndex) {
+        List<Pair<Integer, Integer>> exprs = indexExprsAndVars.get(index);
+        if (exprs == null) {
+            exprs = new ArrayList<Pair<Integer, Integer>>();
+            indexExprsAndVars.put(index, exprs);
+        }
+        exprs.add(new Pair<Integer, Integer>(exprIndex, varIndex));
+        indexDatasetMap.put(index, dataset);
+    }
+
+    public List<Pair<Integer, Integer>> getIndexExprs(Index index) {
+        return indexExprsAndVars.get(index);
+    }
+
+    public void setLOJGroupbyOpRef(Mutable<ILogicalOperator> opRef) {
+        lojGroupbyOpRef = opRef;
+    }
+
+    public Mutable<ILogicalOperator> getLOJGroupbyOpRef() {
+        return lojGroupbyOpRef;
+    }
+
+    public void setLOJIsNullFuncInGroupBy(ScalarFunctionCallExpression isNullFunc) {
+        lojIsNullFuncInGroupBy = isNullFunc;
+    }
+
+    public ScalarFunctionCallExpression getLOJIsNullFuncInGroupBy() {
+        return lojIsNullFuncInGroupBy;
+    }
+
+}
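
As a usage note, the accumulation pattern behind addIndexExpr is a map from an index to a growing list of (expression index, variable index) pairs. A minimal stand-alone sketch of that pattern, with plain strings standing in for Index and Dataset (the names are illustrative only):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IndexExprMapSketch {

    public static void main(String[] args) {
        // Index name -> list of (matchedFuncExprs index, variable index) pairs,
        // mirroring indexExprsAndVars but keyed by a plain string.
        Map<String, List<int[]>> indexExprsAndVars = new HashMap<>();

        addIndexExpr(indexExprsAndVars, "idx_name_btree", 0, 1);
        addIndexExpr(indexExprsAndVars, "idx_name_btree", 2, 0);
        addIndexExpr(indexExprsAndVars, "idx_text_keyword", 1, 1);

        for (Map.Entry<String, List<int[]>> e : indexExprsAndVars.entrySet()) {
            System.out.println(e.getKey() + " matched " + e.getValue().size() + " expression(s)");
        }
    }

    // Same shape as AccessMethodAnalysisContext.addIndexExpr: create the list on
    // first use, then append the (exprIndex, varIndex) pair.
    static void addIndexExpr(Map<String, List<int[]>> map, String index, int exprIndex, int varIndex) {
        List<int[]> exprs = map.get(index);
        if (exprs == null) {
            exprs = new ArrayList<>();
            map.put(index, exprs);
        }
        exprs.add(new int[] { exprIndex, varIndex });
    }
}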

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
new file mode 100644
index 0000000..31943e4
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+
+/**
+ * Helper class for reading and writing job-gen parameters for access methods to
+ * and from a list of function arguments, typically of an unnest-map.
+ */
+public class AccessMethodJobGenParams {
+    protected String indexName;
+    protected IndexType indexType;
+    protected String dataverseName;
+    protected String datasetName;
+    protected boolean retainInput;
+    protected boolean retainNull;
+    protected boolean requiresBroadcast;
+    protected boolean isPrimaryIndex;
+
+    private static final int NUM_PARAMS = 7;
+
+    public AccessMethodJobGenParams() {
+    }
+
+    public AccessMethodJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
+        this.indexName = indexName;
+        this.indexType = indexType;
+        this.dataverseName = dataverseName;
+        this.datasetName = datasetName;
+        this.retainInput = retainInput;
+        this.retainNull = retainNull;
+        this.requiresBroadcast = requiresBroadcast;
+        this.isPrimaryIndex = datasetName.equals(indexName);
+    }
+
+    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createStringConstant(indexName)));
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createInt32Constant(indexType.ordinal())));
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createStringConstant(dataverseName)));
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createStringConstant(datasetName)));
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createBooleanConstant(retainInput)));
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createBooleanConstant(retainNull)));
+        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createBooleanConstant(requiresBroadcast)));
+    }
+
+    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        indexName = AccessMethodUtils.getStringConstant(funcArgs.get(0));
+        indexType = IndexType.values()[AccessMethodUtils.getInt32Constant(funcArgs.get(1))];
+        dataverseName = AccessMethodUtils.getStringConstant(funcArgs.get(2));
+        datasetName = AccessMethodUtils.getStringConstant(funcArgs.get(3));
+        retainInput = AccessMethodUtils.getBooleanConstant(funcArgs.get(4));
+        retainNull = AccessMethodUtils.getBooleanConstant(funcArgs.get(5));
+        requiresBroadcast = AccessMethodUtils.getBooleanConstant(funcArgs.get(6));
+        isPrimaryIndex = datasetName.equals(indexName);
+    }
+
+    public String getIndexName() {
+        return indexName;
+    }
+
+    public IndexType getIndexType() {
+        return indexType;
+    }
+
+    public String getDataverseName() {
+        return dataverseName;
+    }
+
+    public String getDatasetName() {
+        return datasetName;
+    }
+
+    public boolean getRetainInput() {
+        return retainInput;
+    }
+    
+    public boolean getRetainNull() {
+        return retainNull;
+    }
+
+    public boolean getRequiresBroadcast() {
+        return requiresBroadcast;
+    }
+
+    protected void writeVarList(List<LogicalVariable> varList, List<Mutable<ILogicalExpression>> funcArgs) {
+        Mutable<ILogicalExpression> numKeysRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
+                new AsterixConstantValue(new AInt32(varList.size()))));
+        funcArgs.add(numKeysRef);
+        for (LogicalVariable keyVar : varList) {
+            Mutable<ILogicalExpression> keyVarRef = new MutableObject<ILogicalExpression>(
+                    new VariableReferenceExpression(keyVar));
+            funcArgs.add(keyVarRef);
+        }
+    }
+
+    protected int readVarList(List<Mutable<ILogicalExpression>> funcArgs, int index, List<LogicalVariable> varList) {
+        int numLowKeys = AccessMethodUtils.getInt32Constant(funcArgs.get(index));
+        if (numLowKeys > 0) {
+            for (int i = 0; i < numLowKeys; i++) {
+                LogicalVariable var = ((VariableReferenceExpression) funcArgs.get(index + 1 + i).getValue())
+                        .getVariableReference();
+                varList.add(var);
+            }
+        }
+        return index + numLowKeys + 1;
+    }
+
+    protected int getNumParams() {
+        return NUM_PARAMS;
+    }
+
+    public boolean isPrimaryIndex() {
+        return isPrimaryIndex;
+    }
+}
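
The contract of writeToFuncArgs and readFromFuncArgs is purely positional: the seven parameters are appended as constants in a fixed order and read back from the same offsets. A toy round-trip under that assumption, using plain Object values instead of ILogicalExpression constants (class and field names here are illustrative):

import java.util.ArrayList;
import java.util.List;

public class JobGenParamsSketch {

    String indexName;
    String datasetName;
    boolean retainInput;

    // Append parameters in a fixed order, as writeToFuncArgs does with constant expressions.
    void writeToArgs(List<Object> args) {
        args.add(indexName);
        args.add(datasetName);
        args.add(retainInput);
    }

    // Read them back from the same positions, as readFromFuncArgs does.
    void readFromArgs(List<Object> args) {
        indexName = (String) args.get(0);
        datasetName = (String) args.get(1);
        retainInput = (Boolean) args.get(2);
    }

    public static void main(String[] argv) {
        JobGenParamsSketch out = new JobGenParamsSketch();
        out.indexName = "ds_name_idx";
        out.datasetName = "ds_name";
        out.retainInput = true;

        List<Object> args = new ArrayList<>();
        out.writeToArgs(args);

        JobGenParamsSketch in = new JobGenParamsSketch();
        in.readFromArgs(args);
        System.out.println(in.indexName + " / " + in.datasetName + " / " + in.retainInput);
    }
}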


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
new file mode 100644
index 0000000..482a82b
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RunningAggregatePOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnpartitionedPropertyComputer;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class PullPositionalVariableFromUnnestRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+            return false;
+        }
+        UnnestOperator unnest = (UnnestOperator) op;
+        LogicalVariable p = unnest.getPositionalVariable();
+        if (p == null) {
+            return false;
+        }
+        ArrayList<LogicalVariable> rOpVars = new ArrayList<LogicalVariable>();
+        rOpVars.add(p);
+        ArrayList<Mutable<ILogicalExpression>> rOpExprList = new ArrayList<Mutable<ILogicalExpression>>();
+        StatefulFunctionCallExpression fce = new StatefulFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TID), UnpartitionedPropertyComputer.INSTANCE);
+        rOpExprList.add(new MutableObject<ILogicalExpression>(fce));
+        RunningAggregateOperator rOp = new RunningAggregateOperator(rOpVars, rOpExprList);
+        rOp.setExecutionMode(unnest.getExecutionMode());
+        RunningAggregatePOperator rPop = new RunningAggregatePOperator();
+        rOp.setPhysicalOperator(rPop);
+        rOp.getInputs().add(new MutableObject<ILogicalOperator>(unnest));
+        opRef.setValue(rOp);
+        unnest.setPositionalVariable(null);
+        context.computeAndSetTypeEnvironmentForOperator(rOp);
+        context.computeAndSetTypeEnvironmentForOperator(unnest);
+        return true;
+    }
+}
\ No newline at end of file
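
A toy model of the rewrite above, with minimal stand-in classes rather than the Algebricks operator API: when an unnest carries a positional variable, the rule places a running aggregate on top that produces the tuple id and clears the positional variable on the unnest.

public class PullPositionalVariableSketch {

    // Minimal stand-ins for the operator classes used by the real rule.
    static class Unnest {
        String positionalVariable; // null when no positional variable is requested

        Unnest(String positionalVariable) {
            this.positionalVariable = positionalVariable;
        }
    }

    static class RunningAggregate {
        final String producedVariable; // receives the tuple-id ("tid") value
        final Unnest input;

        RunningAggregate(String producedVariable, Unnest input) {
            this.producedVariable = producedVariable;
            this.input = input;
        }
    }

    // Returns the (possibly new) root: if the unnest has a positional variable,
    // move it into a running aggregate placed on top and clear it on the unnest.
    static Object rewrite(Unnest unnest) {
        if (unnest.positionalVariable == null) {
            return unnest; // nothing to do
        }
        String p = unnest.positionalVariable;
        unnest.positionalVariable = null;
        return new RunningAggregate(p, unnest);
    }

    public static void main(String[] args) {
        Object root = rewrite(new Unnest("$pos"));
        System.out.println(root instanceof RunningAggregate
                ? "positional variable pulled into running aggregate"
                : "no rewrite needed");
    }
}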

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
new file mode 100644
index 0000000..a29ebb7
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
@@ -0,0 +1,265 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Pushes aggregate functions into a standalone aggregate operator (no group-by).
+ */
+public class PushAggFuncIntoStandaloneAggregateRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        // Pattern to match: assign <-- aggregate <-- !(group-by)
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator assignOp = (AssignOperator) op;
+
+        Mutable<ILogicalOperator> opRef2 = op.getInputs().get(0);
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
+        if (op2.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
+            AggregateOperator aggOp = (AggregateOperator) op2;
+            // Make sure the agg expr is a listify.
+            return pushAggregateFunction(aggOp, assignOp, context);
+        } else if (op2.getOperatorTag() == LogicalOperatorTag.INNERJOIN
+                || op2.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
+            AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op2;
+            // Tries to push aggregates through the join.
+            if (containsAggregate(assignOp.getExpressions()) && pushableThroughJoin(join)) {
+                pushAggregateFunctionThroughJoin(join, assignOp, context);
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Recursively check whether the list of expressions contains an aggregate function.
+     * 
+     * @param exprRefs
+     * @return true if the list contains an aggregate function and false otherwise.
+     */
+    private boolean containsAggregate(List<Mutable<ILogicalExpression>> exprRefs) {
+        for (Mutable<ILogicalExpression> exprRef : exprRefs) {
+            ILogicalExpression expr = exprRef.getValue();
+            if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                continue;
+            }
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            FunctionIdentifier funcIdent = AsterixBuiltinFunctions.getAggregateFunction(funcExpr
+                    .getFunctionIdentifier());
+            if (funcIdent == null) {
+                // Recursively look in func args.
+                if (containsAggregate(funcExpr.getArguments())) {
+                    return true;
+                }
+            } else {
+                // This is an aggregation function.
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Check whether the join is aggregate-pushable, that is,
+     * 1) the join condition is true;
+     * 2) each join branch produces only one tuple.
+     * 
+     * @param join
+     * @return true if pushable
+     */
+    private boolean pushableThroughJoin(AbstractBinaryJoinOperator join) {
+        ILogicalExpression condition = join.getCondition().getValue();
+        if (condition.equals(ConstantExpression.TRUE)) {
+            // Checks if the aggregation functions are pushable through the join
+            boolean pushable = true;
+            for (Mutable<ILogicalOperator> branchRef : join.getInputs()) {
+                AbstractLogicalOperator branch = (AbstractLogicalOperator) branchRef.getValue();
+                if (branch.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
+                    pushable &= true;
+                } else if (branch.getOperatorTag() == LogicalOperatorTag.INNERJOIN
+                        || branch.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
+                    AbstractBinaryJoinOperator childJoin = (AbstractBinaryJoinOperator) branch;
+                    pushable &= pushableThroughJoin(childJoin);
+                } else {
+                    pushable &= false;
+                }
+            }
+            return pushable;
+        }
+        return false;
+    }
+
+    /**
+     * Does the actual push of aggregates for qualified joins.
+     * 
+     * @param join
+     * @param assignOp
+     *            that contains aggregate function calls.
+     * @param context
+     * @throws AlgebricksException
+     */
+    private void pushAggregateFunctionThroughJoin(AbstractBinaryJoinOperator join, AssignOperator assignOp,
+            IOptimizationContext context) throws AlgebricksException {
+        for (Mutable<ILogicalOperator> branchRef : join.getInputs()) {
+            AbstractLogicalOperator branch = (AbstractLogicalOperator) branchRef.getValue();
+            if (branch.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
+                AggregateOperator aggOp = (AggregateOperator) branch;
+                pushAggregateFunction(aggOp, assignOp, context);
+            } else if (branch.getOperatorTag() == LogicalOperatorTag.INNERJOIN
+                    || branch.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
+                AbstractBinaryJoinOperator childJoin = (AbstractBinaryJoinOperator) branch;
+                pushAggregateFunctionThroughJoin(childJoin, assignOp, context);
+            }
+        }
+    }
+
+    private boolean pushAggregateFunction(AggregateOperator aggOp, AssignOperator assignOp, IOptimizationContext context)
+            throws AlgebricksException {
+        Mutable<ILogicalOperator> opRef3 = aggOp.getInputs().get(0);
+        AbstractLogicalOperator op3 = (AbstractLogicalOperator) opRef3.getValue();
+        // If there's a group by below the agg, then we want to have the agg pushed into the group by.
+        if (op3.getOperatorTag() == LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        if (aggOp.getVariables().size() != 1) {
+            return false;
+        }
+        ILogicalExpression aggExpr = aggOp.getExpressions().get(0).getValue();
+        if (aggExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression origAggFuncExpr = (AbstractFunctionCallExpression) aggExpr;
+        if (origAggFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.LISTIFY) {
+            return false;
+        }
+
+        LogicalVariable aggVar = aggOp.getVariables().get(0);
+        List<LogicalVariable> used = new LinkedList<LogicalVariable>();
+        VariableUtilities.getUsedVariables(assignOp, used);
+        if (!used.contains(aggVar)) {
+            return false;
+        }
+
+        List<Mutable<ILogicalExpression>> srcAssignExprRefs = new LinkedList<Mutable<ILogicalExpression>>();
+        if (!findAggFuncExprRef(assignOp.getExpressions(), aggVar, srcAssignExprRefs)) {
+            return false;
+        }
+        if (srcAssignExprRefs.isEmpty()) {
+            return false;
+        }
+
+        AbstractFunctionCallExpression aggOpExpr = (AbstractFunctionCallExpression) aggOp.getExpressions().get(0)
+                .getValue();
+        aggOp.getExpressions().clear();
+        aggOp.getVariables().clear();
+
+        for (Mutable<ILogicalExpression> srcAssignExprRef : srcAssignExprRefs) {
+            AbstractFunctionCallExpression assignFuncExpr = (AbstractFunctionCallExpression) srcAssignExprRef
+                    .getValue();
+            FunctionIdentifier aggFuncIdent = AsterixBuiltinFunctions.getAggregateFunction(assignFuncExpr
+                    .getFunctionIdentifier());
+
+            // Push the agg func into the agg op.                
+
+            List<Mutable<ILogicalExpression>> aggArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            aggArgs.add(aggOpExpr.getArguments().get(0));
+            AggregateFunctionCallExpression aggFuncExpr = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
+                    aggFuncIdent, aggArgs);
+            LogicalVariable newVar = context.newVar();
+            aggOp.getVariables().add(newVar);
+            aggOp.getExpressions().add(new MutableObject<ILogicalExpression>(aggFuncExpr));
+
+            // The assign now just "renames" the variable to make sure the upstream plan still works.
+            srcAssignExprRef.setValue(new VariableReferenceExpression(newVar));
+        }
+
+        context.computeAndSetTypeEnvironmentForOperator(aggOp);
+        context.computeAndSetTypeEnvironmentForOperator(assignOp);
+        return true;
+    }
+
+    private boolean findAggFuncExprRef(List<Mutable<ILogicalExpression>> exprRefs, LogicalVariable aggVar,
+            List<Mutable<ILogicalExpression>> srcAssignExprRefs) {
+        for (Mutable<ILogicalExpression> exprRef : exprRefs) {
+            ILogicalExpression expr = exprRef.getValue();
+
+            if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                if (((VariableReferenceExpression) expr).getVariableReference().equals(aggVar)) {
+                    return false;
+                }
+            }
+
+            if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                continue;
+            }
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            FunctionIdentifier funcIdent = AsterixBuiltinFunctions.getAggregateFunction(funcExpr
+                    .getFunctionIdentifier());
+            if (funcIdent == null) {
+                // Recursively look in func args.
+                if (!findAggFuncExprRef(funcExpr.getArguments(), aggVar, srcAssignExprRefs)) {
+                    return false;
+                }
+
+            } else {
+                // Check if this is the expr that uses aggVar.
+                Collection<LogicalVariable> usedVars = new HashSet<LogicalVariable>();
+                funcExpr.getUsedVariables(usedVars);
+                if (usedVars.contains(aggVar)) {
+                    srcAssignExprRefs.add(exprRef);
+                }
+            }
+        }
+        return true;
+    }
+}
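
The gatekeeper for this rule is the recursive aggregate detection in containsAggregate. A condensed, stand-alone illustration over a simple expression tree, with a hypothetical set of aggregate names in place of AsterixBuiltinFunctions.getAggregateFunction:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ContainsAggregateSketch {

    // Simplified function-call expression: a name plus nested arguments.
    static class Expr {
        final String name;
        final List<Expr> args;

        Expr(String name, Expr... children) {
            this.name = name;
            this.args = Arrays.asList(children);
        }
    }

    // Hypothetical aggregate identifiers; the real rule consults the builtin-function catalog instead.
    static final Set<String> AGGREGATES = new HashSet<>(Arrays.asList("sum", "count", "avg", "min", "max"));

    // Mirrors containsAggregate: a list of expressions contains an aggregate if any
    // expression is an aggregate call, or if one is found recursively in its arguments.
    static boolean containsAggregate(List<Expr> exprs) {
        for (Expr e : exprs) {
            if (AGGREGATES.contains(e.name)) {
                return true;
            }
            if (containsAggregate(e.args)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Expr assignExpr = new Expr("numeric-add", new Expr("sum", new Expr("field-access")), new Expr("constant"));
        System.out.println(containsAggregate(Arrays.asList(assignExpr))); // true
    }
}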

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java
new file mode 100644
index 0000000..90b2f25
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java
@@ -0,0 +1,462 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class PushAggregateIntoGroupbyRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        Map<LogicalVariable, Integer> gbyAggVars = new HashMap<LogicalVariable, Integer>();
+        Map<LogicalVariable, Integer> gbyAggVarToPlanIndex = new HashMap<LogicalVariable, Integer>();
+        Map<LogicalVariable, GroupByOperator> gbyWithAgg = new HashMap<LogicalVariable, GroupByOperator>();
+        Map<ILogicalExpression, ILogicalExpression> aggExprToVarExpr = new HashMap<ILogicalExpression, ILogicalExpression>();
+        // First collect the variables that refer to listified sequences.
+        boolean changed = collectVarsBottomUp(opRef, context, gbyAggVars, gbyWithAgg, gbyAggVarToPlanIndex,
+                aggExprToVarExpr);
+        if (changed) {
+            removeRedundantListifies(opRef, context, gbyAggVars, gbyWithAgg, gbyAggVarToPlanIndex);
+        }
+        return changed;
+    }
+
+    private void removeRedundantListifies(Mutable<ILogicalOperator> opRef, IOptimizationContext context,
+            Map<LogicalVariable, Integer> gbyAggVars, Map<LogicalVariable, GroupByOperator> gbyWithAgg,
+            Map<LogicalVariable, Integer> gbyAggVarToPlanIndex) throws AlgebricksException {
+        for (LogicalVariable aggVar : gbyAggVars.keySet()) {
+            int occurs = gbyAggVars.get(aggVar);
+            if (occurs == 0) {
+                GroupByOperator gbyOp = gbyWithAgg.get(aggVar);
+                AggregateOperator aggOp = (AggregateOperator) gbyOp.getNestedPlans()
+                        .get(gbyAggVarToPlanIndex.get(aggVar)).getRoots().get(0).getValue();
+                int pos = aggOp.getVariables().indexOf(aggVar);
+                if (pos >= 0) {
+                    aggOp.getVariables().remove(pos);
+                    aggOp.getExpressions().remove(pos);
+                    List<LogicalVariable> producedVarsAtAgg = new ArrayList<LogicalVariable>();
+                    VariableUtilities.getProducedVariablesInDescendantsAndSelf(aggOp, producedVarsAtAgg);
+                    if (producedVarsAtAgg.isEmpty()) {
+                        gbyOp.getNestedPlans().remove(gbyAggVarToPlanIndex.get(aggVar));
+                    }
+                }
+            }
+        }
+    }
+
+    private boolean collectVarsBottomUp(Mutable<ILogicalOperator> opRef, IOptimizationContext context,
+            Map<LogicalVariable, Integer> gbyListifyVarsCount, Map<LogicalVariable, GroupByOperator> gbyWithAgg,
+            Map<LogicalVariable, Integer> gbyAggVarToPlanIndex,
+            Map<ILogicalExpression, ILogicalExpression> aggregateExprToVarExpr) throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        context.addToDontApplySet(this, op1);
+        boolean change = false;
+        for (Mutable<ILogicalOperator> child : op1.getInputs()) {
+            if (collectVarsBottomUp(child, context, gbyListifyVarsCount, gbyWithAgg, gbyAggVarToPlanIndex,
+                    aggregateExprToVarExpr)) {
+                change = true;
+            }
+        }
+        // Need to use a list instead of a hash-set, because a var. may appear
+        // several times in the same op.
+        List<LogicalVariable> used = new LinkedList<LogicalVariable>();
+        VariableUtilities.getUsedVariables(op1, used);
+        switch (op1.getOperatorTag()) {
+            case ASSIGN:
+            case SELECT: {
+                boolean found = false;
+                // Do some prefiltering: check if the Assign uses any gby vars.
+                for (LogicalVariable v : used) {
+                    if (gbyListifyVarsCount.get(v) != null) {
+                        found = true;
+                        break;
+                    }
+                }
+                if (found) {
+                    if (op1.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                        AssignOperator assign = (AssignOperator) op1;
+                        for (Mutable<ILogicalExpression> exprRef : assign.getExpressions()) {
+                            Pair<Boolean, ILogicalExpression> p = extractAggFunctionsFromExpression(exprRef,
+                                    gbyWithAgg, aggregateExprToVarExpr, context);
+                            if (p.first) {
+                                change = true;
+                                exprRef.setValue(p.second);
+                            }
+                        }
+                    }
+                    if (op1.getOperatorTag() == LogicalOperatorTag.SELECT) {
+                        SelectOperator select = (SelectOperator) op1;
+                        Mutable<ILogicalExpression> exprRef = select.getCondition();
+                        Pair<Boolean, ILogicalExpression> p = extractAggFunctionsFromExpression(exprRef, gbyWithAgg,
+                                aggregateExprToVarExpr, context);
+                        if (p.first) {
+                            change = true;
+                            exprRef.setValue(p.second);
+                        }
+                    }
+                    used.clear();
+                    VariableUtilities.getUsedVariables(op1, used);
+                    // increment the count for the ones which are still used
+                    for (LogicalVariable v : used) {
+                        Integer m = gbyListifyVarsCount.get(v);
+                        if (m != null) {
+                            gbyListifyVarsCount.put(v, m + 1);
+                        }
+                    }
+                }
+                break;
+            }
+            case SUBPLAN: {
+                for (LogicalVariable v : used) {
+                    Integer m = gbyListifyVarsCount.get(v);
+                    if (m != null) {
+                        GroupByOperator gbyOp = gbyWithAgg.get(v);
+                        if (pushSubplanAsAggIntoGby(opRef, gbyOp, v, gbyListifyVarsCount, gbyWithAgg,
+                                gbyAggVarToPlanIndex, context)) {
+                            change = true;
+                        } else {
+                            gbyListifyVarsCount.put(v, m + 1);
+                        }
+                        break;
+                    }
+                }
+                break;
+            }
+            case GROUP: {
+                List<LogicalVariable> vars = collectOneVarPerAggFromGroupOp((GroupByOperator) op1);
+                if (vars != null) {
+                    for (int i = 0; i < vars.size(); i++) {
+                        LogicalVariable v = vars.get(i);
+                        if (v != null) {
+                            gbyListifyVarsCount.put(v, 0);
+                            gbyAggVarToPlanIndex.put(v, i);
+                            gbyWithAgg.put(v, (GroupByOperator) op1);
+                        }
+                    }
+                }
+                break;
+            }
+            default: {
+                for (LogicalVariable v : used) {
+                    Integer m = gbyListifyVarsCount.get(v);
+                    if (m != null) {
+                        gbyListifyVarsCount.put(v, m + 1);
+                    }
+                }
+            }
+        }
+        return change;
+    }
+
+    private List<LogicalVariable> collectOneVarPerAggFromGroupOp(GroupByOperator group) {
+        List<ILogicalPlan> nPlans = group.getNestedPlans();
+        if (nPlans == null || nPlans.size() < 1) {
+            return null;
+        }
+
+        List<LogicalVariable> aggVars = new ArrayList<LogicalVariable>();
+        // test that the group-by computes a "listify" aggregate
+        for (int i = 0; i < nPlans.size(); i++) {
+            AbstractLogicalOperator topOp = (AbstractLogicalOperator) nPlans.get(i).getRoots().get(0).getValue();
+            if (topOp.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+                continue;
+            }
+            AggregateOperator agg = (AggregateOperator) topOp;
+            if (agg.getVariables().size() != 1) {
+                continue;
+            }
+            ILogicalExpression expr = agg.getExpressions().get(0).getValue();
+            if (((AbstractLogicalExpression) expr).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                continue;
+            }
+            AbstractFunctionCallExpression fceAgg = (AbstractFunctionCallExpression) expr;
+            if (fceAgg.getFunctionIdentifier() != AsterixBuiltinFunctions.LISTIFY) {
+                continue;
+            }
+            aggVars.add(agg.getVariables().get(0));
+        }
+        return aggVars;
+    }
+
+    /**
+     * @param exprRef
+     * @param gbyWithAgg
+     * @param aggregateExprToVarExpr
+     * @param context
+     * @return a pair whose first member is a boolean which is true iff
+     *         something was changed in the expression tree rooted at exprRef. The
+     *         second member is the result of transforming exprRef.
+     * @throws AlgebricksException
+     */
+    private Pair<Boolean, ILogicalExpression> extractAggFunctionsFromExpression(Mutable<ILogicalExpression> exprRef,
+            Map<LogicalVariable, GroupByOperator> gbyWithAgg,
+            Map<ILogicalExpression, ILogicalExpression> aggregateExprToVarExpr, IOptimizationContext context)
+            throws AlgebricksException {
+        ILogicalExpression expr = exprRef.getValue();
+        switch (expr.getExpressionTag()) {
+            case FUNCTION_CALL: {
+                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+                FunctionIdentifier fi = AsterixBuiltinFunctions.getAggregateFunction(fce.getFunctionIdentifier());
+                if (fi != null) {
+                    ILogicalExpression a1 = fce.getArguments().get(0).getValue();
+                    if (a1.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                        LogicalVariable argVar = ((VariableReferenceExpression) a1).getVariableReference();
+                        GroupByOperator gbyOp = gbyWithAgg.get(argVar);
+
+                        if (gbyOp != null) {
+                            if (!aggregateExprToVarExpr.containsKey(expr)) {
+                                LogicalVariable newVar = context.newVar();
+                                AggregateFunctionCallExpression aggFun = AsterixBuiltinFunctions
+                                        .makeAggregateFunctionExpression(fi, fce.getArguments());
+                                rewriteGroupByAggregate(argVar, gbyOp, aggFun, newVar, context);
+                                ILogicalExpression newVarExpr = new VariableReferenceExpression(newVar);
+                                aggregateExprToVarExpr.put(expr, newVarExpr);
+                                return new Pair<Boolean, ILogicalExpression>(Boolean.TRUE, newVarExpr);
+                            } else {
+                                ILogicalExpression varExpr = aggregateExprToVarExpr.get(expr);
+                                return new Pair<Boolean, ILogicalExpression>(Boolean.TRUE, varExpr);
+                            }
+                        }
+                    }
+                }
+
+                boolean change = false;
+                for (Mutable<ILogicalExpression> a : fce.getArguments()) {
+                    Pair<Boolean, ILogicalExpression> aggArg = extractAggFunctionsFromExpression(a, gbyWithAgg,
+                            aggregateExprToVarExpr, context);
+                    if (aggArg.first.booleanValue()) {
+                        a.setValue(aggArg.second);
+                        change = true;
+                    }
+                }
+                return new Pair<Boolean, ILogicalExpression>(change, fce);
+            }
+            case VARIABLE:
+            case CONSTANT: {
+                return new Pair<Boolean, ILogicalExpression>(Boolean.FALSE, expr);
+            }
+            default: {
+                throw new IllegalArgumentException();
+            }
+        }
+    }
+
+    private void rewriteGroupByAggregate(LogicalVariable oldAggVar, GroupByOperator gbyOp,
+            AggregateFunctionCallExpression aggFun, LogicalVariable newAggVar, IOptimizationContext context)
+            throws AlgebricksException {
+        for (int j = 0; j < gbyOp.getNestedPlans().size(); j++) {
+            AggregateOperator aggOp = (AggregateOperator) gbyOp.getNestedPlans().get(j).getRoots().get(0).getValue();
+            int n = aggOp.getVariables().size();
+            for (int i = 0; i < n; i++) {
+                LogicalVariable v = aggOp.getVariables().get(i);
+                if (v.equals(oldAggVar)) {
+                    AbstractFunctionCallExpression oldAggExpr = (AbstractFunctionCallExpression) aggOp.getExpressions()
+                            .get(i).getValue();
+                    AggregateFunctionCallExpression newAggFun = AsterixBuiltinFunctions
+                            .makeAggregateFunctionExpression(aggFun.getFunctionIdentifier(),
+                                    new ArrayList<Mutable<ILogicalExpression>>());
+                    for (Mutable<ILogicalExpression> arg : oldAggExpr.getArguments()) {
+                        ILogicalExpression cloned = ((AbstractLogicalExpression) arg.getValue()).cloneExpression();
+                        newAggFun.getArguments().add(new MutableObject<ILogicalExpression>(cloned));
+                    }
+                    aggOp.getVariables().add(newAggVar);
+                    aggOp.getExpressions().add(new MutableObject<ILogicalExpression>(newAggFun));
+                    context.computeAndSetTypeEnvironmentForOperator(aggOp);
+                    break;
+                }
+            }
+        }
+    }
+
+    private boolean pushSubplanAsAggIntoGby(Mutable<ILogicalOperator> subplanOpRef, GroupByOperator gbyOp,
+            LogicalVariable varFromGroupAgg, Map<LogicalVariable, Integer> gbyAggVars,
+            Map<LogicalVariable, GroupByOperator> gbyWithAgg, Map<LogicalVariable, Integer> gbyAggVarToPlanIndex,
+            IOptimizationContext context) throws AlgebricksException {
+        SubplanOperator subplan = (SubplanOperator) subplanOpRef.getValue();
+        // the only free variable allowed in the subplan is varFromGroupAgg
+        HashSet<LogicalVariable> freeVars = new HashSet<LogicalVariable>();
+        OperatorPropertiesUtil.getFreeVariablesInSubplans(subplan, freeVars);
+        for (LogicalVariable vFree : freeVars) {
+            if (!vFree.equals(varFromGroupAgg)) {
+                return false;
+            }
+        }
+
+        List<ILogicalPlan> plans = subplan.getNestedPlans();
+        if (plans.size() > 1) {
+            return false;
+        }
+        ILogicalPlan p = plans.get(0);
+        if (p.getRoots().size() > 1) {
+            return false;
+        }
+        Mutable<ILogicalOperator> opRef = p.getRoots().get(0);
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AggregateOperator aggInSubplanOp = (AggregateOperator) op;
+        LogicalVariable unnestVar = null;
+        boolean pushableNestedSubplan = false;
+        while (op.getInputs().size() == 1) {
+            opRef = op.getInputs().get(0);
+            op = (AbstractLogicalOperator) opRef.getValue();
+            switch (op.getOperatorTag()) {
+                case ASSIGN: {
+                    break;
+                }
+                case UNNEST: {
+                    UnnestOperator unnest = (UnnestOperator) op;
+                    if (unnest.getPositionalVariable() != null) {
+                        // TODO: a subplan with both an accumulating and a running aggregate is currently not supported.
+                        return false;
+                    }
+                    ILogicalExpression expr = unnest.getExpressionRef().getValue();
+                    if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                        return false;
+                    }
+                    AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) expr;
+                    if (fun.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
+                        return false;
+                    }
+                    ILogicalExpression arg0 = fun.getArguments().get(0).getValue();
+                    if (arg0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                        return false;
+                    }
+                    VariableReferenceExpression varExpr = (VariableReferenceExpression) arg0;
+                    if (!varExpr.getVariableReference().equals(varFromGroupAgg)) {
+                        return false;
+                    }
+                    opRef = op.getInputs().get(0);
+                    op = (AbstractLogicalOperator) opRef.getValue();
+                    if (op.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
+                        return false;
+                    }
+                    pushableNestedSubplan = true;
+                    unnestVar = unnest.getVariable();
+                    break;
+                }
+                default: {
+                    return false;
+                }
+            }
+        }
+        if (pushableNestedSubplan) {
+            for (int i = 0; i < gbyOp.getNestedPlans().size(); i++) {
+                Mutable<ILogicalOperator> gbyAggRef = gbyOp.getNestedPlans().get(i).getRoots().get(0);
+                AggregateOperator gbyAgg = (AggregateOperator) gbyAggRef.getValue();
+                Mutable<ILogicalOperator> gbyAggChildRef = gbyAgg.getInputs().get(0);
+                OperatorManipulationUtil.substituteVarRec(aggInSubplanOp, unnestVar,
+                        findListifiedVariable(gbyAgg, varFromGroupAgg), true, context);
+                gbyAgg.getVariables().addAll(aggInSubplanOp.getVariables());
+                gbyAgg.getExpressions().addAll(aggInSubplanOp.getExpressions());
+                for (LogicalVariable v : aggInSubplanOp.getVariables()) {
+                    gbyWithAgg.put(v, gbyOp);
+                    gbyAggVars.put(v, 0);
+                    gbyAggVarToPlanIndex.put(v, i);
+                }
+
+                Mutable<ILogicalOperator> opRef1InSubplan = aggInSubplanOp.getInputs().get(0);
+                if (opRef1InSubplan.getValue().getInputs().size() > 0) {
+                    Mutable<ILogicalOperator> opRef2InSubplan = opRef1InSubplan.getValue().getInputs().get(0);
+                    AbstractLogicalOperator op2InSubplan = (AbstractLogicalOperator) opRef2InSubplan.getValue();
+                    if (op2InSubplan.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
+                        List<Mutable<ILogicalOperator>> gbyInpList = gbyAgg.getInputs();
+                        gbyInpList.clear();
+                        gbyInpList.add(opRef1InSubplan);
+                        while (true) {
+                            opRef2InSubplan = opRef1InSubplan.getValue().getInputs().get(0);
+                            op2InSubplan = (AbstractLogicalOperator) opRef2InSubplan.getValue();
+                            if (op2InSubplan.getOperatorTag() == LogicalOperatorTag.UNNEST) {
+                                List<Mutable<ILogicalOperator>> opInpList = opRef1InSubplan.getValue().getInputs();
+                                opInpList.clear();
+                                opInpList.add(gbyAggChildRef);
+                                break;
+                            }
+                            opRef1InSubplan = opRef2InSubplan;
+                            if (opRef1InSubplan.getValue().getInputs().size() == 0) {
+                                throw new IllegalStateException("PushAggregateIntoGroupbyRule: could not find UNNEST.");
+                            }
+                        }
+                    }
+                }
+                subplanOpRef.setValue(subplan.getInputs().get(0).getValue());
+                OperatorPropertiesUtil.typeOpRec(gbyAggRef, context);
+            }
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    private LogicalVariable findListifiedVariable(AggregateOperator gbyAgg, LogicalVariable varFromGroupAgg) {
+        int n = gbyAgg.getVariables().size();
+
+        for (int i = 0; i < n; i++) {
+            if (gbyAgg.getVariables().get(i).equals(varFromGroupAgg)) {
+                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) gbyAgg.getExpressions().get(i)
+                        .getValue();
+                if (fce.getFunctionIdentifier().equals(AsterixBuiltinFunctions.LISTIFY)) {
+                    ILogicalExpression argExpr = fce.getArguments().get(0).getValue();
+                    if (((AbstractLogicalExpression) argExpr).getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                        return ((VariableReferenceExpression) argExpr).getVariableReference();
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+}
\ No newline at end of file
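
The net effect of PushAggregateIntoGroupbyRule can be pictured with the small, self-contained sketch below. The MiniGroupBy class and the expression strings are hypothetical stand-ins for Algebricks operators and expressions, not the real API: an outer count over a listified group variable is folded into the group-by as a new aggregate (as rewriteGroupByAggregate does), and the now-unused listify is dropped once nothing references it any more (as removeRedundantListifies does).

// Hypothetical, simplified model of the rewrite; not the Algebricks API.
import java.util.LinkedHashMap;
import java.util.Map;

public class PushAggregateSketch {

    // A group-by with named nested aggregates, e.g. $g -> listify($x).
    static final class MiniGroupBy {
        final Map<String, String> aggregates = new LinkedHashMap<String, String>();
    }

    public static void main(String[] args) {
        MiniGroupBy gby = new MiniGroupBy();
        gby.aggregates.put("$g", "listify($x)");

        // Downstream expression consuming the listified variable.
        String downstream = "count($g)";

        // Step 1: fold the aggregate into the group-by under a fresh variable.
        String newVar = "$c";
        gby.aggregates.put(newVar, "agg-count($x)");
        downstream = downstream.replace("count($g)", newVar);

        // Step 2: the listified variable is no longer referenced, so the
        // redundant listify can be removed from the group-by.
        if (!downstream.contains("$g")) {
            gby.aggregates.remove("$g");
        }

        System.out.println("group-by aggregates: " + gby.aggregates); // {$c=agg-count($x)}
        System.out.println("downstream expression: " + downstream);   // $c
    }
}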

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
new file mode 100644
index 0000000..3a19854
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
@@ -0,0 +1,391 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class PushFieldAccessRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator access = (AssignOperator) op;
+        ILogicalExpression expr = getFirstExpr(access);
+        String finalAnnot = null;
+        if (AnalysisUtil.isAccessToFieldRecord(expr)) {
+            finalAnnot = AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS;
+        } else if (AnalysisUtil.isRunnableAccessToFieldRecord(expr)) {
+            finalAnnot = AsterixOperatorAnnotations.PUSHED_RUNNABLE_FIELD_ACCESS;
+        } else {
+            return false;
+        }
+        return propagateFieldAccessRec(opRef, context, finalAnnot);
+    }
+
+    @SuppressWarnings("unchecked")
+    private boolean isAccessToIndexedField(AssignOperator assign, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractFunctionCallExpression accessFun = (AbstractFunctionCallExpression) assign.getExpressions().get(0)
+                .getValue();
+        ILogicalExpression e0 = accessFun.getArguments().get(0).getValue();
+        if (e0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+        LogicalVariable var = ((VariableReferenceExpression) e0).getVariableReference();
+        if (context.findPrimaryKey(var) == null) {
+            // not referring to a dataset record
+            return false;
+        }
+        AbstractLogicalOperator op = assign;
+        while (op.getInputs().size() == 1 && op.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN) {
+            op = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        }
+        if (op.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN) {
+            return false;
+        }
+        DataSourceScanOperator scan = (DataSourceScanOperator) op;
+        LogicalVariable recVar = scan.getVariables().get(scan.getVariables().size() - 1);
+        if (recVar != var) {
+            return false;
+        }
+        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+        AqlSourceId asid = ((IDataSource<AqlSourceId>) scan.getDataSource()).getId();
+
+        Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
+        if (dataset == null) {
+            throw new AlgebricksException("Dataset " + asid.getDatasourceName() + " not found.");
+        }
+        if (dataset.getDatasetType() != DatasetType.INTERNAL) {
+            return false;
+        }
+        ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
+        if (e1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            return false;
+        }
+        ConstantExpression ce = (ConstantExpression) e1;
+        IAObject obj = ((AsterixConstantValue) ce.getValue()).getObject();
+        String fldName;
+        if (obj.getType().getTypeTag() == ATypeTag.STRING) {
+            fldName = ((AString) obj).getStringValue();
+        } else {
+            int pos = ((AInt32) obj).getIntegerValue();
+            String tName = dataset.getItemTypeName();
+            IAType t = mp.findType(dataset.getDataverseName(), tName);
+            if (t.getTypeTag() != ATypeTag.RECORD) {
+                return false;
+            }
+            ARecordType rt = (ARecordType) t;
+            if (pos >= rt.getFieldNames().length) {
+                return false;
+            }
+            fldName = rt.getFieldNames()[pos];
+        }
+
+        List<Index> datasetIndexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+        boolean hasSecondaryIndex = false;
+        for (Index index : datasetIndexes) {
+            if (index.isSecondaryIndex()) {
+                hasSecondaryIndex = true;
+                break;
+            }
+        }
+
+        return hasSecondaryIndex;
+    }
+
+    private boolean tryingToPushThroughSelectionWithSameDataSource(AssignOperator access, AbstractLogicalOperator op2) {
+        if (op2.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        ILogicalExpression e1 = (ILogicalExpression) access.getAnnotations().get(
+                AsterixOperatorAnnotations.FIELD_ACCESS);
+        if (e1 == null) {
+            return false;
+        }
+        ILogicalExpression e2 = (ILogicalExpression) op2.getAnnotations().get(AsterixOperatorAnnotations.FIELD_ACCESS);
+        if (e2 == null) {
+            return false;
+        }
+        return e1.equals(e2);
+    }
+
+    @SuppressWarnings("unchecked")
+    private boolean propagateFieldAccessRec(Mutable<ILogicalOperator> opRef, IOptimizationContext context,
+            String finalAnnot) throws AlgebricksException {
+        AssignOperator access = (AssignOperator) opRef.getValue();
+        Mutable<ILogicalOperator> opRef2 = access.getInputs().get(0);
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
+        // If the access is over an indexed field below a select, keep pushing it
+        // (even if this operator pair was compared before) so that the scan can
+        // later be rewritten into an index search.
+        if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT || context.checkAndAddToAlreadyCompared(access, op2)
+                && !(op2.getOperatorTag() == LogicalOperatorTag.SELECT && isAccessToIndexedField(access, context))) {
+            return false;
+        }
+        if (tryingToPushThroughSelectionWithSameDataSource(access, op2)) {
+            return false;
+        }
+        if (testAndModifyRedundantOp(access, op2)) {
+            propagateFieldAccessRec(opRef2, context, finalAnnot);
+            return true;
+        }
+        List<LogicalVariable> usedInAccess = new LinkedList<LogicalVariable>();
+        VariableUtilities.getUsedVariables(access, usedInAccess);
+        List<LogicalVariable> produced2 = new LinkedList<LogicalVariable>();
+        if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
+            VariableUtilities.getLiveVariables(op2, produced2);
+        } else {
+            VariableUtilities.getProducedVariables(op2, produced2);
+        }
+        boolean pushItDown = false;
+        List<LogicalVariable> inter = new ArrayList<LogicalVariable>(usedInAccess);
+        if (inter.isEmpty()) { // ground value
+            return false;
+        }
+        inter.retainAll(produced2);
+        if (inter.isEmpty()) {
+            pushItDown = true;
+        } else if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
+            GroupByOperator g = (GroupByOperator) op2;
+            List<Pair<LogicalVariable, LogicalVariable>> varMappings = new ArrayList<Pair<LogicalVariable, LogicalVariable>>();
+            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : g.getDecorList()) {
+                ILogicalExpression e = p.second.getValue();
+                if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                    LogicalVariable decorVar = GroupByOperator.getDecorVariable(p);
+                    if (inter.contains(decorVar)) {
+                        inter.remove(decorVar);
+                        LogicalVariable v1 = ((VariableReferenceExpression) e).getVariableReference();
+                        varMappings.add(new Pair<LogicalVariable, LogicalVariable>(decorVar, v1));
+                    }
+                }
+            }
+            if (inter.isEmpty()) {
+                boolean changed = false;
+                for (Pair<LogicalVariable, LogicalVariable> m : varMappings) {
+                    LogicalVariable v2 = context.newVar();
+                    LogicalVariable oldVar = access.getVariables().get(0);
+                    g.getDecorList().add(
+                            new Pair<LogicalVariable, Mutable<ILogicalExpression>>(oldVar,
+                                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v2))));
+                    changed = true;
+                    access.getVariables().set(0, v2);
+                    VariableUtilities.substituteVariables(access, m.first, m.second, context);
+                }
+                if (changed) {
+                    context.computeAndSetTypeEnvironmentForOperator(g);
+                }
+                usedInAccess.clear();
+                VariableUtilities.getUsedVariables(access, usedInAccess);
+                pushItDown = true;
+            }
+        }
+        if (pushItDown) {
+            if (op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
+                Mutable<ILogicalOperator> childOfSubplan = ((NestedTupleSourceOperator) op2).getDataSourceReference()
+                        .getValue().getInputs().get(0);
+                pushAccessDown(opRef, op2, childOfSubplan, context, finalAnnot);
+                return true;
+            }
+            if (op2.getInputs().size() == 1 && !op2.hasNestedPlans()) {
+                pushAccessDown(opRef, op2, op2.getInputs().get(0), context, finalAnnot);
+                return true;
+            } else {
+                for (Mutable<ILogicalOperator> inp : op2.getInputs()) {
+                    HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
+                    VariableUtilities.getLiveVariables(inp.getValue(), v2);
+                    if (v2.containsAll(usedInAccess)) {
+                        pushAccessDown(opRef, op2, inp, context, finalAnnot);
+                        return true;
+                    }
+                }
+            }
+            if (op2.hasNestedPlans()) {
+                AbstractOperatorWithNestedPlans nestedOp = (AbstractOperatorWithNestedPlans) op2;
+                for (ILogicalPlan plan : nestedOp.getNestedPlans()) {
+                    for (Mutable<ILogicalOperator> root : plan.getRoots()) {
+                        HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
+                        VariableUtilities.getLiveVariables(root.getValue(), v2);
+                        if (v2.containsAll(usedInAccess)) {
+                            pushAccessDown(opRef, op2, root, context, finalAnnot);
+                            return true;
+                        }
+                    }
+                }
+            }
+            throw new AsterixRuntimeException("Field access " + access.getExpressions().get(0).getValue()
+                    + " does not correspond to any input of operator " + op2);
+        } else {
+            // Check whether the accessed field is one of the partitioning key
+            // fields. If it is, the field access can be replaced by the
+            // corresponding primary-key variable produced by the scan.
+            if (op2.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+                DataSourceScanOperator scan = (DataSourceScanOperator) op2;
+                int n = scan.getVariables().size();
+                LogicalVariable scanRecordVar = scan.getVariables().get(n - 1);
+                AbstractFunctionCallExpression accessFun = (AbstractFunctionCallExpression) access.getExpressions()
+                        .get(0).getValue();
+                ILogicalExpression e0 = accessFun.getArguments().get(0).getValue();
+                LogicalExpressionTag tag = e0.getExpressionTag();
+                if (tag == LogicalExpressionTag.VARIABLE) {
+                    VariableReferenceExpression varRef = (VariableReferenceExpression) e0;
+                    if (varRef.getVariableReference() == scanRecordVar) {
+                        ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
+                        if (e1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+                            IDataSource<AqlSourceId> dataSource = (IDataSource<AqlSourceId>) scan.getDataSource();
+                            AqlDataSourceType dsType = ((AqlDataSource) dataSource).getDatasourceType();
+                            if (dsType == AqlDataSourceType.FEED || dsType == AqlDataSourceType.LOADABLE) {
+                                return false;
+                            }
+                            AqlSourceId asid = dataSource.getId();
+                            AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+                            Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
+                            if (dataset == null) {
+                                throw new AlgebricksException("Dataset " + asid.getDatasourceName() + " not found.");
+                            }
+                            if (dataset.getDatasetType() != DatasetType.INTERNAL) {
+                                setAsFinal(access, context, finalAnnot);
+                                return false;
+                            }
+                            ConstantExpression ce = (ConstantExpression) e1;
+                            IAObject obj = ((AsterixConstantValue) ce.getValue()).getObject();
+                            String fldName;
+                            if (obj.getType().getTypeTag() == ATypeTag.STRING) {
+                                fldName = ((AString) obj).getStringValue();
+                            } else {
+                                int pos = ((AInt32) obj).getIntegerValue();
+                                String tName = dataset.getItemTypeName();
+                                IAType t = mp.findType(dataset.getDataverseName(), tName);
+                                if (t.getTypeTag() != ATypeTag.RECORD) {
+                                    return false;
+                                }
+                                ARecordType rt = (ARecordType) t;
+                                if (pos >= rt.getFieldNames().length) {
+                                    setAsFinal(access, context, finalAnnot);
+                                    return false;
+                                }
+                                fldName = rt.getFieldNames()[pos];
+                            }
+                            int p = DatasetUtils.getPositionOfPartitioningKeyField(dataset, fldName);
+                            if (p < 0) { // not one of the partitioning fields
+                                setAsFinal(access, context, finalAnnot);
+                                return false;
+                            }
+                            LogicalVariable keyVar = scan.getVariables().get(p);
+                            access.getExpressions().get(0).setValue(new VariableReferenceExpression(keyVar));
+                            return true;
+                        }
+                    }
+                }
+            }
+            setAsFinal(access, context, finalAnnot);
+            return false;
+        }
+    }
+
+    private void setAsFinal(ILogicalOperator access, IOptimizationContext context, String finalAnnot) {
+        access.getAnnotations().put(finalAnnot, true);
+        context.addToDontApplySet(this, access);
+    }
+
+    private boolean testAndModifyRedundantOp(AssignOperator access, AbstractLogicalOperator op2) {
+        if (op2.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator a2 = (AssignOperator) op2;
+        if (getFirstExpr(access).equals(getFirstExpr(a2))) {
+            access.getExpressions().get(0).setValue(new VariableReferenceExpression(a2.getVariables().get(0)));
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    // mutually recursive with propagateFieldAccessRec
+    private void pushAccessDown(Mutable<ILogicalOperator> fldAccessOpRef, ILogicalOperator op2,
+            Mutable<ILogicalOperator> inputOfOp2, IOptimizationContext context, String finalAnnot)
+            throws AlgebricksException {
+        ILogicalOperator fieldAccessOp = fldAccessOpRef.getValue();
+        fldAccessOpRef.setValue(op2);
+        List<Mutable<ILogicalOperator>> faInpList = fieldAccessOp.getInputs();
+        faInpList.clear();
+        faInpList.add(new MutableObject<ILogicalOperator>(inputOfOp2.getValue()));
+        inputOfOp2.setValue(fieldAccessOp);
+        // recompute the type environments for the swapped operators
+        context.computeAndSetTypeEnvironmentForOperator(fieldAccessOp);
+        context.computeAndSetTypeEnvironmentForOperator(op2);
+        propagateFieldAccessRec(inputOfOp2, context, finalAnnot);
+    }
+
+    private ILogicalExpression getFirstExpr(AssignOperator assign) {
+        return assign.getExpressions().get(0).getValue();
+    }
+
+}
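
One branch of propagateFieldAccessRec above replaces a field access over the scanned record with the primary-key variable the scan already produces, when the accessed field happens to be a partitioning key. A rough, self-contained sketch of that shortcut, using hypothetical variable names and plain Java collections instead of the metadata and expression APIs:

// Hypothetical sketch of the partitioning-key shortcut; plain collections
// stand in for the scan variables and dataset metadata.
import java.util.Arrays;
import java.util.List;

public class FieldAccessShortcutSketch {

    public static void main(String[] args) {
        // Variables produced by a dataset scan: primary key(s) first, record last.
        List<String> scanVars = Arrays.asList("$pk0", "$record");
        // Partitioning key fields of the (hypothetical) dataset, in key order.
        List<String> partitioningKeys = Arrays.asList("id");

        String accessedField = "id"; // e.g. a field access on $record by the name "id"
        int p = partitioningKeys.indexOf(accessedField);

        String rewritten;
        if (p >= 0) {
            // The field is a partitioning key: reuse the key variable the scan
            // already produces instead of re-extracting it from the record.
            rewritten = scanVars.get(p);
        } else {
            rewritten = "field-access-by-name($record, \"" + accessedField + "\")";
        }
        System.out.println(rewritten); // prints $pk0
    }
}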

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java
new file mode 100644
index 0000000..647c3ee
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class PushGroupByThroughProduct implements IAlgebraicRewriteRule {
+
+    private enum PushTestResult {
+        FALSE,
+        TRUE,
+        REPEATED_DECORS
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
+        if (op2.getOperatorTag() != LogicalOperatorTag.INNERJOIN) {
+            return false;
+        }
+        InnerJoinOperator join = (InnerJoinOperator) op2;
+        if (!OperatorPropertiesUtil.isAlwaysTrueCond(join.getCondition().getValue())) {
+            // not a product
+            return false;
+        }
+        GroupByOperator gby = (GroupByOperator) op1;
+
+        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorToPush = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorNotToPush = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+
+        Mutable<ILogicalOperator> opLeftRef = join.getInputs().get(0);
+        ILogicalOperator opLeft = opLeftRef.getValue();
+        switch (canPushThrough(gby, opLeft, decorToPush, decorNotToPush)) {
+            case REPEATED_DECORS: {
+                return false;
+            }
+            case TRUE: {
+                push(opRef, opRef2, 0, decorToPush, decorNotToPush, context);
+                return true;
+            }
+            case FALSE: {
+                decorToPush.clear();
+                Mutable<ILogicalOperator> opRightRef = join.getInputs().get(1);
+                ILogicalOperator opRight = opRightRef.getValue();
+                if (canPushThrough(gby, opRight, decorToPush, decorNotToPush) == PushTestResult.TRUE) {
+                    push(opRef, opRef2, 1, decorToPush, decorNotToPush, context);
+                    return true;
+                } else {
+                    return false;
+                }
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    private void push(Mutable<ILogicalOperator> opRefGby, Mutable<ILogicalOperator> opRefJoin, int branch,
+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorToPush,
+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorNotToPush, IOptimizationContext context)
+            throws AlgebricksException {
+        GroupByOperator gby = (GroupByOperator) opRefGby.getValue();
+        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) opRefJoin.getValue();
+        gby.getDecorList().clear();
+        gby.getDecorList().addAll(decorToPush);
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : decorNotToPush) {
+            LogicalVariable v1 = p.first;
+            VariableReferenceExpression varRef = (VariableReferenceExpression) p.second.getValue();
+            LogicalVariable v2 = varRef.getVariableReference();
+            OperatorManipulationUtil.substituteVarRec(join, v2, v1, true, context);
+        }
+        Mutable<ILogicalOperator> branchRef = join.getInputs().get(branch);
+        ILogicalOperator opBranch = branchRef.getValue();
+        opRefJoin.setValue(opBranch);
+        branchRef.setValue(gby);
+        opRefGby.setValue(join);
+    }
+
+    private PushTestResult canPushThrough(GroupByOperator gby, ILogicalOperator branch,
+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> toPush,
+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> notToPush) throws AlgebricksException {
+        Collection<LogicalVariable> fromBranch = new HashSet<LogicalVariable>();
+        VariableUtilities.getLiveVariables(branch, fromBranch);
+        Collection<LogicalVariable> usedInGbyExprList = new ArrayList<LogicalVariable>();
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getGroupByList()) {
+            p.second.getValue().getUsedVariables(usedInGbyExprList);
+        }
+
+        if (!fromBranch.containsAll(usedInGbyExprList)) {
+            return PushTestResult.FALSE;
+        }
+        Set<LogicalVariable> free = new HashSet<LogicalVariable>();
+        for (ILogicalPlan p : gby.getNestedPlans()) {
+            for (Mutable<ILogicalOperator> r : p.getRoots()) {
+                OperatorPropertiesUtil.getFreeVariablesInSelfOrDesc((AbstractLogicalOperator) r.getValue(), free);
+            }
+        }
+        if (!fromBranch.containsAll(free)) {
+            return PushTestResult.FALSE;
+        }
+
+        Set<LogicalVariable> decorVarRhs = new HashSet<LogicalVariable>();
+        decorVarRhs.clear();
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
+            ILogicalExpression expr = p.second.getValue();
+            if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                return PushTestResult.FALSE;
+            }
+            VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
+            LogicalVariable v = varRef.getVariableReference();
+            if (decorVarRhs.contains(v)) {
+                return PushTestResult.REPEATED_DECORS;
+            }
+            decorVarRhs.add(v);
+
+            if (fromBranch.contains(v)) {
+                toPush.add(p);
+            } else {
+                notToPush.add(p);
+            }
+        }
+        return PushTestResult.TRUE;
+    }
+}
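
The core test in canPushThrough above is a containment check: the group-by can move into one branch of the product only if every variable it needs (its group-by expressions plus the free variables of its nested plans) is live in that branch, and its decoration entries are split into those that can travel with it and those that must stay at the join. A minimal sketch of that bookkeeping with hypothetical variable names:

// Hypothetical sketch of the containment test and decor split; variable
// names are illustrative strings, not Algebricks LogicalVariables.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class PushGroupByThroughProductSketch {

    public static void main(String[] args) {
        // Variables live in the left branch of the product.
        Set<String> fromLeftBranch = new HashSet<String>(Arrays.asList("$a", "$b"));
        // Variables the group-by needs: its group-by expressions plus the free
        // variables of its nested plans.
        Set<String> neededByGroupBy = new HashSet<String>(Arrays.asList("$a"));

        // Decoration entries: decor variable -> referenced variable.
        Map<String, String> decors = new LinkedHashMap<String, String>();
        decors.put("$d1", "$b"); // from the left branch -> can travel with the group-by
        decors.put("$d2", "$c"); // from the right branch -> must stay at the join

        boolean pushable = fromLeftBranch.containsAll(neededByGroupBy);
        List<String> decorToPush = new ArrayList<String>();
        List<String> decorNotToPush = new ArrayList<String>();
        for (Map.Entry<String, String> e : decors.entrySet()) {
            (fromLeftBranch.contains(e.getValue()) ? decorToPush : decorNotToPush).add(e.getKey());
        }

        System.out.println("pushable into left branch: " + pushable);    // true
        System.out.println("decor to push: " + decorToPush);             // [$d1]
        System.out.println("decor kept at the join: " + decorNotToPush); // [$d2]
    }
}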

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java
new file mode 100644
index 0000000..7d9d261
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class PushProperJoinThroughProduct implements IAlgebraicRewriteRule {
+
+    private List<LogicalVariable> usedInCond1AndMaps = new ArrayList<LogicalVariable>();
+    private List<LogicalVariable> productLeftBranchVars = new ArrayList<LogicalVariable>();
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        LogicalOperatorTag tag1 = op.getOperatorTag();
+        if (tag1 != LogicalOperatorTag.INNERJOIN && tag1 != LogicalOperatorTag.LEFTOUTERJOIN) {
+            return false;
+        }
+        AbstractBinaryJoinOperator join1 = (AbstractBinaryJoinOperator) op;
+        ILogicalExpression cond1 = join1.getCondition().getValue();
+        // don't try to push a product down
+        if (OperatorPropertiesUtil.isAlwaysTrueCond(cond1)) {
+            return false;
+        }
+
+        Mutable<ILogicalOperator> opRef2 = op.getInputs().get(0);
+
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
+        while (op2.isMap()) {
+            opRef2 = op2.getInputs().get(0);
+            op2 = (AbstractLogicalOperator) opRef2.getValue();
+        }
+
+        if (op2.getOperatorTag() != LogicalOperatorTag.INNERJOIN) {
+            return false;
+        }
+
+        InnerJoinOperator product = (InnerJoinOperator) op2;
+        if (!OperatorPropertiesUtil.isAlwaysTrueCond(product.getCondition().getValue())) {
+            return false;
+        }
+
+        usedInCond1AndMaps.clear();
+        cond1.getUsedVariables(usedInCond1AndMaps);
+        Mutable<ILogicalOperator> opIterRef = op.getInputs().get(0);
+        ILogicalOperator opIter = opIterRef.getValue();
+        do {
+            VariableUtilities.getUsedVariables(opIter, usedInCond1AndMaps);
+            opIterRef = opIter.getInputs().get(0);
+            opIter = opIterRef.getValue();
+        } while (opIter.isMap());
+
+        productLeftBranchVars.clear();
+        ILogicalOperator opLeft = op2.getInputs().get(0).getValue();
+        VariableUtilities.getLiveVariables(opLeft, productLeftBranchVars);
+
+        if (!OperatorPropertiesUtil.disjoint(usedInCond1AndMaps, productLeftBranchVars)) {
+            return false;
+        }
+
+        // now push the operators from in between joins, too
+        opIterRef = op.getInputs().get(0);
+        opIter = opIterRef.getValue();
+
+        Mutable<ILogicalOperator> op3Ref = product.getInputs().get(1);
+        ILogicalOperator op3 = op3Ref.getValue();
+
+        opRef2.setValue(op3);
+        op3Ref.setValue(join1);
+        opRef.setValue(product);
+        return true;
+    }
+}
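
The gating condition of this rule is a disjointness test: if the upper join's condition (together with any map operators between the two joins) uses no variable produced by the product's left branch, the proper join only concerns the right branch and can be swapped below the product. A small sketch of the test under hypothetical variable sets:

// Hypothetical sketch of the disjointness test; the sets below are
// illustrative variable names, not Algebricks LogicalVariables.
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

public class JoinThroughProductSketch {

    static boolean disjoint(Collection<String> a, Collection<String> b) {
        for (String v : a) {
            if (b.contains(v)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        // Variables used by the upper join's condition and the map operators
        // sitting between the two joins.
        Set<String> usedInCond1AndMaps = new HashSet<String>(Arrays.asList("$x", "$y"));
        // Variables produced by the product's left branch.
        Set<String> productLeftBranchVars = new HashSet<String>(Arrays.asList("$a", "$b"));

        // If the sets are disjoint, the proper join only touches the product's
        // right branch and can be swapped below the product.
        System.out.println(disjoint(usedInCond1AndMaps, productLeftBranchVars)); // true
    }
}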

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
new file mode 100644
index 0000000..d9e06f4
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushFunctionsBelowJoin;
+
+/**
+ * Pushes similarity function-call expressions below a join if possible.
+ * Assigns the similarity function-call expressions to new variables, and replaces the original
+ * expression with a corresponding variable reference expression.
+ * This rule can help reduce the cost of computing expensive similarity functions by pushing them below
+ * a join (which may blow up the cardinality).
+ * Also, this rule may help to enable other rules such as common subexpression elimination, again to reduce
+ * the number of calls to expensive similarity functions.
+ * Example:
+ * Before plan:
+ * assign [$$10] <- [funcA(funcB(simFuncX($$3, $$4)))]
+ * join (some condition)
+ * join_branch_0 where $$3 and $$4 are not live
+ * ...
+ * join_branch_1 where $$3 and $$4 are live
+ * ...
+ * After plan:
+ * assign [$$10] <- [funcA(funcB($$11))]
+ * join (some condition)
+ * join_branch_0 where $$3 and $$4 are not live
+ * ...
+ * join_branch_1 where $$3 and $$4 are live
+ * assign[$$11] <- [simFuncX($$3, $$4)]
+ * ...
+ */
+public class PushSimilarityFunctionsBelowJoin extends PushFunctionsBelowJoin {
+
+    private static final Set<FunctionIdentifier> simFuncIdents = new HashSet<FunctionIdentifier>();
+    static {
+        simFuncIdents.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD);
+        simFuncIdents.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK);
+        simFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE);
+        simFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK);
+    }
+
+    public PushSimilarityFunctionsBelowJoin() {
+        super(simFuncIdents);
+    }
+}
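
The decision the inherited PushFunctionsBelowJoin logic makes for the plan in the Javadoc above boils down to a containment check over live variables; the sets and branch names in the sketch below are taken from that example and are purely illustrative, not the actual rule implementation:

// Hypothetical sketch of the push decision for the Javadoc example above.
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class PushFunctionBelowJoinSketch {

    public static void main(String[] args) {
        Set<String> usedBySimFunc = new HashSet<String>(Arrays.asList("$3", "$4"));
        Set<String> liveInBranch0 = new HashSet<String>(Arrays.asList("$1", "$2"));
        Set<String> liveInBranch1 = new HashSet<String>(Arrays.asList("$3", "$4", "$5"));

        // The expensive call is assigned in the branch where all of its arguments
        // are live, so it runs once per input tuple rather than once per joined tuple.
        String target = liveInBranch1.containsAll(usedBySimFunc) ? "join_branch_1"
                : liveInBranch0.containsAll(usedBySimFunc) ? "join_branch_0" : "keep above the join";
        System.out.println("push simFuncX($3, $4) into: " + target); // join_branch_1
    }
}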


[24/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
new file mode 100644
index 0000000..aa7a6ac
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+
+/**
+ * Operator subtree that matches the following patterns, and provides convenient access to its nodes:
+ * (select)? <-- (assign | unnest)* <-- (datasource scan | unnest-map)
+ */
+public class OptimizableOperatorSubTree {
+
+    public static enum DataSourceType {
+        DATASOURCE_SCAN,
+        EXTERNAL_SCAN,
+        PRIMARY_INDEX_LOOKUP,
+        COLLECTION_SCAN,
+        NO_DATASOURCE
+    }
+
+    public ILogicalOperator root = null;
+    public Mutable<ILogicalOperator> rootRef = null;
+    public final List<Mutable<ILogicalOperator>> assignsAndUnnestsRefs = new ArrayList<Mutable<ILogicalOperator>>();
+    public final List<AbstractLogicalOperator> assignsAndUnnests = new ArrayList<AbstractLogicalOperator>();
+    public Mutable<ILogicalOperator> dataSourceRef = null;
+    public DataSourceType dataSourceType = DataSourceType.NO_DATASOURCE;
+    // Dataset and type metadata. Set in setDatasetAndTypeMetadata().
+    public Dataset dataset = null;
+    public ARecordType recordType = null;
+
+    public boolean initFromSubTree(Mutable<ILogicalOperator> subTreeOpRef) {
+        reset();
+        rootRef = subTreeOpRef;
+        root = subTreeOpRef.getValue();
+        // Examine the op's children to match the expected patterns.
+        AbstractLogicalOperator subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+        do {
+            // Skip select operator.
+            if (subTreeOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
+                subTreeOpRef = subTreeOp.getInputs().get(0);
+                subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+            }
+            // Check primary-index pattern.
+            if (subTreeOp.getOperatorTag() != LogicalOperatorTag.ASSIGN
+                    && subTreeOp.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+                // Pattern may still match if we are looking for primary index matches as well.
+                return initializeDataSource(subTreeOpRef);
+            }
+            // Match (assign | unnest)+.
+            while (subTreeOp.getOperatorTag() == LogicalOperatorTag.ASSIGN
+                    || subTreeOp.getOperatorTag() == LogicalOperatorTag.UNNEST) {
+                assignsAndUnnestsRefs.add(subTreeOpRef);
+                assignsAndUnnests.add(subTreeOp);
+
+                subTreeOpRef = subTreeOp.getInputs().get(0);
+                subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+            }
+        } while (subTreeOp.getOperatorTag() == LogicalOperatorTag.SELECT);
+
+        // Match data source (datasource scan or primary index search).
+        return initializeDataSource(subTreeOpRef);
+    }
+
+    private boolean initializeDataSource(Mutable<ILogicalOperator> subTreeOpRef) {
+        AbstractLogicalOperator subTreeOp = (AbstractLogicalOperator) subTreeOpRef.getValue();
+        if (subTreeOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+            dataSourceType = DataSourceType.DATASOURCE_SCAN;
+            dataSourceRef = subTreeOpRef;
+            return true;
+        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.EXTERNAL_LOOKUP) {
+            dataSourceType = DataSourceType.EXTERNAL_SCAN;
+            dataSourceRef = subTreeOpRef;
+            return true;
+        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE) {
+            dataSourceType = DataSourceType.COLLECTION_SCAN;
+            dataSourceRef = subTreeOpRef;
+            return true;
+        } else if (subTreeOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
+            UnnestMapOperator unnestMapOp = (UnnestMapOperator) subTreeOp;
+            ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+            if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                    jobGenParams.readFromFuncArgs(f.getArguments());
+                    if (jobGenParams.isPrimaryIndex()) {
+                        dataSourceType = DataSourceType.PRIMARY_INDEX_LOOKUP;
+                        dataSourceRef = subTreeOpRef;
+                        return true;
+                    }
+                }
+            }
+        }
+
+        return false;
+    }
+
+    /**
+     * Find the dataset corresponding to the datasource scan in the metadata.
+     * Also sets recordType to be the type of that dataset.
+     */
+    public boolean setDatasetAndTypeMetadata(AqlMetadataProvider metadataProvider) throws AlgebricksException {
+        String dataverseName = null;
+        String datasetName = null;
+        switch (dataSourceType) {
+            case DATASOURCE_SCAN:
+                DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) dataSourceRef.getValue();
+                Pair<String, String> datasetInfo = AnalysisUtil.getDatasetInfo(dataSourceScan);
+                dataverseName = datasetInfo.first;
+                datasetName = datasetInfo.second;
+                break;
+            case PRIMARY_INDEX_LOOKUP:
+                AbstractUnnestOperator unnestMapOp = (AbstractUnnestOperator) dataSourceRef.getValue();
+                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                jobGenParams.readFromFuncArgs(f.getArguments());
+                datasetName = jobGenParams.getDatasetName();
+                dataverseName = jobGenParams.getDataverseName();
+                break;
+            case EXTERNAL_SCAN:
+                ExternalDataLookupOperator externalScan = (ExternalDataLookupOperator) dataSourceRef.getValue();
+                datasetInfo = AnalysisUtil.getDatasetInfo(externalScan);
+                dataverseName = datasetInfo.first;
+                datasetName = datasetInfo.second;
+                break;
+            case COLLECTION_SCAN:
+                return true;
+            case NO_DATASOURCE:
+            default:
+                return false;
+        }
+        if (dataverseName == null || datasetName == null) {
+            return false;
+        }
+        // Find the dataset corresponding to the datasource in the metadata.
+        dataset = metadataProvider.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AlgebricksException("No metadata for dataset " + datasetName);
+        }
+        // Get the record type for that dataset.
+        IAType itemType = metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+        if (itemType.getTypeTag() != ATypeTag.RECORD) {
+            return false;
+        }
+        recordType = (ARecordType) itemType;
+        return true;
+    }
+
+    public boolean hasDataSource() {
+        return dataSourceType != DataSourceType.NO_DATASOURCE;
+    }
+
+    public boolean hasDataSourceScan() {
+        return dataSourceType == DataSourceType.DATASOURCE_SCAN;
+    }
+
+    public void reset() {
+        root = null;
+        rootRef = null;
+        assignsAndUnnestsRefs.clear();
+        assignsAndUnnests.clear();
+        dataSourceRef = null;
+        dataSourceType = DataSourceType.NO_DATASOURCE;
+        dataset = null;
+        recordType = null;
+    }
+
+    public void getPrimaryKeyVars(List<LogicalVariable> target) throws AlgebricksException {
+        switch (dataSourceType) {
+            case DATASOURCE_SCAN:
+                DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) dataSourceRef.getValue();
+                int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+                for (int i = 0; i < numPrimaryKeys; i++) {
+                    target.add(dataSourceScan.getVariables().get(i));
+                }
+                break;
+            case PRIMARY_INDEX_LOOKUP:
+                UnnestMapOperator unnestMapOp = (UnnestMapOperator) dataSourceRef.getValue();
+                List<LogicalVariable> primaryKeys = null;
+                primaryKeys = AccessMethodUtils.getPrimaryKeyVarsFromPrimaryUnnestMap(dataset, unnestMapOp);
+                target.addAll(primaryKeys);
+                break;
+            case NO_DATASOURCE:
+            default:
+                throw new AlgebricksException("The subtree does not have any data source.");
+        }
+    }
+
+    public List<LogicalVariable> getDataSourceVariables() throws AlgebricksException {
+        switch (dataSourceType) {
+            case DATASOURCE_SCAN:
+            case EXTERNAL_SCAN:
+            case PRIMARY_INDEX_LOOKUP:
+                AbstractScanOperator scanOp = (AbstractScanOperator) dataSourceRef.getValue();
+                return scanOp.getVariables();
+            case COLLECTION_SCAN:
+                return new ArrayList<LogicalVariable>();
+            case NO_DATASOURCE:
+            default:
+                throw new AlgebricksException("The subtree does not have any data source.");
+        }
+    }
+}
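
A minimal driver sketch (not part of this commit) showing how an access-method rule might use this matcher: initialize the subtree from an operator reference, resolve the dataset metadata, and collect the primary-key variables. The class and method names are hypothetical; real rules perform considerably more analysis.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.optimizer.rules.am.OptimizableOperatorSubTree;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

// Hypothetical driver; only the datasource-scan case is handled here.
public class SubTreeMatchSketch {
    public static List<LogicalVariable> matchAndGetPrimaryKeys(Mutable<ILogicalOperator> opRef,
            AqlMetadataProvider metadataProvider) throws AlgebricksException {
        List<LogicalVariable> primaryKeys = new ArrayList<LogicalVariable>();
        OptimizableOperatorSubTree subTree = new OptimizableOperatorSubTree();
        // Match the (select)? <-- (assign | unnest)* <-- (datasource scan | unnest-map) pattern.
        if (!subTree.initFromSubTree(opRef) || !subTree.hasDataSourceScan()) {
            return primaryKeys;
        }
        // Resolve the dataset and its record type from the metadata provider.
        if (!subTree.setDatasetAndTypeMetadata(metadataProvider)) {
            return primaryKeys;
        }
        // The leading scan variables of the datasource scan are the primary keys.
        subTree.getPrimaryKeyVars(primaryKeys);
        return primaryKeys;
    }
}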

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
new file mode 100644
index 0000000..e2dffd1
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
@@ -0,0 +1,255 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+
+/**
+ * Class for helping rewrite rules to choose and apply RTree indexes.
+ */
+public class RTreeAccessMethod implements IAccessMethod {
+
+    private static List<FunctionIdentifier> funcIdents = new ArrayList<FunctionIdentifier>();
+    static {
+        funcIdents.add(AsterixBuiltinFunctions.SPATIAL_INTERSECT);
+    }
+
+    public static RTreeAccessMethod INSTANCE = new RTreeAccessMethod();
+
+    @Override
+    public List<FunctionIdentifier> getOptimizableFunctions() {
+        return funcIdents;
+    }
+
+    @Override
+    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
+            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
+        boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
+        if (!matches) {
+            matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVars(funcExpr, analysisCtx);
+        }
+        return matches;
+    }
+
+    @Override
+    public boolean matchAllIndexExprs() {
+        return true;
+    }
+
+    @Override
+    public boolean matchPrefixIndexExprs() {
+        return false;
+    }
+
+    @Override
+    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
+            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
+            IOptimizationContext context) throws AlgebricksException {
+        // TODO: We can probably do something smarter here based on selectivity or MBR area.
+        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
+        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(subTree, null, chosenIndex, optFuncExpr,
+                analysisCtx, false, false, false, context);
+        if (primaryIndexUnnestOp == null) {
+            return false;
+        }
+        // Replace the datasource scan with the new plan rooted at primaryIndexUnnestMap.
+        subTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+        return true;
+    }
+
+    @Override
+    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
+            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
+            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
+            boolean hasGroupBy) throws AlgebricksException {
+        // Determine if the index is applicable on the left or right side (if both, we arbitrarily prefer the left side).
+        Dataset dataset = analysisCtx.indexDatasetMap.get(chosenIndex);
+        // Determine probe and index subtrees based on chosen index.
+        OptimizableOperatorSubTree indexSubTree = null;
+        OptimizableOperatorSubTree probeSubTree = null;
+        if (!isLeftOuterJoin && leftSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(leftSubTree.dataset.getDatasetName())) {
+            indexSubTree = leftSubTree;
+            probeSubTree = rightSubTree;
+        } else if (rightSubTree.hasDataSourceScan()
+                && dataset.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
+            indexSubTree = rightSubTree;
+            probeSubTree = leftSubTree;
+        }
+        if (indexSubTree == null) {
+            // This may happen in the left outer join case.
+            return false;
+        }
+
+        LogicalVariable newNullPlaceHolderVar = null;
+        if (isLeftOuterJoin) {
+            //get a new null place holder variable that is the first field variable of the primary key
+            //from the indexSubTree's datasourceScanOp
+            newNullPlaceHolderVar = indexSubTree.getDataSourceVariables().get(0);
+        }
+
+        // TODO: We can probably do something smarter here based on selectivity or MBR area.
+        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
+        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(indexSubTree, probeSubTree, chosenIndex,
+                optFuncExpr, analysisCtx, true, isLeftOuterJoin, true, context);
+        if (primaryIndexUnnestOp == null) {
+            return false;
+        }
+
+        if (isLeftOuterJoin && hasGroupBy) {
+            //reset the null place holder variable
+            AccessMethodUtils.resetLOJNullPlaceholderVariableInGroupByOp(analysisCtx, newNullPlaceHolderVar, context);
+        }
+
+        indexSubTree.dataSourceRef.setValue(primaryIndexUnnestOp);
+        // Change join into a select with the same condition.
+        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
+        SelectOperator topSelect = new SelectOperator(joinOp.getCondition(), isLeftOuterJoin, newNullPlaceHolderVar);
+        topSelect.getInputs().add(indexSubTree.rootRef);
+        topSelect.setExecutionMode(ExecutionMode.LOCAL);
+        context.computeAndSetTypeEnvironmentForOperator(topSelect);
+        // Replace the original join with the new subtree rooted at the select op.
+        joinRef.setValue(topSelect);
+        return true;
+    }
+
+    private ILogicalOperator createSecondaryToPrimaryPlan(OptimizableOperatorSubTree indexSubTree,
+            OptimizableOperatorSubTree probeSubTree, Index chosenIndex, IOptimizableFuncExpr optFuncExpr,
+            AccessMethodAnalysisContext analysisCtx, boolean retainInput, boolean retainNull,
+            boolean requiresBroadcast, IOptimizationContext context) throws AlgebricksException {
+        Dataset dataset = indexSubTree.dataset;
+        ARecordType recordType = indexSubTree.recordType;
+
+        int optFieldIdx = AccessMethodUtils.chooseFirstOptFuncVar(chosenIndex, analysisCtx);
+        Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(optFuncExpr.getFieldType(optFieldIdx),
+                optFuncExpr.getFieldName(optFieldIdx), recordType);
+        if (keyPairType == null) {
+            return null;
+        }
+
+        // Get the number of dimensions corresponding to the field indexed by chosenIndex.
+        IAType spatialType = keyPairType.first;
+        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+        int numSecondaryKeys = numDimensions * 2;
+        // we made sure indexSubTree has datasource scan
+        AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.dataSourceRef.getValue();
+        RTreeJobGenParams jobGenParams = new RTreeJobGenParams(chosenIndex.getIndexName(), IndexType.RTREE,
+                dataset.getDataverseName(), dataset.getDatasetName(), retainInput, retainNull, requiresBroadcast);
+        // A spatial object is serialized in the constant of the func expr we are optimizing.
+        // The R-Tree expects as input an MBR represented with 1 field per dimension.
+        // Here we generate vars and funcs for extracting MBR fields from the constant into fields of a tuple (as the R-Tree expects them).
+        // List of variables for the assign.
+        ArrayList<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
+        // List of expressions for the assign.
+        ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
+        Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(optFuncExpr,
+                indexSubTree, probeSubTree);
+        ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
+
+        for (int i = 0; i < numSecondaryKeys; i++) {
+            // The create MBR function "extracts" one field of an MBR around the given spatial object.
+            AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_MBR));
+            // Spatial object is the constant from the func expr we are optimizing.
+            createMBR.getArguments().add(new MutableObject<ILogicalExpression>(searchKeyExpr));
+            // The number of dimensions.
+            createMBR.getArguments().add(
+                    new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(
+                            numDimensions)))));
+            // Which part of the MBR to extract.
+            createMBR.getArguments().add(
+                    new MutableObject<ILogicalExpression>(new ConstantExpression(
+                            new AsterixConstantValue(new AInt32(i)))));
+            // Add a variable and its expr to the lists which will be passed into an assign op.
+            LogicalVariable keyVar = context.newVar();
+            keyVarList.add(keyVar);
+            keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
+        }
+        jobGenParams.setKeyVarList(keyVarList);
+
+        // Assign operator that "extracts" the MBR fields from the func-expr constant into a tuple.
+        AssignOperator assignSearchKeys = new AssignOperator(keyVarList, keyExprList);
+        if (probeSubTree == null) {
+            // We are optimizing a selection query.
+            // Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
+            assignSearchKeys.getInputs().add(dataSourceOp.getInputs().get(0));
+            assignSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
+        } else {
+            // We are optimizing a join; place the assign op on top of the probe subtree.
+            assignSearchKeys.getInputs().add(probeSubTree.rootRef);
+        }
+
+        UnnestMapOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
+                chosenIndex, assignSearchKeys, jobGenParams, context, false, retainInput);
+
+        // Generate the rest of the upstream plan which feeds the search results into the primary index.
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            ExternalDataLookupOperator externalDataAccessOp = AccessMethodUtils.createExternalDataLookupUnnestMap(
+                    dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, chosenIndex, retainInput,
+                    retainNull);
+            return externalDataAccessOp;
+        } else {
+            UnnestMapOperator primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp,
+                    dataset, recordType, secondaryIndexUnnestOp, context, true, retainInput, false, false);
+
+            return primaryIndexUnnestOp;
+        }
+    }
+
+    @Override
+    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
+        if (optFuncExpr.getFuncExpr().getAnnotations()
+                .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
+            return false;
+        }
+        // No additional analysis required.
+        return true;
+    }
+}
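
A small illustrative check (not part of this commit): before attempting either plan transformation, the driving rule would first ask the access method whether a condition's function is one it can optimize, which for the R-tree is currently only spatial-intersect. The class name below is hypothetical.

import edu.uci.ics.asterix.optimizer.rules.am.RTreeAccessMethod;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;

// Hypothetical helper; the real selection/join rules iterate over all registered access methods.
public class RTreeCandidateCheckSketch {
    public static boolean isRTreeCandidate(AbstractFunctionCallExpression condition) {
        FunctionIdentifier fid = condition.getFunctionIdentifier();
        return RTreeAccessMethod.INSTANCE.getOptimizableFunctions().contains(fid);
    }
}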

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
new file mode 100644
index 0000000..9b6ff5f
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeJobGenParams.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+
+/**
+ * Helper class for reading and writing job-gen parameters for RTree access methods to
+ * and from a list of function arguments, typically of an unnest-map.
+ */
+public class RTreeJobGenParams extends AccessMethodJobGenParams {
+
+    protected List<LogicalVariable> keyVarList;
+
+    public RTreeJobGenParams() {
+    }
+
+    public RTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
+            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
+        super(indexName, indexType, dataverseName, datasetName, retainInput, retainNull, requiresBroadcast);
+    }
+
+    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.writeToFuncArgs(funcArgs);
+        writeVarList(keyVarList, funcArgs);
+    }
+
+    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
+        super.readFromFuncArgs(funcArgs);
+        int index = super.getNumParams();
+        keyVarList = new ArrayList<LogicalVariable>();
+        readVarList(funcArgs, index, keyVarList);
+    }
+
+    public void setKeyVarList(List<LogicalVariable> keyVarList) {
+        this.keyVarList = keyVarList;
+    }
+
+    public List<LogicalVariable> getKeyVarList() {
+        return keyVarList;
+    }
+}
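
A round-trip sketch (not part of this commit): the parameters, including the MBR key variables, are written into the index-search function's argument list and recovered later from it. The index, dataverse, and dataset names below are placeholders rather than real metadata.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
import edu.uci.ics.asterix.optimizer.rules.am.RTreeJobGenParams;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

// Hypothetical round trip; in the rewrite rules the argument list belongs to an unnest-map operator.
public class RTreeJobGenParamsSketch {
    public static RTreeJobGenParams roundTrip(List<LogicalVariable> mbrKeyVars) {
        RTreeJobGenParams written = new RTreeJobGenParams("sampleRTreeIdx", IndexType.RTREE,
                "SampleDataverse", "SampleDataset", false, false, false);
        written.setKeyVarList(mbrKeyVars);
        List<Mutable<ILogicalExpression>> funcArgs = new ArrayList<Mutable<ILogicalExpression>>();
        written.writeToFuncArgs(funcArgs);

        RTreeJobGenParams read = new RTreeJobGenParams();
        read.readFromFuncArgs(funcArgs);
        return read;
    }
}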

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
new file mode 100644
index 0000000..567d85d
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.temporal;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Finds interval conditional expressions and converts them to interval start and end conditional statements.
+ * The translation exposes the condition to the Algebricks optimizer.
+ */
+public class TranslateIntervalExpressionRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator selectOp = (SelectOperator) op;
+
+        Mutable<ILogicalExpression> exprRef = selectOp.getCondition();
+        boolean modified = false;
+        ILogicalExpression expr = exprRef.getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        if (funcExpr.getArguments().size() != 2) {
+            return false;
+        }
+        ILogicalExpression interval1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression interval2 = funcExpr.getArguments().get(1).getValue();
+        if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_MEETS)) {
+            exprRef.setValue(getEqualExpr(getIntervalEndExpr(interval1), getIntervalStartExpr(interval2)));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_MET_BY)) {
+            exprRef.setValue(getEqualExpr(getIntervalStartExpr(interval1), getIntervalEndExpr(interval2)));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_STARTS)) {
+            ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
+                    getIntervalStartExpr(interval2));
+            ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval1),
+                    getIntervalEndExpr(interval2));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_STARTED_BY)) {
+            ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
+                    getIntervalStartExpr(interval2));
+            ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval2),
+                    getIntervalEndExpr(interval1));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_ENDS)) {
+            ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
+            ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval1),
+                    getIntervalStartExpr(interval2));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_ENDED_BY)) {
+            ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
+            ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval2),
+                    getIntervalStartExpr(interval1));
+            exprRef.setValue(getAndExpr(startExpr, endExpr));
+            modified = true;
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_BEFORE)) {
+            // Requires a new strategy; no translation is done for this interval function or for the remaining ones listed below.
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_AFTER)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPS)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPPED_BY)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_OVERLAPPING)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_COVERS)) {
+        } else if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.INTERVAL_COVERED_BY)) {
+        }
+
+        return modified;
+    }
+
+    private ILogicalExpression getAndExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
+        return getScalarExpr(AlgebricksBuiltinFunctions.AND, arg1, arg2);
+    }
+
+    private ILogicalExpression getEqualExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
+        return getScalarExpr(AlgebricksBuiltinFunctions.EQ, arg1, arg2);
+    }
+
+    private ILogicalExpression getLessThanOrEqualExpr(ILogicalExpression arg1, ILogicalExpression arg2) {
+        return getScalarExpr(AlgebricksBuiltinFunctions.LE, arg1, arg2);
+    }
+
+    private ILogicalExpression getIntervalStartExpr(ILogicalExpression interval) {
+        return getScalarExpr(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START, interval);
+    }
+
+    private ILogicalExpression getIntervalEndExpr(ILogicalExpression interval) {
+        return getScalarExpr(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END, interval);
+    }
+
+    private ILogicalExpression getScalarExpr(FunctionIdentifier func, ILogicalExpression interval) {
+        List<Mutable<ILogicalExpression>> intervalArg = new ArrayList<Mutable<ILogicalExpression>>();
+        intervalArg.add(new MutableObject<ILogicalExpression>(interval));
+        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(func), intervalArg);
+    }
+
+    private ILogicalExpression getScalarExpr(FunctionIdentifier func, ILogicalExpression interval1,
+            ILogicalExpression interval2) {
+        List<Mutable<ILogicalExpression>> intervalArg = new ArrayList<Mutable<ILogicalExpression>>();
+        intervalArg.add(new MutableObject<ILogicalExpression>(interval1));
+        intervalArg.add(new MutableObject<ILogicalExpression>(interval2));
+        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(func), intervalArg);
+    }
+
+}
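
A standalone sketch (not part of this commit) of the first translation above: interval-meets(i1, i2) becomes interval-end(i1) = interval-start(i2). It reuses only the expression-construction calls that appear in the rule itself; the class and helper names are hypothetical.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;

import edu.uci.ics.asterix.aql.util.FunctionUtils;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;

// Hypothetical helper class illustrating the interval-meets rewrite in isolation.
public class IntervalMeetsTranslationSketch {
    public static ILogicalExpression translateMeets(ILogicalExpression i1, ILogicalExpression i2) {
        // interval-end(i1)
        ScalarFunctionCallExpression endOfI1 = new ScalarFunctionCallExpression(
                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_END), args(i1));
        // interval-start(i2)
        ScalarFunctionCallExpression startOfI2 = new ScalarFunctionCallExpression(
                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.ACCESSOR_TEMPORAL_INTERVAL_START), args(i2));
        // interval-end(i1) = interval-start(i2)
        return new ScalarFunctionCallExpression(
                FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.EQ), args(endOfI1, startOfI2));
    }

    private static List<Mutable<ILogicalExpression>> args(ILogicalExpression... exprs) {
        List<Mutable<ILogicalExpression>> argList = new ArrayList<Mutable<ILogicalExpression>>();
        for (ILogicalExpression e : exprs) {
            argList.add(new MutableObject<ILogicalExpression>(e));
        }
        return argList;
    }
}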

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
new file mode 100644
index 0000000..f6bf3c3
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
@@ -0,0 +1,548 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules.typecast;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+
+/**
+ * This class is a utility for static type casting.
+ * It offers two public methods:
+ * 1. public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ * IVariableTypeEnvironment env) throws AlgebricksException, which only enforces the list type recursively.
+ * 2. public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ * IVariableTypeEnvironment env) throws AlgebricksException, which enforces the list type and the record type recursively.
+ *
+ * @author yingyib
+ */
+public class StaticTypeCastUtil {
+
+    /**
+     * This method is only called when funcExpr contains list constructor function calls.
+     * The List constructor is very special because a nested list is of type List<ANY>.
+     * However, the bottom-up type inference (InferTypeRule in Algebricks) does not infer that, so we need this method to enforce the type.
+     * We do not want to break the generality of Algebricks, so this method is called in an ASTERIX rule: {@link IntroduceEnforcedListTypeRule}.
+     * 
+     * @param funcExpr
+     *            a function expression that contains list constructor calls
+     * @param reqType
+     *            required list type
+     * @param inputType
+     *            input list type
+     * @param env
+     *            type environment
+     * @throws AlgebricksException
+     */
+    public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else {
+            List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+            boolean changed = false;
+            for (Mutable<ILogicalExpression> arg : args) {
+                ILogicalExpression argExpr = arg.getValue();
+                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
+                    IAType exprType = (IAType) env.getType(argFuncExpr);
+                    changed = rewriteListExpr(argFuncExpr, exprType, exprType, env) || changed;
+                }
+            }
+            return changed;
+        }
+    }
+
+    /**
+     * This method is to recursively enforce required types, for the list type and the record type.
+     * The List constructor is very special because
+     * 1. a nested list in a list is of type List<ANY>;
+     * 2. a nested record in a list is of type Open_Record{}.
+     * The open record constructor is very special because
+     * 1. a nested list in the open part is of type List<ANY>;
+     * 2. a nested record in the open part is of type Open_Record{}.
+     * However, the bottom-up type inference (InferTypeRule in Algebricks) does not infer that, so we need this method to enforce the type.
+     * We do not want to break the generality of Algebricks, so this method is called in an ASTERIX rule: {@link IntroduceStaticTypeCastRule}.
+     * 
+     * @param funcExpr
+     *            the function expression whose type needs to be top-down enforced
+     * @param reqType
+     *            the required type inferred from parent operators/expressions
+     * @param inputType
+     *            the currently inferred type
+     * @param env
+     *            the type environment
+     * @return true if the type is cast; otherwise, false.
+     * @throws AlgebricksException
+     */
+    public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        /**
+         * Sanity check: if there are list (ordered or unordered) or record variable expressions in the funcExpr, we will not do STATIC type casting
+         * because they are not "statically cast-able";
+         * instead, the record will be dynamically cast at runtime.
+         */
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+            }
+            return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+                    env);
+        } else if (inputType.getTypeTag().equals(ATypeTag.RECORD)) {
+            if (reqType.equals(BuiltinType.ANY)) {
+                reqType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+            }
+            return rewriteRecordFuncExpr(funcExpr, (ARecordType) reqType, (ARecordType) inputType, env);
+        } else {
+            List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+            boolean changed = false;
+            for (Mutable<ILogicalExpression> arg : args) {
+                ILogicalExpression argExpr = arg.getValue();
+                if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
+                    IAType exprType = (IAType) env.getType(argFuncExpr);
+                    changed = changed || rewriteFuncExpr(argFuncExpr, exprType, exprType, env);
+                }
+            }
+            if (!compatible(reqType, inputType)) {
+                throw new AlgebricksException("type mismatch, required: " + reqType.toString() + " actual: "
+                        + inputType.toString());
+            }
+            return changed;
+        }
+    }
+
+    /**
+     * Only called when funcExpr is a record constructor.
+     * 
+     * @param funcExpr
+     *            record constructor function expression
+     * @param requiredRecordType
+     *            required record type
+     * @param inputRecordType
+     *            input record type
+     * @param env
+     *            type environment
+     * @throws AlgebricksException
+     */
+    private static boolean rewriteRecordFuncExpr(AbstractFunctionCallExpression funcExpr,
+            ARecordType requiredRecordType, ARecordType inputRecordType, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        // if already rewritten, the required type is not null
+        if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
+            return false;
+        boolean casted = staticRecordTypeCast(funcExpr, requiredRecordType, inputRecordType, env);
+        if (casted) {
+            // Enforce the required type if it is statically cast.
+            TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredRecordType, inputRecordType);
+        }
+        return casted;
+    }
+
+    /**
+     * Only called when funcExpr is a list constructor.
+     * 
+     * @param funcExpr
+     *            list constructor function expression
+     * @param requiredListType
+     *            required list type
+     * @param inputListType
+     *            input list type
+     * @param env
+     *            type environment
+     * @throws AlgebricksException
+     */
+    private static boolean rewriteListFuncExpr(AbstractFunctionCallExpression funcExpr,
+            AbstractCollectionType requiredListType, AbstractCollectionType inputListType, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
+            return false;
+
+        TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredListType, inputListType);
+        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+
+        IAType itemType = requiredListType.getItemType();
+        IAType inputItemType = inputListType.getItemType();
+        boolean changed = false;
+        for (int j = 0; j < args.size(); j++) {
+            ILogicalExpression arg = args.get(j).getValue();
+            IAType currentItemType = (inputItemType == null || inputItemType == BuiltinType.ANY) ? (IAType) env
+                    .getType(arg) : inputItemType;
+            switch (arg.getExpressionTag()) {
+                case FUNCTION_CALL:
+                    ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) arg;
+                    changed = rewriteFuncExpr(argFunc, itemType, currentItemType, env) || changed;
+                    break;
+                case VARIABLE:
+                    changed = injectCastToRelaxType(args.get(j), currentItemType, env) || changed;
+                    break;
+            }
+        }
+        return changed;
+    }
+
+    /**
+     * This method statically casts the type of records from their current type to the required type.
+     * 
+     * @param func
+     *            The record constructor expression.
+     * @param reqType
+     *            The required type.
+     * @param inputType
+     *            The current type.
+     * @param env
+     *            The type environment.
+     * @throws AlgebricksException
+     */
+    private static boolean staticRecordTypeCast(AbstractFunctionCallExpression func, ARecordType reqType,
+            ARecordType inputType, IVariableTypeEnvironment env) throws AlgebricksException {
+        if (!(func.getFunctionIdentifier() == AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR || func
+                .getFunctionIdentifier() == AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
+            return false;
+        }
+        IAType[] reqFieldTypes = reqType.getFieldTypes();
+        String[] reqFieldNames = reqType.getFieldNames();
+        IAType[] inputFieldTypes = inputType.getFieldTypes();
+        String[] inputFieldNames = inputType.getFieldNames();
+
+        int[] fieldPermutation = new int[reqFieldTypes.length];
+        boolean[] nullFields = new boolean[reqFieldTypes.length];
+        boolean[] openFields = new boolean[inputFieldTypes.length];
+
+        Arrays.fill(nullFields, false);
+        Arrays.fill(openFields, true);
+        Arrays.fill(fieldPermutation, -1);
+
+        // forward match: match from actual to required
+        boolean matched = false;
+        for (int i = 0; i < inputFieldNames.length; i++) {
+            String fieldName = inputFieldNames[i];
+            IAType fieldType = inputFieldTypes[i];
+
+            if (2 * i + 1 > func.getArguments().size()) {
+                // it is not a record constructor function
+                return false;
+            }
+
+            // 2*i+1 is the index of field value expression
+            ILogicalExpression arg = func.getArguments().get(2 * i + 1).getValue();
+            matched = false;
+            for (int j = 0; j < reqFieldNames.length; j++) {
+                String reqFieldName = reqFieldNames[j];
+                IAType reqFieldType = reqFieldTypes[j];
+                if (fieldName.equals(reqFieldName)) {
+                    //type matched
+                    if (fieldType.equals(reqFieldType)) {
+                        fieldPermutation[j] = i;
+                        openFields[i] = false;
+                        matched = true;
+
+                        if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                            ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+                            rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+                        }
+                        break;
+                    }
+
+                    // match the optional field
+                    if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
+                        IAType itemType = ((AUnionType) reqFieldType).getNullableType();
+                        reqFieldType = itemType;
+                        if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
+                            fieldPermutation[j] = i;
+                            openFields[i] = false;
+                            matched = true;
+
+                            // rewrite record expr
+                            if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                                ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+                                rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+                            }
+                            break;
+                        }
+                    }
+
+                    // match the optional type input for a non-optional field
+                    // delay that to runtime by calling the not-null function
+                    if (NonTaggedFormatUtil.isOptional(fieldType)) {
+                        IAType itemType = ((AUnionType) fieldType).getNullableType();
+                        if (reqFieldType.equals(itemType)) {
+                            fieldPermutation[j] = i;
+                            openFields[i] = false;
+                            matched = true;
+
+                            ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(
+                                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
+                            notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
+                            // wrap the original argument with the not-null function
+                            func.getArguments().get(2 * i + 1).setValue(notNullFunc);
+                            break;
+                        }
+                    }
+
+                    // match the record field: need cast
+                    if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                        ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+                        rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+                        fieldPermutation[j] = i;
+                        openFields[i] = false;
+                        matched = true;
+                        break;
+                    }
+                }
+            }
+            // the input has a field that the closed required type does not declare
+            if (!matched && !reqType.isOpen()) {
+                throw new AlgebricksException("static type mismatch: the input record includes an extra closed field "
+                        + fieldName + ":" + fieldType + "! Please check the field name and type.");
+            }
+        }
+
+        // backward match: match from required to actual
+        for (int i = 0; i < reqFieldNames.length; i++) {
+            String reqFieldName = reqFieldNames[i];
+            IAType reqFieldType = reqFieldTypes[i];
+            matched = false;
+            for (int j = 0; j < inputFieldNames.length; j++) {
+                String fieldName = inputFieldNames[j];
+                IAType fieldType = inputFieldTypes[j];
+                if (!fieldName.equals(reqFieldName))
+                    continue;
+                // Check the open-field flag (openFields) here rather than
+                // fieldPermutation: fieldPermutation has one entry per
+                // required schema field, indexed by the required-field
+                // position, whereas here we want to know whether this
+                // input field has already been matched.
+                if (!openFields[j]) {
+                    matched = true;
+                    break;
+                }
+
+                // match the optional field
+                if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
+                    IAType itemType = ((AUnionType) reqFieldType).getNullableType();
+                    if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
+                        matched = true;
+                        break;
+                    }
+                }
+            }
+            if (matched)
+                continue;
+
+            if (NonTaggedFormatUtil.isOptional(reqFieldType)) {
+                // add a null field
+                nullFields[i] = true;
+            } else {
+                // no matched field in the input for a required closed field
+                if (inputType.isOpen()) {
+                    // if the input type is open, return false and let the dynamic type cast defer the error to runtime
+                    return false;
+                } else {
+                    throw new AlgebricksException(
+                            "static type mismatch: the input record misses a required closed field " + reqFieldName
+                                    + ":" + reqFieldType + "! Please check the field name and type.");
+                }
+            }
+        }
+
+        List<Mutable<ILogicalExpression>> arguments = func.getArguments();
+        List<Mutable<ILogicalExpression>> originalArguments = new ArrayList<Mutable<ILogicalExpression>>();
+        originalArguments.addAll(arguments);
+        arguments.clear();
+        // re-order the closed part and fill in null fields
+        for (int i = 0; i < fieldPermutation.length; i++) {
+            int pos = fieldPermutation[i];
+            if (pos >= 0) {
+                arguments.add(originalArguments.get(2 * pos));
+                arguments.add(originalArguments.get(2 * pos + 1));
+            }
+            if (nullFields[i]) {
+                // add a null field
+                arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                        new AString(reqFieldNames[i])))));
+                arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                        ANull.NULL))));
+            }
+        }
+
+        // add the open part
+        for (int i = 0; i < openFields.length; i++) {
+            if (openFields[i]) {
+                arguments.add(originalArguments.get(2 * i));
+                Mutable<ILogicalExpression> expRef = originalArguments.get(2 * i + 1);
+                injectCastToRelaxType(expRef, inputFieldTypes[i], env);
+                arguments.add(expRef);
+            }
+        }
+        return true;
+    }
+
+    private static boolean injectCastToRelaxType(Mutable<ILogicalExpression> expRef, IAType inputFieldType,
+            IVariableTypeEnvironment env) throws AlgebricksException {
+        ILogicalExpression argExpr = expRef.getValue();
+        List<LogicalVariable> parameterVars = new ArrayList<LogicalVariable>();
+        argExpr.getUsedVariables(parameterVars);
+        // Open fields need to be handled recursively using their default
+        // open types: for lists, the item type is ANY; for records, the
+        // nested open record type is used.
+        boolean castInjected = false;
+        if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL
+                || argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            IAType reqFieldType = inputFieldType;
+            FunctionIdentifier fi = null;
+            // do not enforce a nested type when the argument uses no variables
+            switch (inputFieldType.getTypeTag()) {
+                case RECORD:
+                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+                    fi = AsterixBuiltinFunctions.CAST_RECORD;
+                    break;
+                case ORDEREDLIST:
+                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+                    fi = AsterixBuiltinFunctions.CAST_LIST;
+                    break;
+                case UNORDEREDLIST:
+                    reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+                    fi = AsterixBuiltinFunctions.CAST_LIST;
+            }
+            if (fi != null && !inputFieldType.equals(reqFieldType) && parameterVars.size() > 0) {
+                //inject dynamic type casting
+                injectCastFunction(FunctionUtils.getFunctionInfo(fi), reqFieldType, inputFieldType, expRef, argExpr);
+                castInjected = true;
+            }
+            if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                //recursively rewrite function arguments
+                if (TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) argExpr) == null
+                        && reqFieldType != null) {
+                    if (castInjected) {
+                        //rewrite the arg expression inside the dynamic cast
+                        ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
+                        rewriteFuncExpr(argFunc, inputFieldType, inputFieldType, env);
+                    } else {
+                        //rewrite arg
+                        ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
+                        rewriteFuncExpr(argFunc, reqFieldType, inputFieldType, env);
+                    }
+                }
+            }
+        }
+        return castInjected;
+    }
+
+    /**
+     * Inject a dynamic cast function wrapping an existing expression
+     * 
+     * @param funcInfo
+     *            the cast function
+     * @param reqType
+     *            the required type
+     * @param inputType
+     *            the original type
+     * @param exprRef
+     *            the expression reference
+     * @param argExpr
+     *            the original expression
+     */
+    private static void injectCastFunction(IFunctionInfo funcInfo, IAType reqType, IAType inputType,
+            Mutable<ILogicalExpression> exprRef, ILogicalExpression argExpr) {
+        ScalarFunctionCallExpression cast = new ScalarFunctionCallExpression(funcInfo);
+        cast.getArguments().add(new MutableObject<ILogicalExpression>(argExpr));
+        exprRef.setValue(cast);
+        TypeComputerUtilities.setRequiredAndInputTypes(cast, reqType, inputType);
+    }
+
+    /**
+     * Determine if two types are compatible
+     * 
+     * @param reqType
+     *            the required type
+     * @param inputType
+     *            the input type
+     * @return true if the two types are compatible; false otherwise
+     */
+    public static boolean compatible(IAType reqType, IAType inputType) {
+        if (reqType.getTypeTag() == ATypeTag.ANY || inputType.getTypeTag() == ATypeTag.ANY) {
+            return true;
+        }
+        if (reqType.getTypeTag() != ATypeTag.UNION && inputType.getTypeTag() != ATypeTag.UNION) {
+            if (reqType.equals(inputType)) {
+                return true;
+            } else {
+                return false;
+            }
+        }
+        Set<IAType> reqTypePossible = new HashSet<IAType>();
+        Set<IAType> inputTypePossible = new HashSet<IAType>();
+        if (reqType.getTypeTag() == ATypeTag.UNION) {
+            AUnionType unionType = (AUnionType) reqType;
+            reqTypePossible.addAll(unionType.getUnionList());
+        } else {
+            reqTypePossible.add(reqType);
+        }
+
+        if (inputType.getTypeTag() == ATypeTag.UNION) {
+            AUnionType unionType = (AUnionType) inputType;
+            inputTypePossible.addAll(unionType.getUnionList());
+        } else {
+            inputTypePossible.add(inputType);
+        }
+        return reqTypePossible.equals(inputTypePossible);
+    }
+}
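
For readers skimming the diff: the compatible() helper above treats a union type as the set of its member types and declares two types compatible only when those sets are equal (or when either side is ANY). A minimal standalone sketch of that set-based check, with plain strings standing in for IAType and all class and method names purely illustrative, might look like this:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    // Illustrative sketch only: a "type" is modeled as the set of its member
    // type names, so a non-union type is a singleton set and ANY matches all.
    public class CompatibleSketch {

        static boolean compatible(Set<String> reqPossible, Set<String> inputPossible) {
            if (reqPossible.contains("ANY") || inputPossible.contains("ANY")) {
                return true;
            }
            // Unions (or singletons) are compatible only if they expand to the same set.
            return reqPossible.equals(inputPossible);
        }

        public static void main(String[] args) {
            Set<String> optionalInt = new HashSet<>(Arrays.asList("NULL", "INT32"));
            Set<String> plainInt = new HashSet<>(Arrays.asList("INT32"));
            System.out.println(compatible(optionalInt, optionalInt)); // true
            System.out.println(compatible(optionalInt, plainInt));    // false
        }
    }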

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
new file mode 100644
index 0000000..6c82c11
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules.util;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.MutableObject;
+import org.mortbay.util.SingletonList;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.EquivalenceClass;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+
+public class EquivalenceClassUtils {
+
+    /**
+     * Adds equivalence classes for primary index accesses, including unnest-map for
+     * primary index access and data source scan through a primary index ---
+     * one equivalence class between a primary key variable and a record field-access expression.
+     * 
+     * @param operator
+     *            , the primary index access operator.
+     * @param indexSearchVars
+     *            , the returned variables from primary index access. The last variable
+     *            is the record variable.
+     * @param recordType
+     *            , the record type of an index payload record.
+     * @param dataset
+     *            , the accessed dataset.
+     * @param context
+     *            , the optimization context.
+     * @throws AlgebricksException
+     */
+    @SuppressWarnings("unchecked")
+    public static void addEquivalenceClassesForPrimaryIndexAccess(ILogicalOperator operator,
+            List<LogicalVariable> indexSearchVars, ARecordType recordType, Dataset dataset, IOptimizationContext context)
+            throws AlgebricksException {
+        if (dataset.getDatasetDetails().getDatasetType() != DatasetType.INTERNAL) {
+            return;
+        }
+        InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
+        List<List<String>> primaryKey = datasetDetails.getPrimaryKey();
+        Map<String, Integer> fieldNameToIndexMap = new HashMap<String, Integer>();
+        String[] fieldNames = recordType.getFieldNames();
+        for (int fieldIndex = 0; fieldIndex < fieldNames.length; ++fieldIndex) {
+            fieldNameToIndexMap.put(fieldNames[fieldIndex], fieldIndex);
+        }
+
+        LogicalVariable recordVar = indexSearchVars.get(indexSearchVars.size() - 1);
+        for (int pkIndex = 0; pkIndex < primaryKey.size(); ++pkIndex) {
+            String pkFieldName = primaryKey.get(pkIndex).get(0);
+            int fieldIndexInRecord = fieldNameToIndexMap.get(pkFieldName);
+            LogicalVariable var = indexSearchVars.get(pkIndex);
+            ILogicalExpression expr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
+                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(recordVar)),
+                    new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(new AInt32(
+                            fieldIndexInRecord)))));
+            EquivalenceClass equivClass = new EquivalenceClass(SingletonList.newSingletonList(var), var,
+                    SingletonList.newSingletonList(expr));
+            Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
+            if (equivalenceMap == null) {
+                equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
+                context.putEquivalenceClassMap(operator, equivalenceMap);
+            }
+            equivalenceMap.put(var, equivClass);
+        }
+    }
+
+}
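
To make the mapping step above concrete: the utility first builds a field-name-to-index map over the record type, then pairs the i-th primary-key variable with a field-access-by-index expression over the record variable. A standalone sketch of just the name-to-index pairing, with plain strings and integers standing in for the Algebricks variable and expression classes (all names illustrative), could be:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch: resolve each primary-key field name to its position
    // in the record type, as done before building the field-access-by-index
    // expressions above.
    public class PkFieldIndexSketch {

        static List<Integer> pkFieldIndexes(String[] recordFieldNames, List<String> primaryKey) {
            Map<String, Integer> fieldNameToIndex = new HashMap<String, Integer>();
            for (int i = 0; i < recordFieldNames.length; i++) {
                fieldNameToIndex.put(recordFieldNames[i], i);
            }
            List<Integer> indexes = new ArrayList<Integer>();
            for (String pkField : primaryKey) {
                indexes.add(fieldNameToIndex.get(pkField));
            }
            return indexes;
        }

        public static void main(String[] args) {
            String[] fields = { "id", "name", "age" };
            System.out.println(pkFieldIndexes(fields, Arrays.asList("id"))); // [0]
        }
    }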

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java
new file mode 100644
index 0000000..1c31da0
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractAqlTranslator.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.DatasetDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
+import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
+import edu.uci.ics.asterix.common.api.IClusterManagementWork.ClusterState;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints;
+import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Base class for AQL translators. Contains the common validation logic for AQL
+ * statements.
+ */
+public abstract class AbstractAqlTranslator {
+
+    protected static final Logger LOGGER = Logger.getLogger(AbstractAqlTranslator.class.getName());
+
+    protected static final Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
+
+    public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
+
+        if (!(AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE) && AsterixClusterProperties.INSTANCE
+                .isGlobalRecoveryCompleted())) {
+            int maxWaitCycles = AsterixAppContextInfo.getInstance().getExternalProperties().getMaxWaitClusterActive();
+            int waitCycleCount = 0;
+            try {
+                while (!AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE)
+                        && waitCycleCount < maxWaitCycles) {
+                    Thread.sleep(1000);
+                    waitCycleCount++;
+                }
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Thread interrupted while waiting for cluster to be "
+                            + ClusterState.ACTIVE);
+                }
+            }
+            if (!AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
+                throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE
+                        + " state." + "\n One or more Node Controllers have left or haven't joined yet.\n");
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Cluster is now " + ClusterState.ACTIVE);
+                }
+            }
+        }
+
+        if (AsterixClusterProperties.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
+            throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE + " state."
+                    + "\n One or more Node Controllers have left.\n");
+        }
+
+        if (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted()) {
+            int maxWaitCycles = AsterixAppContextInfo.getInstance().getExternalProperties().getMaxWaitClusterActive();
+            int waitCycleCount = 0;
+            try {
+                while (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
+                    Thread.sleep(1000);
+                    waitCycleCount++;
+                }
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Thread interrupted while waiting for cluster to complete global recovery ");
+                }
+            }
+            if (!AsterixClusterProperties.INSTANCE.isGlobalRecoveryCompleted()) {
+                throw new AsterixException(" Asterix Cluster Global recovery is not yet complete and the system is in "
+                        + ClusterState.ACTIVE + " state");
+            }
+        }
+
+        boolean invalidOperation = false;
+        String message = null;
+        String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
+        switch (stmt.getKind()) {
+            case INSERT:
+                InsertStatement insertStmt = (InsertStatement) stmt;
+                if (insertStmt.getDataverseName() != null) {
+                    dataverse = insertStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Insert operation is not permitted in dataverse "
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+
+            case DELETE:
+                DeleteStatement deleteStmt = (DeleteStatement) stmt;
+                if (deleteStmt.getDataverseName() != null) {
+                    dataverse = deleteStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Delete operation is not permitted in dataverse "
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+
+            case NODEGROUP_DROP:
+                String nodegroupName = ((NodeGroupDropStatement) stmt).getNodeGroupName().getValue();
+                invalidOperation = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME.equals(nodegroupName);
+                if (invalidOperation) {
+                    message = "Cannot drop nodegroup:" + nodegroupName;
+                }
+                break;
+
+            case DATAVERSE_DROP:
+                DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName()
+                        .getValue());
+                if (invalidOperation) {
+                    message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
+                }
+                break;
+
+            case DATASET_DROP:
+                DropStatement dropStmt = (DropStatement) stmt;
+                if (dropStmt.getDataverseName() != null) {
+                    dataverse = dropStmt.getDataverseName().getValue();
+                }
+                invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+                if (invalidOperation) {
+                    message = "Cannot drop a dataset belonging to the dataverse:"
+                            + MetadataConstants.METADATA_DATAVERSE_NAME;
+                }
+                break;
+            case DATASET_DECL:
+                DatasetDecl datasetStmt = (DatasetDecl) stmt;
+                Map<String, String> hints = datasetStmt.getHints();
+                if (hints != null && !hints.isEmpty()) {
+                    Pair<Boolean, String> validationResult = null;
+                    StringBuffer errorMsgBuffer = new StringBuffer();
+                    for (Entry<String, String> hint : hints.entrySet()) {
+                        validationResult = DatasetHints.validate(hint.getKey(), hint.getValue());
+                        if (!validationResult.first) {
+                            errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
+                                    + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
+                            errorMsgBuffer.append(" \n");
+                        }
+                    }
+                    invalidOperation = errorMsgBuffer.length() > 0;
+                    if (invalidOperation) {
+                        message = errorMsgBuffer.toString();
+                    }
+                }
+                break;
+
+        }
+
+        if (invalidOperation) {
+            throw new AsterixException("Invalid operation - " + message);
+        }
+    }
+}
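
Both wait loops in validateOperation() above follow the same bounded-polling pattern: sleep one second per cycle until either the condition holds or the configured maximum number of cycles is exhausted, then re-check the condition once more to decide whether to fail. A generic sketch of that pattern, where the condition supplier and helper names are illustrative and not AsterixDB API, is:

    import java.util.function.BooleanSupplier;

    // Illustrative helper mirroring the bounded wait loops above.
    public class BoundedWaitSketch {

        // Polls once per second until the condition holds or maxWaitCycles is used up.
        static boolean waitFor(BooleanSupplier condition, int maxWaitCycles) {
            int waitCycleCount = 0;
            while (!condition.getAsBoolean() && waitCycleCount < maxWaitCycles) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
                waitCycleCount++;
            }
            return condition.getAsBoolean();
        }

        public static void main(String[] args) {
            final long deadline = System.currentTimeMillis() + 1500;
            boolean ready = waitFor(() -> System.currentTimeMillis() > deadline, 5);
            System.out.println("ready = " + ready);
        }
    }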


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
new file mode 100644
index 0000000..5a4b52a
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.optimizer.rules.am.BTreeJobGenParams;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.ListSet;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.LocalOrderProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+
+/**
+ * Contributes the runtime operator for an unnest-map representing a BTree search.
+ */
+public class BTreeSearchPOperator extends IndexSearchPOperator {
+
+    private final List<LogicalVariable> lowKeyVarList;
+    private final List<LogicalVariable> highKeyVarList;
+    private final boolean isPrimaryIndex;
+    private final boolean isEqCondition;
+    private Object implConfig;
+
+    public BTreeSearchPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast,
+            boolean isPrimaryIndex, boolean isEqCondition, List<LogicalVariable> lowKeyVarList,
+            List<LogicalVariable> highKeyVarList) {
+        super(idx, requiresBroadcast);
+        this.isPrimaryIndex = isPrimaryIndex;
+        this.isEqCondition = isEqCondition;
+        this.lowKeyVarList = lowKeyVarList;
+        this.highKeyVarList = highKeyVarList;
+    }
+
+    public void setImplConfig(Object implConfig) {
+        this.implConfig = implConfig;
+    }
+
+    public Object getImplConfig() {
+        return implConfig;
+    }
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.BTREE_SEARCH;
+    }
+
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        UnnestMapOperator unnestMap = (UnnestMapOperator) op;
+        ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
+        if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            throw new IllegalStateException();
+        }
+        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
+        FunctionIdentifier funcIdent = unnestFuncExpr.getFunctionIdentifier();
+        if (!funcIdent.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+            return;
+        }
+        BTreeJobGenParams jobGenParams = new BTreeJobGenParams();
+        jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
+        int[] lowKeyIndexes = getKeyIndexes(jobGenParams.getLowKeyVarList(), inputSchemas);
+        int[] highKeyIndexes = getKeyIndexes(jobGenParams.getHighKeyVarList(), inputSchemas);
+
+        int[] minFilterFieldIndexes = getKeyIndexes(unnestMap.getMinFilterVars(), inputSchemas);
+        int[] maxFilterFieldIndexes = getKeyIndexes(unnestMap.getMaxFilterVars(), inputSchemas);
+
+        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+        Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
+        List<LogicalVariable> outputVars = unnestMap.getVariables();
+        if (jobGenParams.getRetainInput()) {
+            outputVars = new ArrayList<LogicalVariable>();
+            VariableUtilities.getLiveVariables(unnestMap, outputVars);
+        }
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> btreeSearch = metadataProvider.buildBtreeRuntime(
+                builder.getJobSpec(), outputVars, opSchema, typeEnv, context, jobGenParams.getRetainInput(),
+                jobGenParams.getRetainNull(), dataset, jobGenParams.getIndexName(), lowKeyIndexes, highKeyIndexes,
+                jobGenParams.isLowKeyInclusive(), jobGenParams.isHighKeyInclusive(), implConfig, minFilterFieldIndexes,
+                maxFilterFieldIndexes);
+
+        builder.contributeHyracksOperator(unnestMap, btreeSearch.first);
+        builder.contributeAlgebricksPartitionConstraint(btreeSearch.first, btreeSearch.second);
+
+        ILogicalOperator srcExchange = unnestMap.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(srcExchange, 0, unnestMap, 0);
+    }
+
+    @Override
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent) {
+        if (requiresBroadcast) {
+            // For primary indexes optimizing an equality condition, we can reduce the broadcast requirement to hash partitioning.
+            if (isPrimaryIndex && isEqCondition) {
+                StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
+                ListSet<LogicalVariable> searchKeyVars = new ListSet<LogicalVariable>();
+                searchKeyVars.addAll(lowKeyVarList);
+                searchKeyVars.addAll(highKeyVarList);
+                // Also, add a local sorting property to enforce a sort before the primary-index operator.
+                List<ILocalStructuralProperty> propsLocal = new ArrayList<ILocalStructuralProperty>();
+                List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
+                for (LogicalVariable orderVar : searchKeyVars) {
+                    orderColumns.add(new OrderColumn(orderVar, OrderKind.ASC));
+                }
+                propsLocal.add(new LocalOrderProperty(orderColumns));
+                pv[0] = new StructuralPropertiesVector(new UnorderedPartitionedProperty(searchKeyVars, null),
+                        propsLocal);
+                return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+            } else {
+                StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
+                pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
+                return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+            }
+        } else {
+            return super.getRequiredPropertiesForChildren(op, reqdByParent);
+        }
+    }
+}
\ No newline at end of file
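
The property computation at the end of BTreeSearchPOperator encodes a simple decision: when a broadcast would otherwise be required, an equality probe into a primary index can instead be satisfied with hash partitioning on the search keys plus a local sort; anything else keeps the broadcast. A reduced sketch of that decision as a pure function (the enum and names are illustrative, not Algebricks classes) is:

    // Illustrative sketch of the partitioning-requirement decision above.
    public class BTreeRequirementSketch {

        enum Requirement { HASH_PARTITION_WITH_LOCAL_SORT, BROADCAST, INHERIT_DEFAULT }

        static Requirement requiredProperty(boolean requiresBroadcast, boolean isPrimaryIndex,
                boolean isEqCondition) {
            if (!requiresBroadcast) {
                return Requirement.INHERIT_DEFAULT; // defer to the generic index-search behavior
            }
            if (isPrimaryIndex && isEqCondition) {
                // Equality probe into the primary index: hash-partition on the
                // search keys and sort locally before the search.
                return Requirement.HASH_PARTITION_WITH_LOCAL_SORT;
            }
            return Requirement.BROADCAST; // otherwise ship the probe to every partition
        }

        public static void main(String[] args) {
            System.out.println(requiredProperty(true, true, true));   // HASH_PARTITION_WITH_LOCAL_SORT
            System.out.println(requiredProperty(true, false, true));  // BROADCAST
        }
    }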

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
new file mode 100644
index 0000000..6722cbe
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AbstractPhysicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+
+public class CommitPOperator extends AbstractPhysicalOperator {
+
+    private final List<LogicalVariable> primaryKeyLogicalVars;
+    private final JobId jobId;
+    private final int datasetId;
+
+    public CommitPOperator(JobId jobId, int datasetId, List<LogicalVariable> primaryKeyLogicalVars) {
+        this.jobId = jobId;
+        this.datasetId = datasetId;
+        this.primaryKeyLogicalVars = primaryKeyLogicalVars;
+    }
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.EXTENSION_OPERATOR;
+    }
+
+    @Override
+    public String toString() {
+        return "COMMIT";
+    }
+
+    @Override
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent) {
+        return emptyUnaryRequirements();
+    }
+
+    @Override
+    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
+    }
+
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+
+        RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
+                context);
+        int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]);
+
+        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+        CommitRuntimeFactory runtime = new CommitRuntimeFactory(jobId, datasetId, primaryKeyFields,
+                metadataProvider.isTemporaryDatasetWriteJob(), metadataProvider.isWriteTransaction());
+        builder.contributeMicroOperator(op, runtime, recDesc);
+        ILogicalOperator src = op.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(src, 0, op, 0);
+    }
+
+    @Override
+    public boolean isMicroOperator() {
+        return true;
+    }
+
+    @Override
+    public boolean expensiveThanMaterialization() {
+        return false;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java
new file mode 100644
index 0000000..69c8f78
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntime.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.transactions.ILogManager;
+import edu.uci.ics.asterix.common.transactions.ITransactionContext;
+import edu.uci.ics.asterix.common.transactions.ITransactionManager;
+import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.asterix.common.transactions.LogRecord;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.MurmurHash128Bit;
+
+public class CommitRuntime implements IPushRuntime {
+
+    private final static long SEED = 0L;
+
+    private final IHyracksTaskContext hyracksTaskCtx;
+    private final ITransactionManager transactionManager;
+    private final ILogManager logMgr;
+    private final JobId jobId;
+    private final int datasetId;
+    private final int[] primaryKeyFields;
+    private final boolean isTemporaryDatasetWriteJob;
+    private final boolean isWriteTransaction;
+    private final long[] longHashes;
+    private final LogRecord logRecord;
+
+    private ITransactionContext transactionContext;
+    private FrameTupleAccessor frameTupleAccessor;
+    private final FrameTupleReference frameTupleReference;
+
+    public CommitRuntime(IHyracksTaskContext ctx, JobId jobId, int datasetId, int[] primaryKeyFields,
+            boolean isTemporaryDatasetWriteJob, boolean isWriteTransaction) {
+        this.hyracksTaskCtx = ctx;
+        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+                .getApplicationContext().getApplicationObject();
+        this.transactionManager = runtimeCtx.getTransactionSubsystem().getTransactionManager();
+        this.logMgr = runtimeCtx.getTransactionSubsystem().getLogManager();
+        this.jobId = jobId;
+        this.datasetId = datasetId;
+        this.primaryKeyFields = primaryKeyFields;
+        this.frameTupleReference = new FrameTupleReference();
+        this.isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob;
+        this.isWriteTransaction = isWriteTransaction;
+        this.longHashes = new long[2];
+        this.logRecord = new LogRecord();
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        try {
+            transactionContext = transactionManager.getTransactionContext(jobId, false);
+            transactionContext.setWriteTxn(isWriteTransaction);
+        } catch (ACIDException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        int pkHash = 0;
+        frameTupleAccessor.reset(buffer);
+        int nTuple = frameTupleAccessor.getTupleCount();
+        for (int t = 0; t < nTuple; t++) {
+            if (isTemporaryDatasetWriteJob) {
+                /**
+                 * This branch handles writes over temporary datasets.
+                 * A temporary dataset requires no locks and generates no write-ahead
+                 * update or commit log records, but it still generates flush logs and a job commit log.
+                 * However, a temporary dataset must still guarantee the no-steal policy, so this
+                 * notification call must be delivered to the PrimaryIndexOptracker so that it
+                 * correctly decrements its active operation count.
+                 * By maintaining the count correctly and allowing flushes only when the count is 0,
+                 * the no-steal policy is guaranteed for temporary datasets as well.
+                 */
+                transactionContext.notifyOptracker(false);
+            } else {
+                frameTupleReference.reset(frameTupleAccessor, t);
+                pkHash = computePrimaryKeyHashValue(frameTupleReference, primaryKeyFields);
+                logRecord.formEntityCommitLogRecord(transactionContext, datasetId, pkHash, frameTupleReference,
+                        primaryKeyFields);
+                try {
+                    logMgr.log(logRecord);
+                } catch (ACIDException e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+        }
+    }
+
+    private int computePrimaryKeyHashValue(ITupleReference tuple, int[] primaryKeyFields) {
+        MurmurHash128Bit.hash3_x64_128(tuple, primaryKeyFields, SEED, longHashes);
+        return Math.abs((int) longHashes[0]);
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+
+    }
+
+    @Override
+    public void setFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+        throw new IllegalStateException();
+    }
+
+    @Override
+    public void setInputRecordDescriptor(int index, RecordDescriptor recordDescriptor) {
+        this.frameTupleAccessor = new FrameTupleAccessor(recordDescriptor);
+    }
+}
\ No newline at end of file
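
The per-tuple logic in nextFrame() above boils down to one branch: writes over temporary datasets only notify the operation tracker (to preserve the no-steal policy), while regular writes hash the primary key and append an entity-commit log record. A condensed sketch of that branch, with two small interfaces standing in for the transaction context and the log manager (names are illustrative), is:

    // Illustrative sketch of the per-tuple commit decision in nextFrame() above.
    public class CommitDecisionSketch {

        interface OpTracker { void notifyCompletion(); }
        interface EntityCommitLogger { void log(int pkHash); }

        static void commitTuple(boolean isTemporaryDatasetWriteJob, int pkHash,
                OpTracker tracker, EntityCommitLogger logger) {
            if (isTemporaryDatasetWriteJob) {
                // Temporary datasets skip entity-commit logging but must still
                // decrement the active-operation count (no-steal policy).
                tracker.notifyCompletion();
            } else {
                logger.log(pkHash);
            }
        }

        public static void main(String[] args) {
            commitTuple(true, 42,
                    () -> System.out.println("notified op tracker"),
                    h -> System.out.println("logged entity commit for pk hash " + h));
            commitTuple(false, 42,
                    () -> System.out.println("notified op tracker"),
                    h -> System.out.println("logged entity commit for pk hash " + h));
        }
    }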

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java
new file mode 100644
index 0000000..768cdb6
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitRuntimeFactory.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class CommitRuntimeFactory implements IPushRuntimeFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private final JobId jobId;
+    private final int datasetId;
+    private final int[] primaryKeyFields;
+    private final boolean isTemporaryDatasetWriteJob;
+    private final boolean isWriteTransaction;
+
+    public CommitRuntimeFactory(JobId jobId, int datasetId, int[] primaryKeyFields, boolean isTemporaryDatasetWriteJob,
+            boolean isWriteTransaction) {
+        this.jobId = jobId;
+        this.datasetId = datasetId;
+        this.primaryKeyFields = primaryKeyFields;
+        this.isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob;
+        this.isWriteTransaction = isWriteTransaction;
+    }
+
+    @Override
+    public String toString() {
+        return "commit";
+    }
+
+    @Override
+    public IPushRuntime createPushRuntime(IHyracksTaskContext ctx) throws AlgebricksException {
+        return new CommitRuntime(ctx, jobId, datasetId, primaryKeyFields, isTemporaryDatasetWriteJob,
+                isWriteTransaction);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java
new file mode 100644
index 0000000..dcbc70c
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.external.indexing.dataflow.HDFSLookupAdapterFactory;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AbstractScanPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+
+public class ExternalDataLookupPOperator extends AbstractScanPOperator {
+
+    private final List<LogicalVariable> ridVarList;
+    private AqlSourceId datasetId;
+    private Dataset dataset;
+    private ARecordType recordType;
+    private Index secondaryIndex;
+    private boolean requiresBroadcast;
+    private boolean retainInput;
+    private boolean retainNull;
+
+    public ExternalDataLookupPOperator(AqlSourceId datasetId, Dataset dataset, ARecordType recordType,
+            Index secondaryIndex, List<LogicalVariable> ridVarList, boolean requiresBroadcast, boolean retainInput, boolean retainNull) {
+        this.datasetId = datasetId;
+        this.dataset = dataset;
+        this.recordType = recordType;
+        this.secondaryIndex = secondaryIndex;
+        this.ridVarList = ridVarList;
+        this.requiresBroadcast = requiresBroadcast;
+        this.retainInput = retainInput;
+        this.retainNull = retainNull;
+    }
+
+    public Dataset getDataset() {
+        return dataset;
+    }
+
+    public void setDataset(Dataset dataset) {
+        this.dataset = dataset;
+    }
+
+    public ARecordType getRecordType() {
+        return recordType;
+    }
+
+    public void setRecordType(ARecordType recordType) {
+        this.recordType = recordType;
+    }
+
+    public AqlSourceId getDatasetId() {
+        return datasetId;
+    }
+
+    public void setDatasetId(AqlSourceId datasetId) {
+        this.datasetId = datasetId;
+    }
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.EXTERNAL_LOOKUP;
+    }
+
+    @Override
+    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context)
+            throws AlgebricksException {
+        AqlDataSource ds = new DatasetDataSource(datasetId, datasetId.getDataverseName(), datasetId.getDatasourceName(),
+                recordType, AqlDataSourceType.EXTERNAL_DATASET);
+        IDataSourcePropertiesProvider dspp = ds.getPropertiesProvider();
+        AbstractScanOperator as = (AbstractScanOperator) op;
+        deliveredProperties = dspp.computePropertiesVector(as.getVariables());
+    }
+
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        ExternalDataLookupOperator edabro = (ExternalDataLookupOperator) op;
+        ILogicalExpression expr = edabro.getExpressionRef().getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            throw new IllegalStateException();
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+        if (!funcIdent.equals(AsterixBuiltinFunctions.EXTERNAL_LOOKUP)) {
+            return;
+        }
+        int[] ridIndexes = getKeyIndexes(ridVarList, inputSchemas);
+        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
+        List<LogicalVariable> outputVars = new ArrayList<LogicalVariable>();
+        if (retainInput) {
+            VariableUtilities.getLiveVariables(edabro, outputVars);
+        } else {
+            VariableUtilities.getProducedVariables(edabro, outputVars);
+        }
+
+        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> externalLookup = HDFSLookupAdapterFactory
+                .buildExternalDataLookupRuntime(builder.getJobSpec(), dataset, secondaryIndex, ridIndexes, retainInput,
+                        typeEnv, outputVars, opSchema, context, metadataProvider, retainNull);
+        builder.contributeHyracksOperator(edabro, externalLookup.first);
+        builder.contributeAlgebricksPartitionConstraint(externalLookup.first, externalLookup.second);
+        ILogicalOperator srcExchange = edabro.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(srcExchange, 0, edabro, 0);
+    }
+
+    protected int[] getKeyIndexes(List<LogicalVariable> keyVarList, IOperatorSchema[] inputSchemas) {
+        if (keyVarList == null) {
+            return null;
+        }
+        int[] keyIndexes = new int[keyVarList.size()];
+        for (int i = 0; i < keyVarList.size(); i++) {
+            keyIndexes[i] = inputSchemas[0].findVariable(keyVarList.get(i));
+        }
+        return keyIndexes;
+    }
+
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent) {
+        if (requiresBroadcast) {
+            StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
+            pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
+            return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+
+        } else {
+            return super.getRequiredPropertiesForChildren(op, reqdByParent);
+        }
+    }
+
+    @Override
+    public boolean isMicroOperator() {
+        return false;
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/IndexSearchPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/IndexSearchPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/IndexSearchPOperator.java
new file mode 100644
index 0000000..1793407
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/IndexSearchPOperator.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AbstractScanPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+
+/**
+ * Class that embodies the commonalities between access method physical operators.
+ */
+public abstract class IndexSearchPOperator extends AbstractScanPOperator {
+
+    protected final IDataSourceIndex<String, AqlSourceId> idx;
+    protected final boolean requiresBroadcast;
+
+    public IndexSearchPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast) {
+        this.idx = idx;
+        this.requiresBroadcast = requiresBroadcast;
+    }
+
+    @Override
+    public boolean isMicroOperator() {
+        return false;
+    }
+
+    @Override
+    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
+        IDataSource<?> ds = idx.getDataSource();
+        IDataSourcePropertiesProvider dspp = ds.getPropertiesProvider();
+        AbstractScanOperator as = (AbstractScanOperator) op;
+        deliveredProperties = dspp.computePropertiesVector(as.getVariables());
+    }
+
+    protected int[] getKeyIndexes(List<LogicalVariable> keyVarList, IOperatorSchema[] inputSchemas) {
+        if (keyVarList == null) {
+            return null;
+        }
+        int[] keyIndexes = new int[keyVarList.size()];
+        for (int i = 0; i < keyVarList.size(); i++) {
+            keyIndexes[i] = inputSchemas[0].findVariable(keyVarList.get(i));
+        }
+        return keyIndexes;
+    }
+
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent) {
+        if (requiresBroadcast) {
+            StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
+            pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
+            return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+        } else {
+            return super.getRequiredPropertiesForChildren(op, reqdByParent);
+        }
+    }
+
+    @Override
+    public boolean expensiveThanMaterialization() {
+        return true;
+    }
+}

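As a rough illustration of the variable-to-column mapping that getKeyIndexes performs above, the following self-contained sketch uses toy stand-ins (ToySchema and the integer variable ids are hypothetical, not Algebricks classes); each key variable is looked up in the first input schema and its column position is recorded.

import java.util.Arrays;
import java.util.List;

public class KeyIndexMappingSketch {

    // A toy schema: an ordered list of variable ids; findVariable returns the column position or -1.
    static class ToySchema {
        private final List<Integer> columns;

        ToySchema(Integer... vars) {
            this.columns = Arrays.asList(vars);
        }

        int findVariable(int var) {
            return columns.indexOf(var);
        }
    }

    // Mirrors the shape of IndexSearchPOperator.getKeyIndexes on the toy schema.
    static int[] getKeyIndexes(List<Integer> keyVarList, ToySchema inputSchema) {
        if (keyVarList == null) {
            return null;
        }
        int[] keyIndexes = new int[keyVarList.size()];
        for (int i = 0; i < keyVarList.size(); i++) {
            keyIndexes[i] = inputSchema.findVariable(keyVarList.get(i));
        }
        return keyIndexes;
    }

    public static void main(String[] args) {
        ToySchema schema = new ToySchema(7, 3, 9); // columns 0..2 hold variables $7, $3, $9
        System.out.println(Arrays.toString(getKeyIndexes(Arrays.asList(9, 3), schema))); // prints [2, 1]
    }
}
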
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
new file mode 100644
index 0000000..12b2723
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexAccessMethod;
+import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexAccessMethod.SearchModifierType;
+import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexJobGenParams;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
+
+/**
+ * Contributes the runtime operator for an unnest-map representing an
+ * inverted-index search.
+ */
+public class InvertedIndexPOperator extends IndexSearchPOperator {
+    private final boolean isPartitioned;
+
+    public InvertedIndexPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast,
+            boolean isPartitioned) {
+        super(idx, requiresBroadcast);
+        this.isPartitioned = isPartitioned;
+    }
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        if (isPartitioned) {
+            return PhysicalOperatorTag.LENGTH_PARTITIONED_INVERTED_INDEX_SEARCH;
+        } else {
+            return PhysicalOperatorTag.SINGLE_PARTITION_INVERTED_INDEX_SEARCH;
+        }
+    }
+
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        UnnestMapOperator unnestMapOp = (UnnestMapOperator) op;
+        ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+        if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            throw new IllegalStateException();
+        }
+        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
+        if (unnestFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.INDEX_SEARCH) {
+            return;
+        }
+        InvertedIndexJobGenParams jobGenParams = new InvertedIndexJobGenParams();
+        jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
+
+        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+        Dataset dataset;
+        try {
+            dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
+                    jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+        } catch (MetadataException e) {
+            throw new AlgebricksException(e);
+        }
+        int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
+
+        int[] minFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMinFilterVars(), inputSchemas);
+        int[] maxFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMaxFilterVars(), inputSchemas);
+        // Build runtime.
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> invIndexSearch = buildInvertedIndexRuntime(
+                metadataProvider, context, builder.getJobSpec(), unnestMapOp, opSchema, jobGenParams.getRetainInput(),
+                jobGenParams.getRetainNull(), jobGenParams.getDatasetName(), dataset, jobGenParams.getIndexName(),
+                jobGenParams.getSearchKeyType(), keyIndexes, jobGenParams.getSearchModifierType(),
+                jobGenParams.getSimilarityThreshold(), minFilterFieldIndexes, maxFilterFieldIndexes);
+
+        // Contribute operator in hyracks job.
+        builder.contributeHyracksOperator(unnestMapOp, invIndexSearch.first);
+        builder.contributeAlgebricksPartitionConstraint(invIndexSearch.first, invIndexSearch.second);
+        ILogicalOperator srcExchange = unnestMapOp.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(srcExchange, 0, unnestMapOp, 0);
+    }
+
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildInvertedIndexRuntime(
+            AqlMetadataProvider metadataProvider, JobGenContext context, JobSpecification jobSpec,
+            UnnestMapOperator unnestMap, IOperatorSchema opSchema, boolean retainInput, boolean retainNull,
+            String datasetName, Dataset dataset, String indexName, ATypeTag searchKeyType, int[] keyFields,
+            SearchModifierType searchModifierType, IAlgebricksConstantValue similarityThreshold,
+            int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes) throws AlgebricksException {
+
+        try {
+            IAObject simThresh = ((AsterixConstantValue) similarityThreshold).getObject();
+            IAType itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
+                    dataset.getDataverseName(), dataset.getItemTypeName()).getDatatype();
+            int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+            Index secondaryIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
+                    dataset.getDataverseName(), dataset.getDatasetName(), indexName);
+            if (secondaryIndex == null) {
+                throw new AlgebricksException("Code generation error: no index " + indexName + " for dataset "
+                        + datasetName);
+            }
+            List<List<String>> secondaryKeyFieldEntries = secondaryIndex.getKeyFieldNames();
+            List<IAType> secondaryKeyTypeEntries = secondaryIndex.getKeyFieldTypes();
+            int numSecondaryKeys = secondaryKeyFieldEntries.size();
+            if (numSecondaryKeys != 1) {
+                throw new AlgebricksException(
+                        "Cannot use "
+                                + numSecondaryKeys
+                                + " fields as a key for an inverted index. There can be only one field as a key for the inverted index.");
+            }
+            if (itemType.getTypeTag() != ATypeTag.RECORD) {
+                throw new AlgebricksException("Only record types can be indexed.");
+            }
+            ARecordType recordType = (ARecordType) itemType;
+            Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypeEntries.get(0),
+                    secondaryKeyFieldEntries.get(0), recordType);
+            IAType secondaryKeyType = keyPairType.first;
+            if (secondaryKeyType == null) {
+                throw new AlgebricksException("Could not find field " + secondaryKeyFieldEntries.get(0)
+                        + " in the schema.");
+            }
+
+            // TODO: For now we assume the type of the generated tokens is the
+            // same as the indexed field.
+            // We need a better way of expressing this because tokens may be
+            // hashed, or an inverted-index may index a list type, etc.
+            int numTokenKeys = (!isPartitioned) ? numSecondaryKeys : numSecondaryKeys + 1;
+            ITypeTraits[] tokenTypeTraits = new ITypeTraits[numTokenKeys];
+            IBinaryComparatorFactory[] tokenComparatorFactories = new IBinaryComparatorFactory[numTokenKeys];
+            for (int i = 0; i < numSecondaryKeys; i++) {
+                tokenComparatorFactories[i] = NonTaggedFormatUtil.getTokenBinaryComparatorFactory(secondaryKeyType);
+                tokenTypeTraits[i] = NonTaggedFormatUtil.getTokenTypeTrait(secondaryKeyType);
+            }
+            if (isPartitioned) {
+                // The partitioning field is hardcoded to be a short *without* an Asterix type tag.
+                tokenComparatorFactories[numSecondaryKeys] = PointableBinaryComparatorFactory
+                        .of(ShortPointable.FACTORY);
+                tokenTypeTraits[numSecondaryKeys] = ShortPointable.TYPE_TRAITS;
+            }
+
+            IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
+            List<LogicalVariable> outputVars = unnestMap.getVariables();
+            if (retainInput) {
+                outputVars = new ArrayList<LogicalVariable>();
+                VariableUtilities.getLiveVariables(unnestMap, outputVars);
+            }
+            RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
+
+            int start = outputRecDesc.getFieldCount() - numPrimaryKeys;
+            IBinaryComparatorFactory[] invListsComparatorFactories = JobGenHelper
+                    .variablesToAscBinaryComparatorFactories(outputVars, start, numPrimaryKeys, typeEnv, context);
+            ITypeTraits[] invListsTypeTraits = JobGenHelper.variablesToTypeTraits(outputVars, start, numPrimaryKeys,
+                    typeEnv, context);
+
+            ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, recordType);
+            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(
+                    dataset, recordType, context.getBinaryComparatorFactoryProvider());
+
+            int[] filterFields = null;
+            int[] invertedIndexFields = null;
+            int[] filterFieldsForNonBulkLoadOps = null;
+            int[] invertedIndexFieldsForNonBulkLoadOps = null;
+            if (filterTypeTraits != null) {
+                filterFields = new int[1];
+                filterFields[0] = numTokenKeys + numPrimaryKeys;
+                invertedIndexFields = new int[numTokenKeys + numPrimaryKeys];
+                for (int k = 0; k < invertedIndexFields.length; k++) {
+                    invertedIndexFields[k] = k;
+                }
+
+                filterFieldsForNonBulkLoadOps = new int[1];
+                filterFieldsForNonBulkLoadOps[0] = numPrimaryKeys + numSecondaryKeys;
+                invertedIndexFieldsForNonBulkLoadOps = new int[numPrimaryKeys + numSecondaryKeys];
+                for (int k = 0; k < invertedIndexFieldsForNonBulkLoadOps.length; k++) {
+                    invertedIndexFieldsForNonBulkLoadOps[k] = k;
+                }
+            }
+
+            IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
+            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
+                    .splitProviderAndPartitionConstraintsForDataset(dataset.getDataverseName(), datasetName, indexName,
+                            dataset.getDatasetDetails().isTemp());
+            // TODO: Here we assume there is only one search key field.
+            int queryField = keyFields[0];
+            // Get tokenizer and search modifier factories.
+            IInvertedIndexSearchModifierFactory searchModifierFactory = InvertedIndexAccessMethod
+                    .getSearchModifierFactory(searchModifierType, simThresh, secondaryIndex);
+            IBinaryTokenizerFactory queryTokenizerFactory = InvertedIndexAccessMethod.getBinaryTokenizerFactory(
+                    searchModifierType, searchKeyType, secondaryIndex);
+            IIndexDataflowHelperFactory dataflowHelperFactory;
+
+            AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
+                    dataset, metadataProvider.getMetadataTxnContext());
+            boolean temp = dataset.getDatasetDetails().isTemp();
+            if (!isPartitioned) {
+                dataflowHelperFactory = new LSMInvertedIndexDataflowHelperFactory(
+                        new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
+                        compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
+                        invertedIndexFieldsForNonBulkLoadOps, !temp);
+            } else {
+                dataflowHelperFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
+                        new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
+                        compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
+                        invertedIndexFieldsForNonBulkLoadOps, !temp);
+            }
+            LSMInvertedIndexSearchOperatorDescriptor invIndexSearchOp = new LSMInvertedIndexSearchOperatorDescriptor(
+                    jobSpec, queryField, appContext.getStorageManagerInterface(), secondarySplitsAndConstraint.first,
+                    appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
+                    invListsTypeTraits, invListsComparatorFactories, dataflowHelperFactory, queryTokenizerFactory,
+                    searchModifierFactory, outputRecDesc, retainInput, retainNull, context.getNullWriterFactory(),
+                    NoOpOperationCallbackFactory.INSTANCE, minFilterFieldIndexes, maxFilterFieldIndexes);
+
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(invIndexSearchOp,
+                    secondarySplitsAndConstraint.second);
+        } catch (MetadataException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+}

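To make the token-key layout built above concrete (one comparator/type-trait slot per secondary key, plus an untagged-short slot for the length partition when the index is length-partitioned), here is a small standalone sketch; the class and the slot descriptions are illustrative stand-ins only, not AsterixDB or Hyracks APIs.

import java.util.Arrays;

public class TokenLayoutSketch {
    public static void main(String[] args) {
        int numSecondaryKeys = 1; // inverted indexes accept exactly one indexed field
        for (boolean isPartitioned : new boolean[] { false, true }) {
            int numTokenKeys = isPartitioned ? numSecondaryKeys + 1 : numSecondaryKeys;
            String[] slots = new String[numTokenKeys];
            for (int i = 0; i < numSecondaryKeys; i++) {
                slots[i] = "token comparator/type-trait of the indexed field";
            }
            if (isPartitioned) {
                slots[numSecondaryKeys] = "length partition (untagged short)";
            }
            System.out.println("isPartitioned=" + isPartitioned + " -> " + Arrays.toString(slots));
        }
    }
}
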
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java
new file mode 100644
index 0000000..33f906c
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/RTreeSearchPOperator.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.operators.physical;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.optimizer.rules.am.RTreeJobGenParams;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+
+/**
+ * Contributes the runtime operator for an unnest-map representing an RTree
+ * search.
+ */
+public class RTreeSearchPOperator extends IndexSearchPOperator {
+
+    public RTreeSearchPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast) {
+        super(idx, requiresBroadcast);
+    }
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.RTREE_SEARCH;
+    }
+
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        UnnestMapOperator unnestMap = (UnnestMapOperator) op;
+        ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
+        if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            throw new IllegalStateException();
+        }
+        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
+        FunctionIdentifier funcIdent = unnestFuncExpr.getFunctionIdentifier();
+        if (!funcIdent.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+            return;
+        }
+        RTreeJobGenParams jobGenParams = new RTreeJobGenParams();
+        jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
+        int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
+
+        int[] minFilterFieldIndexes = getKeyIndexes(unnestMap.getMinFilterVars(), inputSchemas);
+        int[] maxFilterFieldIndexes = getKeyIndexes(unnestMap.getMaxFilterVars(), inputSchemas);
+
+        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+        Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
+        List<LogicalVariable> outputVars = unnestMap.getVariables();
+        if (jobGenParams.getRetainInput()) {
+            outputVars = new ArrayList<LogicalVariable>();
+            VariableUtilities.getLiveVariables(unnestMap, outputVars);
+        }
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> rtreeSearch = mp.buildRtreeRuntime(
+                builder.getJobSpec(), outputVars, opSchema, typeEnv, context, jobGenParams.getRetainInput(),
+                jobGenParams.getRetainNull(), dataset, jobGenParams.getIndexName(), keyIndexes, minFilterFieldIndexes,
+                maxFilterFieldIndexes);
+
+        builder.contributeHyracksOperator(unnestMap, rtreeSearch.first);
+        builder.contributeAlgebricksPartitionConstraint(rtreeSearch.first, rtreeSearch.second);
+        ILogicalOperator srcExchange = unnestMap.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(srcExchange, 0, unnestMap, 0);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/jobgen/AqlLogicalExpressionJobGen.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/jobgen/AqlLogicalExpressionJobGen.java b/asterix-algebra/src/main/java/org/apache/asterix/jobgen/AqlLogicalExpressionJobGen.java
new file mode 100644
index 0000000..99e8f25
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/jobgen/AqlLogicalExpressionJobGen.java
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.jobgen;
+
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionDescriptorTag;
+import edu.uci.ics.asterix.external.library.ExternalFunctionDescriptorProvider;
+import edu.uci.ics.asterix.formats.base.IDataFormat;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.comparisons.ComparisonEvalFactory;
+import edu.uci.ics.asterix.runtime.formats.FormatUtils;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ILogicalExpressionJobGen;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions.ComparisonKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunctionFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyRunningAggregateFunctionFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunctionFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunctionFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory;
+
+public class AqlLogicalExpressionJobGen implements ILogicalExpressionJobGen {
+
+    public static final AqlLogicalExpressionJobGen INSTANCE = new AqlLogicalExpressionJobGen();
+
+    private AqlLogicalExpressionJobGen() {
+    }
+
+    @Override
+    public ICopyAggregateFunctionFactory createAggregateFunctionFactory(AggregateFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
+        IFunctionDescriptor fd = getFunctionDescriptor(expr, env, context);
+        switch (fd.getFunctionDescriptorTag()) {
+            case SERIALAGGREGATE:
+                return null;
+            case AGGREGATE:
+                return fd.createAggregateFunctionFactory(args);
+            default:
+                throw new IllegalStateException("Invalid function descriptor " + fd.getFunctionDescriptorTag()
+                        + " expected " + FunctionDescriptorTag.SERIALAGGREGATE + " or "
+                        + FunctionDescriptorTag.AGGREGATE);
+        }
+    }
+
+    @Override
+    public ICopyRunningAggregateFunctionFactory createRunningAggregateFunctionFactory(
+            StatefulFunctionCallExpression expr, IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas,
+            JobGenContext context) throws AlgebricksException {
+        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
+        return getFunctionDescriptor(expr, env, context).createRunningAggregateFunctionFactory(args);
+    }
+
+    @Override
+    public ICopyUnnestingFunctionFactory createUnnestingFunctionFactory(UnnestingFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
+        return getFunctionDescriptor(expr, env, context).createUnnestingFunctionFactory(args);
+    }
+
+    @Override
+    public ICopyEvaluatorFactory createEvaluatorFactory(ILogicalExpression expr, IVariableTypeEnvironment env,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        ICopyEvaluatorFactory copyEvaluatorFactory = null;
+        switch (expr.getExpressionTag()) {
+            case VARIABLE: {
+                VariableReferenceExpression v = (VariableReferenceExpression) expr;
+                copyEvaluatorFactory = createVariableEvaluatorFactory(v, inputSchemas, context);
+                return copyEvaluatorFactory;
+            }
+            case CONSTANT: {
+                ConstantExpression c = (ConstantExpression) expr;
+                copyEvaluatorFactory = createConstantEvaluatorFactory(c, inputSchemas, context);
+                return copyEvaluatorFactory;
+            }
+            case FUNCTION_CALL: {
+                copyEvaluatorFactory = createScalarFunctionEvaluatorFactory((AbstractFunctionCallExpression) expr, env,
+                        inputSchemas, context);
+                return copyEvaluatorFactory;
+            }
+            default:
+                throw new IllegalStateException();
+        }
+
+    }
+
+    private ICopyEvaluatorFactory createVariableEvaluatorFactory(VariableReferenceExpression expr,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        LogicalVariable variable = expr.getVariableReference();
+        for (IOperatorSchema scm : inputSchemas) {
+            int pos = scm.findVariable(variable);
+            if (pos >= 0) {
+                return new ColumnAccessEvalFactory(pos);
+            }
+        }
+        throw new AlgebricksException("Variable " + variable + " could not be found in any input schema.");
+    }
+
+    private ICopyEvaluatorFactory createScalarFunctionEvaluatorFactory(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
+            throws AlgebricksException {
+        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
+        FunctionIdentifier fi = expr.getFunctionIdentifier();
+        ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(fi);
+        if (ck != null) {
+            return new ComparisonEvalFactory(args[0], args[1], ck);
+        }
+
+        IFunctionDescriptor fd = null;
+        if (!(expr.getFunctionInfo() instanceof IExternalFunctionInfo)) {
+            AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+            IDataFormat format = FormatUtils.getDefaultFormat();
+            fd = format.resolveFunction(expr, env);
+        } else {
+            try {
+                fd = ExternalFunctionDescriptorProvider.getExternalFunctionDescriptor((IExternalFunctionInfo) expr
+                        .getFunctionInfo());
+            } catch (AsterixException ae) {
+                throw new AlgebricksException(ae);
+            }
+        }
+        return fd.createEvaluatorFactory(args);
+    }
+
+    private ICopyEvaluatorFactory createConstantEvaluatorFactory(ConstantExpression expr,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+        IDataFormat format = FormatUtils.getDefaultFormat();
+        return format.getConstantEvalFactory(expr.getValue());
+    }
+
+    private ICopyEvaluatorFactory[] codegenArguments(AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,
+            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
+        List<Mutable<ILogicalExpression>> arguments = expr.getArguments();
+        int n = arguments.size();
+        ICopyEvaluatorFactory[] args = new ICopyEvaluatorFactory[n];
+        int i = 0;
+        for (Mutable<ILogicalExpression> a : arguments) {
+            args[i++] = createEvaluatorFactory(a.getValue(), env, inputSchemas, context);
+        }
+        return args;
+    }
+
+    @Override
+    public ICopySerializableAggregateFunctionFactory createSerializableAggregateFunctionFactory(
+            AggregateFunctionCallExpression expr, IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas,
+            JobGenContext context) throws AlgebricksException {
+        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
+        IFunctionDescriptor fd = getFunctionDescriptor(expr, env, context);
+
+        switch (fd.getFunctionDescriptorTag()) {
+            case AGGREGATE: {
+                if (AsterixBuiltinFunctions.isAggregateFunctionSerializable(fd.getIdentifier())) {
+                    AggregateFunctionCallExpression serialAggExpr = AsterixBuiltinFunctions
+                            .makeSerializableAggregateFunctionExpression(fd.getIdentifier(), expr.getArguments());
+                    IFunctionDescriptor afdd = getFunctionDescriptor(serialAggExpr, env, context);
+                    return afdd.createSerializableAggregateFunctionFactory(args);
+                } else {
+                    throw new AlgebricksException(
+                            "Trying to create a serializable aggregate from a non-serializable aggregate function descriptor. (fi="
+                                    + expr.getFunctionIdentifier() + ")");
+                }
+            }
+            case SERIALAGGREGATE: {
+                return fd.createSerializableAggregateFunctionFactory(args);
+            }
+
+            default:
+                throw new IllegalStateException("Invalid function descriptor " + fd.getFunctionDescriptorTag()
+                        + " expected " + FunctionDescriptorTag.SERIALAGGREGATE + " or "
+                        + FunctionDescriptorTag.AGGREGATE);
+        }
+    }
+
+    private IFunctionDescriptor getFunctionDescriptor(AbstractFunctionCallExpression expr,
+            IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
+        IFunctionDescriptor fd;
+        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+        fd = FormatUtils.getDefaultFormat().resolveFunction(expr, env);
+        return fd;
+    }
+
+}

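The evaluator construction above follows a three-way dispatch on the expression tag, with function-call arguments code-generated recursively by codegenArguments before the function's own evaluator is built. The toy sketch below mirrors that shape with hypothetical expression classes (none of these types are Algebricks classes, and the string "evaluators" only stand in for the real factories).

import java.util.Arrays;
import java.util.List;

public class ExprCodegenSketch {

    interface Expr {}

    static class Variable implements Expr {
        final int column; // position already resolved against an input schema
        Variable(int column) { this.column = column; }
    }

    static class Constant implements Expr {
        final Object value;
        Constant(Object value) { this.value = value; }
    }

    static class Call implements Expr {
        final String function;
        final List<Expr> args;
        Call(String function, Expr... args) { this.function = function; this.args = Arrays.asList(args); }
    }

    // Variables become column accessors, constants become constant evaluators,
    // and function calls recurse into their arguments first.
    static String createEvaluator(Expr expr) {
        if (expr instanceof Variable) {
            return "column(" + ((Variable) expr).column + ")";
        } else if (expr instanceof Constant) {
            return "const(" + ((Constant) expr).value + ")";
        } else if (expr instanceof Call) {
            Call c = (Call) expr;
            StringBuilder sb = new StringBuilder(c.function).append("(");
            for (int i = 0; i < c.args.size(); i++) {
                if (i > 0) {
                    sb.append(", ");
                }
                sb.append(createEvaluator(c.args.get(i))); // arguments are code-generated recursively
            }
            return sb.append(")").toString();
        }
        throw new IllegalStateException();
    }

    public static void main(String[] args) {
        Expr e = new Call("string-concat", new Variable(0), new Constant("suffix"));
        System.out.println(createEvaluator(e)); // string-concat(column(0), const(suffix))
    }
}
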
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
new file mode 100644
index 0000000..2d3ca0a
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.base;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+
+public class AnalysisUtil {
+    /*
+     * If the first child of op is of type opType, then it returns that child,
+     * o/w returns null.
+     */
+    public final static ILogicalOperator firstChildOfType(AbstractLogicalOperator op, LogicalOperatorTag opType) {
+        List<Mutable<ILogicalOperator>> ins = op.getInputs();
+        if (ins == null || ins.isEmpty()) {
+            return null;
+        }
+        Mutable<ILogicalOperator> opRef2 = ins.get(0);
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
+        if (op2.getOperatorTag() == opType) {
+            return op2;
+        } else {
+            return null;
+        }
+    }
+
+    public static int numberOfVarsInExpr(ILogicalExpression e) {
+        switch (((AbstractLogicalExpression) e).getExpressionTag()) {
+            case CONSTANT: {
+                return 0;
+            }
+            case FUNCTION_CALL: {
+                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) e;
+                int s = 0;
+                for (Mutable<ILogicalExpression> arg : f.getArguments()) {
+                    s += numberOfVarsInExpr(arg.getValue());
+                }
+                return s;
+            }
+            case VARIABLE: {
+                return 1;
+            }
+            default: {
+                assert false;
+                throw new IllegalArgumentException();
+            }
+        }
+    }
+
+    public static boolean isRunnableFieldAccessFunction(FunctionIdentifier fid) {
+        return fieldAccessFunctions.contains(fid);
+    }
+
+    public static boolean isDataSetCall(ILogicalExpression e) {
+        if (((AbstractLogicalExpression) e).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression fe = (AbstractFunctionCallExpression) e;
+        return AsterixBuiltinFunctions.isDatasetFunction(fe.getFunctionIdentifier());
+    }
+
+    public static boolean isRunnableAccessToFieldRecord(ILogicalExpression expr) {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression fc = (AbstractFunctionCallExpression) expr;
+            FunctionIdentifier fid = fc.getFunctionIdentifier();
+            if (AnalysisUtil.isRunnableFieldAccessFunction(fid)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public static boolean isAccessByNameToFieldRecord(ILogicalExpression expr) {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression fc = (AbstractFunctionCallExpression) expr;
+            FunctionIdentifier fid = fc.getFunctionIdentifier();
+            if (fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public static boolean isAccessToFieldRecord(ILogicalExpression expr) {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression fc = (AbstractFunctionCallExpression) expr;
+            FunctionIdentifier fid = fc.getFunctionIdentifier();
+            if (fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)
+                    || fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)
+                    || fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public static Pair<String, String> getDatasetInfo(AbstractDataSourceOperator op) throws AlgebricksException {
+        AqlSourceId srcId = (AqlSourceId) op.getDataSource().getId();
+        return new Pair<String, String>(srcId.getDataverseName(), srcId.getDatasourceName());
+    }
+
+    private static List<FunctionIdentifier> fieldAccessFunctions = new ArrayList<FunctionIdentifier>();
+    static {
+        fieldAccessFunctions.add(AsterixBuiltinFunctions.GET_DATA);
+        fieldAccessFunctions.add(AsterixBuiltinFunctions.GET_HANDLE);
+        fieldAccessFunctions.add(AsterixBuiltinFunctions.TYPE_OF);
+    }
+
+}

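AnalysisUtil.numberOfVarsInExpr above is a plain recursion over the expression tree: constants contribute 0, variables contribute 1, and function calls sum over their arguments. A minimal standalone mirror of it, using hypothetical expression classes rather than the Algebricks ones, looks like this.

import java.util.Arrays;
import java.util.List;

public class VarCountSketch {

    interface Expr {}
    static class Const implements Expr {}
    static class Var implements Expr {}
    static class Call implements Expr {
        final List<Expr> args;
        Call(Expr... args) { this.args = Arrays.asList(args); }
    }

    static int numberOfVarsInExpr(Expr e) {
        if (e instanceof Const) {
            return 0;
        } else if (e instanceof Var) {
            return 1;
        } else if (e instanceof Call) {
            int s = 0;
            for (Expr arg : ((Call) e).args) {
                s += numberOfVarsInExpr(arg);
            }
            return s;
        } else {
            throw new IllegalArgumentException();
        }
    }

    public static void main(String[] args) {
        // f($x, 3, g($y)) references two variables.
        System.out.println(numberOfVarsInExpr(new Call(new Var(), new Const(), new Call(new Var())))); // 2
    }
}
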
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/FuzzyUtils.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/FuzzyUtils.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/FuzzyUtils.java
new file mode 100644
index 0000000..063c887
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/FuzzyUtils.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.base;
+
+import java.util.ArrayList;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.om.base.AFloat;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+
+public class FuzzyUtils {
+
+    private final static String DEFAULT_SIM_FUNCTION = "jaccard";
+    private final static float JACCARD_DEFAULT_SIM_THRESHOLD = .8f;
+    private final static int EDIT_DISTANCE_DEFAULT_SIM_THRESHOLD = 1;
+
+    private final static String SIM_FUNCTION_PROP_NAME = "simfunction";
+    private final static String SIM_THRESHOLD_PROP_NAME = "simthreshold";
+
+    public final static String JACCARD_FUNCTION_NAME = "jaccard";
+    public final static String EDIT_DISTANCE_FUNCTION_NAME = "edit-distance";
+
+    public static FunctionIdentifier getTokenizer(ATypeTag inputTag) {
+        switch (inputTag) {
+            case STRING:
+                return AsterixBuiltinFunctions.COUNTHASHED_WORD_TOKENS;
+            case UNORDEREDLIST:
+            case ORDEREDLIST:
+            case ANY:
+                return null;
+            default:
+                throw new NotImplementedException("No tokenizer for type " + inputTag);
+        }
+    }
+
+    public static IAObject getSimThreshold(AqlMetadataProvider metadata, String simFuncName) {
+        String simThresholdValue = metadata.getPropertyValue(SIM_THRESHOLD_PROP_NAME);
+        IAObject ret = null;
+        if (simFuncName.equals(JACCARD_FUNCTION_NAME)) {
+            if (simThresholdValue != null) {
+                float jaccThresh = Float.parseFloat(simThresholdValue);
+                ret = new AFloat(jaccThresh);
+            } else {
+                ret = new AFloat(JACCARD_DEFAULT_SIM_THRESHOLD);
+            }
+        } else if (simFuncName.equals(EDIT_DISTANCE_FUNCTION_NAME)) {
+            if (simThresholdValue != null) {
+                int edThresh = Integer.parseInt(simThresholdValue);
+                ret = new AInt32(edThresh);
+            } else {
+                // Edit-distance thresholds are integral, so the default is wrapped in an AInt32 as well.
+                ret = new AInt32(EDIT_DISTANCE_DEFAULT_SIM_THRESHOLD);
+            }
+        }
+            }
+        }
+        return ret;
+    }
+
+    public static FunctionIdentifier getFunctionIdentifier(String simFuncName) {
+        if (simFuncName.equals(JACCARD_FUNCTION_NAME)) {
+            return AsterixBuiltinFunctions.SIMILARITY_JACCARD;
+        } else if (simFuncName.equals(EDIT_DISTANCE_FUNCTION_NAME)) {
+            return AsterixBuiltinFunctions.EDIT_DISTANCE;
+        }
+        return null;
+    }
+
+    public static ScalarFunctionCallExpression getComparisonExpr(String simFuncName,
+            ArrayList<Mutable<ILogicalExpression>> cmpArgs) {
+        if (simFuncName.equals(JACCARD_FUNCTION_NAME)) {
+            return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.GE),
+                    cmpArgs);
+        } else if (simFuncName.equals(EDIT_DISTANCE_FUNCTION_NAME)) {
+            return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.LE),
+                    cmpArgs);
+        }
+        return null;
+    }
+
+    public static float getSimThreshold(AqlMetadataProvider metadata) {
+        float simThreshold = JACCARD_DEFAULT_SIM_THRESHOLD;
+        String simThresholValue = metadata.getPropertyValue(SIM_THRESHOLD_PROP_NAME);
+        if (simThresholValue != null) {
+            simThreshold = Float.parseFloat(simThresholValue);
+        }
+        return simThreshold;
+    }
+
+    // TODO: The default function depends on the input types.
+    public static String getSimFunction(AqlMetadataProvider metadata) {
+        String simFunction = metadata.getPropertyValue(SIM_FUNCTION_PROP_NAME);
+        if (simFunction == null) {
+            simFunction = DEFAULT_SIM_FUNCTION;
+        }
+        simFunction = simFunction.toLowerCase();
+        return simFunction;
+    }
+}
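
For readers skimming the diff, the class above reduces to a small hint-resolution pattern: read the "simfunction" and "simthreshold" properties, fall back to jaccard with a 0.8 threshold or edit-distance with a threshold of 1, and pick a GE or LE comparison accordingly. A minimal standalone sketch of that fallback logic, using plain Java types rather than the Asterix IAObject/AqlMetadataProvider classes (the class and method names below are illustrative only, not part of the codebase):

    import java.util.Map;

    // Mirrors the defaulting behaviour of FuzzyUtils with plain Java types; illustrative only.
    final class SimilarityHintSketch {
        static final String DEFAULT_SIM_FUNCTION = "jaccard";
        static final float JACCARD_DEFAULT_THRESHOLD = 0.8f;
        static final int EDIT_DISTANCE_DEFAULT_THRESHOLD = 1;

        // Resolve the similarity function name, falling back to the default.
        static String simFunction(Map<String, String> hints) {
            String f = hints.get("simfunction");
            return (f == null ? DEFAULT_SIM_FUNCTION : f).toLowerCase();
        }

        // Resolve the threshold: a float for jaccard, an int for edit-distance.
        static Number simThreshold(Map<String, String> hints, String simFunction) {
            String v = hints.get("simthreshold");
            if (simFunction.equals("jaccard")) {
                return v != null ? Float.parseFloat(v) : JACCARD_DEFAULT_THRESHOLD;
            } else if (simFunction.equals("edit-distance")) {
                return v != null ? Integer.parseInt(v) : EDIT_DISTANCE_DEFAULT_THRESHOLD;
            }
            return null;
        }

        public static void main(String[] args) {
            Map<String, String> hints = Map.of("simfunction", "edit-distance", "simthreshold", "2");
            String fn = simFunction(hints);
            System.out.println(fn + " -> " + simThreshold(hints, fn)); // edit-distance -> 2
        }
    }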



[06/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
new file mode 100644
index 0000000..ffecc8e
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
@@ -0,0 +1,363 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.external.IndexingConstants;
+import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.resource.ExternalBTreeWithBuddyLocalResourceMetadata;
+import edu.uci.ics.asterix.transaction.management.resource.LSMBTreeLocalResourceMetadata;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.storage.common.file.LocalResource;
+
+public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelper {
+
+    protected SecondaryBTreeOperationsHelper(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
+        super(physOptConf, propertiesProvider);
+    }
+
+    @Override
+    public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+        ILocalResourceFactoryProvider localResourceFactoryProvider;
+        IIndexDataflowHelperFactory indexDataflowHelperFactory;
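+        // Internal datasets get an LSM B-tree secondary; external datasets get an external B-tree with a buddy B-tree.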
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
+            ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(secondaryTypeTraits,
+                    secondaryComparatorFactories, secondaryBloomFilterKeyFields, true, dataset.getDatasetId(),
+                    mergePolicyFactory, mergePolicyFactoryProperties, filterTypeTraits, filterCmpFactories,
+                    secondaryBTreeFields, secondaryFilterFields);
+            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
+                    LocalResource.LSMBTreeResource);
+            // The index create operation should be persistent regardless of temp datasets or permanent dataset.
+            indexDataflowHelperFactory = new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                    dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
+                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                    storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
+                    secondaryBTreeFields, secondaryFilterFields, true);
+        } else {
+            // External dataset local resource and dataflow helper
+            int[] buddyBtreeFields = new int[] { numSecondaryKeys };
+            ILocalResourceMetadata localResourceMetadata = new ExternalBTreeWithBuddyLocalResourceMetadata(
+                    dataset.getDatasetId(), secondaryComparatorFactories, secondaryTypeTraits, mergePolicyFactory,
+                    mergePolicyFactoryProperties, buddyBtreeFields);
+            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
+                    LocalResource.ExternalBTreeWithBuddyResource);
+            indexDataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory,
+                    mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
+                    storageProperties.getBloomFilterFalsePositiveRate(), buddyBtreeFields,
+                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+        }
+        TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                secondaryFileSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
+                secondaryBloomFilterKeyFields, indexDataflowHelperFactory, localResourceFactoryProvider,
+                NoOpOperationCallbackFactory.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
+                secondaryPartitionConstraint);
+        spec.addRoot(secondaryIndexCreateOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    @Override
+    public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            /*
+             * In case of external data, this method is used to build loading jobs for both initial load on index creation
+             * and transaction load on dataset refresh
+             */
+
+            // Create external indexing scan operator
+            ExternalDataScanOperatorDescriptor primaryScanOp = createExternalIndexingOp(spec);
+
+            // Assign op.
+            AbstractOperatorDescriptor sourceOp = primaryScanOp;
+            if (isEnforcingKeyTypes) {
+                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
+                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
+            }
+            AlgebricksMetaOperatorDescriptor asterixAssignOp = createExternalAssignOp(spec, numSecondaryKeys);
+
+            // If any of the secondary fields are nullable, then add a select op that filters nulls.
+            AlgebricksMetaOperatorDescriptor selectOp = null;
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
+            }
+
+            // Sort by secondary keys.
+            ExternalSortOperatorDescriptor sortOp = createSortOp(spec, secondaryComparatorFactories, secondaryRecDesc);
+
+            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+            // Create secondary BTree bulk load op.
+            AbstractTreeIndexOperatorDescriptor secondaryBulkLoadOp;
+            ExternalBTreeWithBuddyDataflowHelperFactory dataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(
+                    mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
+                            dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
+                    storageProperties.getBloomFilterFalsePositiveRate(), new int[] { numSecondaryKeys },
+                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+            IOperatorDescriptor root;
+            if (externalFiles != null) {
+                // Transaction load
+                secondaryBulkLoadOp = createExternalIndexBulkModifyOp(spec, numSecondaryKeys, dataflowHelperFactory,
+                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
+                root = secondaryBulkLoadOp;
+            } else {
+                // Initial load
+                secondaryBulkLoadOp = createTreeIndexBulkLoadOp(spec, numSecondaryKeys, dataflowHelperFactory,
+                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
+                AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
+                        new IPushRuntimeFactory[] { new SinkRuntimeFactory() },
+                        new RecordDescriptor[] { secondaryRecDesc });
+                spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
+                root = metaOp;
+            }
+            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
+                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
+            } else {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
+            }
+            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
+            spec.addRoot(root);
+            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+            return spec;
+        } else {
+            // Create dummy key provider for feeding the primary index scan. 
+            AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
+
+            // Create primary index scan op.
+            BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
+
+            // Assign op.
+            AbstractOperatorDescriptor sourceOp = primaryScanOp;
+            if (isEnforcingKeyTypes) {
+                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
+                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
+            }
+            AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, sourceOp, numSecondaryKeys);
+
+            // If any of the secondary fields are nullable, then add a select op that filters nulls.
+            AlgebricksMetaOperatorDescriptor selectOp = null;
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
+            }
+
+            // Sort by secondary keys.
+            ExternalSortOperatorDescriptor sortOp = createSortOp(spec, secondaryComparatorFactories, secondaryRecDesc);
+
+            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+            boolean temp = dataset.getDatasetDetails().isTemp();
+            // Create secondary BTree bulk load op.
+            TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(
+                    spec,
+                    numSecondaryKeys,
+                    new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                            mergePolicyFactory, mergePolicyFactoryProperties,
+                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
+                                    .getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
+                            secondaryBTreeFields, secondaryFilterFields, !temp), GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
+
+            AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
+                    new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] { secondaryRecDesc });
+            // Connect the operators.
+            spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
+            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
+                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
+            } else {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
+            }
+            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
+            spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
+            spec.addRoot(metaOp);
+            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+            return spec;
+        }
+    }
+
+    @Override
+    protected int getNumSecondaryKeys() {
+        return numSecondaryKeys;
+    }
+
+    @Override
+    public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        LSMTreeIndexCompactOperatorDescriptor compactOp;
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
+                    secondaryComparatorFactories, secondaryBloomFilterKeyFields, new LSMBTreeDataflowHelperFactory(
+                            new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), mergePolicyFactory,
+                            mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
+                                    dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                            storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
+                            filterCmpFactories, secondaryBTreeFields, secondaryFilterFields, !temp),
+                    NoOpOperationCallbackFactory.INSTANCE);
+        } else {
+            // External dataset
+            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
+                    secondaryComparatorFactories, secondaryBloomFilterKeyFields,
+                    new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
+                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE, storageProperties
+                                    .getBloomFilterFalsePositiveRate(), new int[] { numSecondaryKeys },
+                            ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true),
+                    NoOpOperationCallbackFactory.INSTANCE);
+        }
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                secondaryPartitionConstraint);
+        spec.addRoot(compactOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    @Override
+    @SuppressWarnings("rawtypes")
+    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
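+        // Secondary-index tuple layout: [secondary keys][primary keys or RIDs][optional filter field].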
+        secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys + numFilterFields];
+        secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys + numPrimaryKeys];
+        secondaryBloomFilterKeyFields = new int[numSecondaryKeys];
+        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys
+                + numFilterFields];
+        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
+        secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
+        ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
+        ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
+        ITypeTraitProvider typeTraitProvider = metadataProvider.getFormat().getTypeTraitProvider();
+        IBinaryComparatorFactoryProvider comparatorFactoryProvider = metadataProvider.getFormat()
+                .getBinaryComparatorFactoryProvider();
+        // Record column is 0 for external datasets, numPrimaryKeys for internal ones
+        int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
+        for (int i = 0; i < numSecondaryKeys; i++) {
+            secondaryFieldAccessEvalFactories[i] = metadataProvider.getFormat().getFieldAccessEvaluatorFactory(
+                    isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(i), recordColumn);
+            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(i),
+                    secondaryKeyFields.get(i), itemType);
+            IAType keyType = keyTypePair.first;
+            anySecondaryKeyIsNullable = anySecondaryKeyIsNullable || keyTypePair.second;
+            ISerializerDeserializer keySerde = serdeProvider.getSerializerDeserializer(keyType);
+            secondaryRecFields[i] = keySerde;
+            secondaryComparatorFactories[i] = comparatorFactoryProvider.getBinaryComparatorFactory(keyType, true);
+            secondaryTypeTraits[i] = typeTraitProvider.getTypeTrait(keyType);
+            secondaryBloomFilterKeyFields[i] = i;
+        }
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            // Add serializers and comparators for primary index fields.
+            for (int i = 0; i < numPrimaryKeys; i++) {
+                secondaryRecFields[numSecondaryKeys + i] = primaryRecDesc.getFields()[i];
+                enforcedRecFields[i] = primaryRecDesc.getFields()[i];
+                secondaryTypeTraits[numSecondaryKeys + i] = primaryRecDesc.getTypeTraits()[i];
+                enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i];
+                secondaryComparatorFactories[numSecondaryKeys + i] = primaryComparatorFactories[i];
+            }
+        } else {
+            // Add serializers and comparators for RID fields.
+            for (int i = 0; i < numPrimaryKeys; i++) {
+                secondaryRecFields[numSecondaryKeys + i] = IndexingConstants.getSerializerDeserializer(i);
+                enforcedRecFields[i] = IndexingConstants.getSerializerDeserializer(i);
+                secondaryTypeTraits[numSecondaryKeys + i] = IndexingConstants.getTypeTraits(i);
+                enforcedTypeTraits[i] = IndexingConstants.getTypeTraits(i);
+                secondaryComparatorFactories[numSecondaryKeys + i] = IndexingConstants.getComparatorFactory(i);
+            }
+        }
+        enforcedRecFields[numPrimaryKeys] = serdeProvider.getSerializerDeserializer(itemType);
+
+        if (numFilterFields > 0) {
+            secondaryFieldAccessEvalFactories[numSecondaryKeys] = metadataProvider.getFormat()
+                    .getFieldAccessEvaluatorFactory(itemType, filterFieldName, numPrimaryKeys);
+            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
+            IAType type = keyTypePair.first;
+            ISerializerDeserializer serde = serdeProvider.getSerializerDeserializer(type);
+            secondaryRecFields[numPrimaryKeys + numSecondaryKeys] = serde;
+        }
+
+        secondaryRecDesc = new RecordDescriptor(secondaryRecFields, secondaryTypeTraits);
+        enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
+
+    }
+}
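
The bookkeeping in setSecondaryRecDescAndComparators above follows a fixed column layout: secondary keys first, then the primary keys (or RIDs for an external dataset), then an optional filter field, with the bloom filter keyed only on the secondary keys. A minimal standalone sketch of that layout for a hypothetical one-key index over a dataset with one primary key and a filter field (all names below are illustrative, not part of the codebase):

    import java.util.Arrays;

    // Illustrative only: mirrors the column layout the helper builds, not AsterixDB code.
    public class SecondaryLayoutSketch {
        public static void main(String[] args) {
            int numSecondaryKeys = 1, numPrimaryKeys = 1, numFilterFields = 1;

            // Secondary-index tuple: [secondary keys][primary keys or RIDs][optional filter field]
            String[] columns = new String[numSecondaryKeys + numPrimaryKeys + numFilterFields];
            columns[0] = "secondaryKey0";                               // positions 0 .. numSecondaryKeys - 1
            columns[numSecondaryKeys] = "primaryKey0";                  // then the primary keys / RIDs
            columns[numSecondaryKeys + numPrimaryKeys] = "filterField"; // optional filter field last

            // The bloom filter covers only the secondary keys; comparators cover secondary + primary keys.
            int[] bloomFilterKeyFields = new int[numSecondaryKeys];
            int[] comparatorFields = new int[numSecondaryKeys + numPrimaryKeys];
            for (int i = 0; i < bloomFilterKeyFields.length; i++) {
                bloomFilterKeyFields[i] = i;
            }
            for (int i = 0; i < comparatorFields.length; i++) {
                comparatorFields[i] = i;
            }

            System.out.println(Arrays.toString(columns));              // [secondaryKey0, primaryKey0, filterField]
            System.out.println(Arrays.toString(bloomFilterKeyFields)); // [0]
            System.out.println(Arrays.toString(comparatorFields));     // [0, 1]
        }
    }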

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
new file mode 100644
index 0000000..07c8bab
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
@@ -0,0 +1,575 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.context.ITransactionSubsystemProvider;
+import edu.uci.ics.asterix.common.context.TransactionSubsystemProvider;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import edu.uci.ics.asterix.common.transactions.IRecoveryManager.ResourceType;
+import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.asterix.external.indexing.operators.ExternalIndexBulkModifyOperatorDescriptor;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.ExternalFile;
+import edu.uci.ics.asterix.metadata.external.IndexingConstants;
+import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.runtime.evaluators.functions.AndDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.CastRecordDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.IsNullDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.NotDescriptor;
+import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IJobletEventListenerFactory;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+
+@SuppressWarnings("rawtypes")
+// TODO: We should eventually have a hierarchy of classes that can create all
+// possible index job specs,
+// not just for creation.
+public abstract class SecondaryIndexOperationsHelper {
+    protected final PhysicalOptimizationConfig physOptConf;
+
+    protected int numPrimaryKeys;
+    protected int numSecondaryKeys;
+    protected AqlMetadataProvider metadataProvider;
+    protected String dataverseName;
+    protected String datasetName;
+    protected Dataset dataset;
+    protected ARecordType itemType;
+    protected ISerializerDeserializer payloadSerde;
+    protected IFileSplitProvider primaryFileSplitProvider;
+    protected AlgebricksPartitionConstraint primaryPartitionConstraint;
+    protected IFileSplitProvider secondaryFileSplitProvider;
+    protected AlgebricksPartitionConstraint secondaryPartitionConstraint;
+    protected String secondaryIndexName;
+    protected boolean anySecondaryKeyIsNullable = false;
+    protected boolean isEnforcingKeyTypes = false;
+
+    protected long numElementsHint;
+    protected IBinaryComparatorFactory[] primaryComparatorFactories;
+    protected int[] primaryBloomFilterKeyFields;
+    protected RecordDescriptor primaryRecDesc;
+    protected IBinaryComparatorFactory[] secondaryComparatorFactories;
+    protected ITypeTraits[] secondaryTypeTraits;
+    protected int[] secondaryBloomFilterKeyFields;
+    protected RecordDescriptor secondaryRecDesc;
+    protected ICopyEvaluatorFactory[] secondaryFieldAccessEvalFactories;
+
+    protected IAsterixPropertiesProvider propertiesProvider;
+    protected ILSMMergePolicyFactory mergePolicyFactory;
+    protected Map<String, String> mergePolicyFactoryProperties;
+    protected RecordDescriptor enforcedRecDesc;
+    protected ARecordType enforcedItemType;
+
+    protected int numFilterFields;
+    protected List<String> filterFieldName;
+    protected ITypeTraits[] filterTypeTraits;
+    protected IBinaryComparatorFactory[] filterCmpFactories;
+    protected int[] secondaryFilterFields;
+    protected int[] primaryFilterFields;
+    protected int[] primaryBTreeFields;
+    protected int[] secondaryBTreeFields;
+    protected List<ExternalFile> externalFiles;
+
+    // Prevent public construction. Should be created via createIndexOperationsHelper().
+    protected SecondaryIndexOperationsHelper(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
+        this.physOptConf = physOptConf;
+        this.propertiesProvider = propertiesProvider;
+    }
+
+    public static SecondaryIndexOperationsHelper createIndexOperationsHelper(IndexType indexType, String dataverseName,
+            String datasetName, String indexName, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
+            PhysicalOptimizationConfig physOptConf, ARecordType recType, ARecordType enforcedType)
+            throws AsterixException, AlgebricksException {
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        SecondaryIndexOperationsHelper indexOperationsHelper = null;
+        switch (indexType) {
+            case BTREE: {
+                indexOperationsHelper = new SecondaryBTreeOperationsHelper(physOptConf, asterixPropertiesProvider);
+                break;
+            }
+            case RTREE: {
+                indexOperationsHelper = new SecondaryRTreeOperationsHelper(physOptConf, asterixPropertiesProvider);
+                break;
+            }
+            case SINGLE_PARTITION_WORD_INVIX:
+            case SINGLE_PARTITION_NGRAM_INVIX:
+            case LENGTH_PARTITIONED_WORD_INVIX:
+            case LENGTH_PARTITIONED_NGRAM_INVIX: {
+                indexOperationsHelper = new SecondaryInvertedIndexOperationsHelper(physOptConf,
+                        asterixPropertiesProvider);
+                break;
+            }
+            default: {
+                throw new AsterixException("Unknown Index Type: " + indexType);
+            }
+        }
+        indexOperationsHelper.init(indexType, dataverseName, datasetName, indexName, secondaryKeyFields,
+                secondaryKeyTypes, isEnforced, gramLength, metadataProvider, recType, enforcedType);
+        return indexOperationsHelper;
+    }
+
+    public abstract JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException;
+
+    public abstract JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException;
+
+    public abstract JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException;
+
+    protected void init(IndexType indexType, String dvn, String dsn, String in, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
+            ARecordType aRecType, ARecordType enforcedType) throws AsterixException, AlgebricksException {
+        this.metadataProvider = metadataProvider;
+        dataverseName = dvn == null ? metadataProvider.getDefaultDataverseName() : dvn;
+        datasetName = dsn;
+        secondaryIndexName = in;
+        isEnforcingKeyTypes = isEnforced;
+        dataset = metadataProvider.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AsterixException("Unknown dataset " + datasetName);
+        }
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        itemType = aRecType;
+        enforcedItemType = enforcedType;
+        payloadSerde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(itemType);
+        numSecondaryKeys = secondaryKeyFields.size();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, secondaryIndexName, temp);
+        secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
+        secondaryPartitionConstraint = secondarySplitsAndConstraint.second;
+
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
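+            // For external datasets the "primary keys" are the RIDs used to locate records in the external files.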
+            numPrimaryKeys = ExternalIndexingOperations.getRIDSize(dataset);
+        } else {
+            filterFieldName = DatasetUtils.getFilterField(dataset);
+            if (filterFieldName != null) {
+                numFilterFields = 1;
+            } else {
+                numFilterFields = 0;
+            }
+
+            numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
+            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadataProvider
+                    .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
+            primaryFileSplitProvider = primarySplitsAndConstraint.first;
+            primaryPartitionConstraint = primarySplitsAndConstraint.second;
+            setPrimaryRecDescAndComparators();
+        }
+        setSecondaryRecDescAndComparators(indexType, secondaryKeyFields, secondaryKeyTypes, gramLength,
+                metadataProvider);
+        numElementsHint = metadataProvider.getCardinalityPerPartitionHint(dataset);
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+        mergePolicyFactory = compactionInfo.first;
+        mergePolicyFactoryProperties = compactionInfo.second;
+
+        if (numFilterFields > 0) {
+            setFilterTypeTraitsAndComparators();
+        }
+    }
+
+    protected void setFilterTypeTraitsAndComparators() throws AlgebricksException {
+        filterTypeTraits = new ITypeTraits[numFilterFields];
+        filterCmpFactories = new IBinaryComparatorFactory[numFilterFields];
+        secondaryFilterFields = new int[numFilterFields];
+        primaryFilterFields = new int[numFilterFields];
+        primaryBTreeFields = new int[numPrimaryKeys + 1];
+        secondaryBTreeFields = new int[numSecondaryKeys + numPrimaryKeys];
+        for (int i = 0; i < primaryBTreeFields.length; i++) {
+            primaryBTreeFields[i] = i;
+        }
+        for (int i = 0; i < secondaryBTreeFields.length; i++) {
+            secondaryBTreeFields[i] = i;
+        }
+
+        IAType type;
+        try {
+            type = itemType.getSubFieldType(filterFieldName);
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+        filterCmpFactories[0] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(type, true);
+        filterTypeTraits[0] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(type);
+        secondaryFilterFields[0] = getNumSecondaryKeys() + numPrimaryKeys;
+        primaryFilterFields[0] = numPrimaryKeys + 1;
+    }
+
+    protected abstract int getNumSecondaryKeys();
+
+    protected void setPrimaryRecDescAndComparators() throws AlgebricksException {
+        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+        int numPrimaryKeys = partitioningKeys.size();
+        ISerializerDeserializer[] primaryRecFields = new ISerializerDeserializer[numPrimaryKeys + 1];
+        ITypeTraits[] primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1];
+        primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
+        primaryBloomFilterKeyFields = new int[numPrimaryKeys];
+        ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
+        for (int i = 0; i < numPrimaryKeys; i++) {
+            IAType keyType;
+            try {
+                keyType = itemType.getSubFieldType(partitioningKeys.get(i));
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
+            primaryRecFields[i] = serdeProvider.getSerializerDeserializer(keyType);
+            primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
+                    keyType, true);
+            primaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+            primaryBloomFilterKeyFields[i] = i;
+        }
+        primaryRecFields[numPrimaryKeys] = payloadSerde;
+        primaryTypeTraits[numPrimaryKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
+        primaryRecDesc = new RecordDescriptor(primaryRecFields, primaryTypeTraits);
+    }
+
+    protected abstract void setSecondaryRecDescAndComparators(IndexType indexType,
+            List<List<String>> secondaryKeyFields, List<IAType> secondaryKeyTypes, int gramLength,
+            AqlMetadataProvider metadataProvider) throws AlgebricksException, AsterixException;
+
+    protected AbstractOperatorDescriptor createDummyKeyProviderOp(JobSpecification spec) throws AsterixException,
+            AlgebricksException {
+        // Build dummy tuple containing one field with a dummy value inside.
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+        DataOutput dos = tb.getDataOutput();
+        tb.reset();
+        try {
+            // Serialize dummy value into a field.
+            IntegerSerializerDeserializer.INSTANCE.serialize(0, dos);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+        // Add dummy field.
+        tb.addFieldEndOffset();
+        ISerializerDeserializer[] keyRecDescSers = { IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, keyProviderOp,
+                primaryPartitionConstraint);
+        return keyProviderOp;
+    }
+
+    protected BTreeSearchOperatorDescriptor createPrimaryIndexScanOp(JobSpecification spec) throws AlgebricksException {
+        // -Infinity
+        int[] lowKeyFields = null;
+        // +Infinity
+        int[] highKeyFields = null;
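+        // Null low/high key fields make the search range unbounded, i.e. a full scan of the primary index.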
+        ITransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
+        JobId jobId = JobIdFactory.generateJobId();
+        metadataProvider.setJobId(jobId);
+        boolean isWriteTransaction = metadataProvider.isWriteTransaction();
+        IJobletEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(jobId, isWriteTransaction);
+        spec.setJobletEventListenerFactory(jobEventListenerFactory);
+
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        ISearchOperationCallbackFactory searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
+                : new PrimaryIndexInstantSearchOperationCallbackFactory(jobId, dataset.getDatasetId(),
+                        primaryBloomFilterKeyFields, txnSubsystemProvider, ResourceType.LSM_BTREE);
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                primaryFileSplitProvider, primaryRecDesc.getTypeTraits(), primaryComparatorFactories,
+                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        mergePolicyFactory, mergePolicyFactoryProperties, new PrimaryIndexOperationTrackerProvider(
+                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
+                                .getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        primaryBTreeFields, primaryFilterFields, !temp), false, false, null,
+                searchCallbackFactory, null, null);
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
+                primaryPartitionConstraint);
+        return primarySearchOp;
+    }
+
+    protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec,
+            AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields) throws AlgebricksException {
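+        // Write the evaluated secondary keys (and the optional filter value) into columns starting at numPrimaryKeys,
+        // then project them ahead of the primary keys.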
+        int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
+        int[] projectionList = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
+        for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
+            outColumns[i] = numPrimaryKeys + i;
+        }
+        int projCount = 0;
+        for (int i = 0; i < numSecondaryKeyFields; i++) {
+            projectionList[projCount++] = numPrimaryKeys + i;
+        }
+        for (int i = 0; i < numPrimaryKeys; i++) {
+            projectionList[projCount++] = i;
+        }
+        if (numFilterFields > 0) {
+            projectionList[projCount++] = numPrimaryKeys + numSecondaryKeyFields;
+        }
+
+        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
+        for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
+            sefs[i] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
+                    secondaryFieldAccessEvalFactories[i]);
+        }
+        AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
+        AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
+                new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixAssignOp,
+                primaryPartitionConstraint);
+        return asterixAssignOp;
+    }
+
+    protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec,
+            AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields, DatasetType dsType) {
+        CastRecordDescriptor castFuncDesc = (CastRecordDescriptor) CastRecordDescriptor.FACTORY
+                .createFunctionDescriptor();
+        castFuncDesc.reset(enforcedItemType, itemType);
+
+        int[] outColumns = new int[1];
+        int[] projectionList = new int[1 + numPrimaryKeys];
+        int recordIdx;
+        // The external data-scan operator returns the record as the first field, instead of the last as in the internal case.
+        if (dsType == DatasetType.EXTERNAL) {
+            recordIdx = 0;
+            outColumns[0] = 0;
+        } else {
+            recordIdx = numPrimaryKeys;
+            outColumns[0] = numPrimaryKeys;
+        }
+        for (int i = 0; i <= numPrimaryKeys; i++) {
+            projectionList[i] = i;
+        }
+        ICopyEvaluatorFactory[] castEvalFact = new ICopyEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
+        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
+        sefs[0] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
+                castFuncDesc.createEvaluatorFactory(castEvalFact));
+        AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
+        AlgebricksMetaOperatorDescriptor castRecAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
+                new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc });
+
+        return castRecAssignOp;
+    }
+
+    protected ExternalSortOperatorDescriptor createSortOp(JobSpecification spec,
+            IBinaryComparatorFactory[] secondaryComparatorFactories, RecordDescriptor secondaryRecDesc) {
+        int[] sortFields = new int[secondaryComparatorFactories.length];
+        for (int i = 0; i < secondaryComparatorFactories.length; i++) {
+            sortFields[i] = i;
+        }
+        ExternalSortOperatorDescriptor sortOp = new ExternalSortOperatorDescriptor(spec,
+                physOptConf.getMaxFramesExternalSort(), sortFields, secondaryComparatorFactories, secondaryRecDesc);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sortOp, primaryPartitionConstraint);
+        return sortOp;
+    }
+
+    protected TreeIndexBulkLoadOperatorDescriptor createTreeIndexBulkLoadOp(JobSpecification spec,
+            int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
+            throws MetadataException, AlgebricksException {
+        int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
+        for (int i = 0; i < fieldPermutation.length; i++) {
+            fieldPermutation[i] = i;
+        }
+        TreeIndexBulkLoadOperatorDescriptor treeIndexBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                secondaryRecDesc, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
+                secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories, secondaryBloomFilterKeyFields,
+                fieldPermutation, fillFactor, false, numElementsHint, false, dataflowHelperFactory);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
+                secondaryPartitionConstraint);
+        return treeIndexBulkLoadOp;
+    }
+
+    public AlgebricksMetaOperatorDescriptor createFilterNullsSelectOp(JobSpecification spec, int numSecondaryKeyFields)
+            throws AlgebricksException {
+        ICopyEvaluatorFactory[] andArgsEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeyFields];
+        NotDescriptor notDesc = new NotDescriptor();
+        IsNullDescriptor isNullDesc = new IsNullDescriptor();
+        for (int i = 0; i < numSecondaryKeyFields; i++) {
+            // Access column i, and apply 'is not null'.
+            ColumnAccessEvalFactory columnAccessEvalFactory = new ColumnAccessEvalFactory(i);
+            ICopyEvaluatorFactory isNullEvalFactory = isNullDesc
+                    .createEvaluatorFactory(new ICopyEvaluatorFactory[] { columnAccessEvalFactory });
+            ICopyEvaluatorFactory notEvalFactory = notDesc
+                    .createEvaluatorFactory(new ICopyEvaluatorFactory[] { isNullEvalFactory });
+            andArgsEvalFactories[i] = notEvalFactory;
+        }
+        ICopyEvaluatorFactory selectCond = null;
+        if (numSecondaryKeyFields > 1) {
+            // Create conjunctive condition where all secondary index keys must
+            // satisfy 'is not null'.
+            AndDescriptor andDesc = new AndDescriptor();
+            selectCond = andDesc.createEvaluatorFactory(andArgsEvalFactories);
+        } else {
+            selectCond = andArgsEvalFactories[0];
+        }
+        StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(
+                new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(selectCond),
+                null, AqlBinaryBooleanInspectorImpl.FACTORY, false, -1, null);
+        AlgebricksMetaOperatorDescriptor asterixSelectOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
+                new IPushRuntimeFactory[] { select }, new RecordDescriptor[] { secondaryRecDesc });
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixSelectOp,
+                primaryPartitionConstraint);
+        return asterixSelectOp;
+    }
+
+    // This method creates a source indexing operator for external data
+    protected ExternalDataScanOperatorDescriptor createExternalIndexingOp(JobSpecification spec)
+            throws AlgebricksException, AsterixException {
+        // A record + primary keys
+        ISerializerDeserializer[] serdes = new ISerializerDeserializer[1 + numPrimaryKeys];
+        ITypeTraits[] typeTraits = new ITypeTraits[1 + numPrimaryKeys];
+        // payload serde and type traits for the record slot
+        serdes[0] = payloadSerde;
+        typeTraits[0] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
+        //  serdes and type traits for rid fields
+        for (int i = 1; i < serdes.length; i++) {
+            serdes[i] = IndexingConstants.getSerializerDeserializer(i - 1);
+            typeTraits[i] = IndexingConstants.getTypeTraits(i - 1);
+        }
+        // output record desc
+        RecordDescriptor indexerDesc = new RecordDescriptor(serdes, typeTraits);
+
+        // Create the operator and its partition constraints
+        Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> indexingOpAndConstraints;
+        try {
+            indexingOpAndConstraints = ExternalIndexingOperations.createExternalIndexingOp(spec, metadataProvider,
+                    dataset, itemType, indexerDesc, externalFiles);
+        } catch (Exception e) {
+            throw new AlgebricksException(e);
+        }
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexingOpAndConstraints.first,
+                indexingOpAndConstraints.second);
+
+        // Set the primary partition constraints to this partition constraints
+        primaryPartitionConstraint = indexingOpAndConstraints.second;
+        return indexingOpAndConstraints.first;
+    }
+
+    protected AlgebricksMetaOperatorDescriptor createExternalAssignOp(JobSpecification spec, int numSecondaryKeys)
+            throws AlgebricksException {
+        int[] outColumns = new int[numSecondaryKeys];
+        int[] projectionList = new int[numSecondaryKeys + numPrimaryKeys];
+        for (int i = 0; i < numSecondaryKeys; i++) {
+            outColumns[i] = i + numPrimaryKeys + 1;
+            projectionList[i] = i + numPrimaryKeys + 1;
+        }
+
+        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
+        for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
+            sefs[i] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
+                    secondaryFieldAccessEvalFactories[i]);
+        }
+        //add External RIDs to the projection list
+        for (int i = 0; i < numPrimaryKeys; i++) {
+            projectionList[numSecondaryKeys + i] = i + 1;
+        }
+
+        AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
+        AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
+                new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
+        return asterixAssignOp;
+    }
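
    The index arithmetic in the assign op above assumes the input frame is laid out as
    [record, rid_0 .. rid_{numPrimaryKeys-1}], so the RIDs sit at offsets 1..numPrimaryKeys and the
    computed secondary keys are appended after them. A small sketch with hypothetical sizes
    (numPrimaryKeys = 2, numSecondaryKeys = 1 are made-up values, chosen only to show the offsets):

    import java.util.Arrays;

    public class ExternalAssignLayoutSketch {
        public static void main(String[] args) {
            int numPrimaryKeys = 2;
            int numSecondaryKeys = 1;

            int[] outColumns = new int[numSecondaryKeys];
            int[] projectionList = new int[numSecondaryKeys + numPrimaryKeys];
            for (int i = 0; i < numSecondaryKeys; i++) {
                outColumns[i] = i + numPrimaryKeys + 1;      // columns holding the computed keys
                projectionList[i] = i + numPrimaryKeys + 1;  // keep them in the output
            }
            for (int i = 0; i < numPrimaryKeys; i++) {
                projectionList[numSecondaryKeys + i] = i + 1; // then the RID columns
            }
            System.out.println("outColumns     = " + Arrays.toString(outColumns));     // [3]
            System.out.println("projectionList = " + Arrays.toString(projectionList)); // [3, 1, 2]
        }
    }
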
+
+    protected ExternalIndexBulkModifyOperatorDescriptor createExternalIndexBulkModifyOp(JobSpecification spec,
+            int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
+            throws MetadataException, AlgebricksException {
+        int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys];
+        for (int i = 0; i < numSecondaryKeyFields + numPrimaryKeys; i++) {
+            fieldPermutation[i] = i;
+        }
+        // create a list of file ids
+        int numOfDeletedFiles = 0;
+        for (ExternalFile file : externalFiles) {
+            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP)
+                numOfDeletedFiles++;
+        }
+        int[] deletedFiles = new int[numOfDeletedFiles];
+        int i = 0;
+        for (ExternalFile file : externalFiles) {
+            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
+                deletedFiles[i++] = file.getFileNumber();
+            }
+        }
+        ExternalIndexBulkModifyOperatorDescriptor treeIndexBulkLoadOp = new ExternalIndexBulkModifyOperatorDescriptor(
+                spec, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryBloomFilterKeyFields, dataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE, deletedFiles, fieldPermutation, fillFactor, numElementsHint);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
+                secondaryPartitionConstraint);
+        return treeIndexBulkLoadOp;
+    }
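
    The two-pass pattern above (count the pending-drop files, then record their file numbers while
    advancing the cursor) can be sketched without the Asterix types; FileEntry below is a stand-in for
    ExternalFile, not a real class from the codebase:

    import java.util.Arrays;
    import java.util.List;

    public class DeletedFilesSketch {
        // Stand-in for ExternalFile: pendingDrop == true plays the role of PENDING_DROP_OP.
        record FileEntry(int fileNumber, boolean pendingDrop) {}

        public static void main(String[] args) {
            List<FileEntry> externalFiles = Arrays.asList(
                    new FileEntry(0, false), new FileEntry(1, true), new FileEntry(2, true));

            // First pass: count the files marked for deletion.
            int numOfDeletedFiles = 0;
            for (FileEntry f : externalFiles) {
                if (f.pendingDrop()) {
                    numOfDeletedFiles++;
                }
            }
            // Second pass: record their file numbers, incrementing the slot each time.
            int[] deletedFiles = new int[numOfDeletedFiles];
            int i = 0;
            for (FileEntry f : externalFiles) {
                if (f.pendingDrop()) {
                    deletedFiles[i++] = f.fileNumber();
                }
            }
            System.out.println(Arrays.toString(deletedFiles)); // [1, 2]
        }
    }
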
+
+    public List<ExternalFile> getExternalFiles() {
+        return externalFiles;
+    }
+
+    public void setExternalFiles(List<ExternalFile> externalFiles) {
+        this.externalFiles = externalFiles;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/SecondaryInvertedIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryInvertedIndexOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryInvertedIndexOperationsHelper.java
new file mode 100644
index 0000000..74c4256
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryInvertedIndexOperationsHelper.java
@@ -0,0 +1,373 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.asterix.runtime.formats.FormatUtils;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.resource.LSMInvertedIndexLocalResourceMetadata;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
+import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexCompactOperator;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.storage.common.file.LocalResource;
+
+public class SecondaryInvertedIndexOperationsHelper extends SecondaryIndexOperationsHelper {
+
+    private IAType secondaryKeyType;
+    private ITypeTraits[] invListsTypeTraits;
+    private IBinaryComparatorFactory[] tokenComparatorFactories;
+    private ITypeTraits[] tokenTypeTraits;
+    private IBinaryTokenizerFactory tokenizerFactory;
+    // For tokenization, sorting and loading. Represents <token, primary keys>.
+    private int numTokenKeyPairFields;
+    private IBinaryComparatorFactory[] tokenKeyPairComparatorFactories;
+    private RecordDescriptor tokenKeyPairRecDesc;
+    private boolean isPartitioned;
+    private int[] invertedIndexFields;
+    private int[] invertedIndexFieldsForNonBulkLoadOps;
+    private int[] secondaryFilterFieldsForNonBulkLoadOps;
+
+    protected SecondaryInvertedIndexOperationsHelper(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
+        super(physOptConf, propertiesProvider);
+    }
+
+    @Override
+    @SuppressWarnings("rawtypes")
+    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadata) throws AlgebricksException,
+            AsterixException {
+        // Sanity checks.
+        if (numPrimaryKeys > 1) {
+            throw new AsterixException("Cannot create inverted index on dataset with composite primary key.");
+        }
+        if (numSecondaryKeys > 1) {
+            throw new AsterixException("Cannot create composite inverted index on multiple fields.");
+        }
+        if (indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX
+                || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
+            isPartitioned = true;
+        } else {
+            isPartitioned = false;
+        }
+        // Prepare record descriptor used in the assign op, and the optional
+        // select op.
+        secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys + numFilterFields];
+        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys
+                + numFilterFields];
+        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
+        secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
+        ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
+        ISerializerDeserializerProvider serdeProvider = FormatUtils.getDefaultFormat().getSerdeProvider();
+        ITypeTraitProvider typeTraitProvider = FormatUtils.getDefaultFormat().getTypeTraitProvider();
+        if (numSecondaryKeys > 0) {
+            secondaryFieldAccessEvalFactories[0] = FormatUtils.getDefaultFormat().getFieldAccessEvaluatorFactory(
+                    isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(0), numPrimaryKeys);
+            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
+                    secondaryKeyFields.get(0), itemType);
+            secondaryKeyType = keyTypePair.first;
+            anySecondaryKeyIsNullable = anySecondaryKeyIsNullable || keyTypePair.second;
+            ISerializerDeserializer keySerde = serdeProvider.getSerializerDeserializer(secondaryKeyType);
+            secondaryRecFields[0] = keySerde;
+            secondaryTypeTraits[0] = typeTraitProvider.getTypeTrait(secondaryKeyType);
+        }
+        if (numFilterFields > 0) {
+            secondaryFieldAccessEvalFactories[numSecondaryKeys] = FormatUtils.getDefaultFormat()
+                    .getFieldAccessEvaluatorFactory(itemType, filterFieldName, numPrimaryKeys);
+            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
+            IAType type = keyTypePair.first;
+            ISerializerDeserializer serde = serdeProvider.getSerializerDeserializer(type);
+            secondaryRecFields[numPrimaryKeys + numSecondaryKeys] = serde;
+        }
+        secondaryRecDesc = new RecordDescriptor(secondaryRecFields);
+        // Comparators and type traits for tokens.
+        int numTokenFields = (!isPartitioned) ? numSecondaryKeys : numSecondaryKeys + 1;
+        tokenComparatorFactories = new IBinaryComparatorFactory[numTokenFields];
+        tokenTypeTraits = new ITypeTraits[numTokenFields];
+        tokenComparatorFactories[0] = NonTaggedFormatUtil.getTokenBinaryComparatorFactory(secondaryKeyType);
+        tokenTypeTraits[0] = NonTaggedFormatUtil.getTokenTypeTrait(secondaryKeyType);
+        if (isPartitioned) {
+            // The partitioning field is hardcoded to be a short *without* an Asterix type tag.
+            tokenComparatorFactories[1] = PointableBinaryComparatorFactory.of(ShortPointable.FACTORY);
+            tokenTypeTraits[1] = ShortPointable.TYPE_TRAITS;
+        }
+        // Set tokenizer factory.
+        // TODO: We might want to expose the hashing option at the AQL level,
+        // and add the choice to the index metadata.
+        tokenizerFactory = NonTaggedFormatUtil.getBinaryTokenizerFactory(secondaryKeyType.getTypeTag(), indexType,
+                gramLength);
+        // Type traits for inverted-list elements. Inverted lists contain
+        // primary keys.
+        invListsTypeTraits = new ITypeTraits[numPrimaryKeys];
+        if (numPrimaryKeys > 0) {
+            invListsTypeTraits[0] = primaryRecDesc.getTypeTraits()[0];
+            enforcedRecFields[0] = primaryRecDesc.getFields()[0];
+            enforcedTypeTraits[0] = primaryRecDesc.getTypeTraits()[0];
+        }
+        enforcedRecFields[numPrimaryKeys] = serdeProvider.getSerializerDeserializer(itemType);
+        enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
+        // For tokenization, sorting and loading.
+        // One token (+ optional partitioning field) + primary keys.
+        numTokenKeyPairFields = (!isPartitioned) ? 1 + numPrimaryKeys : 2 + numPrimaryKeys;
+        ISerializerDeserializer[] tokenKeyPairFields = new ISerializerDeserializer[numTokenKeyPairFields
+                + numFilterFields];
+        ITypeTraits[] tokenKeyPairTypeTraits = new ITypeTraits[numTokenKeyPairFields];
+        tokenKeyPairComparatorFactories = new IBinaryComparatorFactory[numTokenKeyPairFields];
+        tokenKeyPairFields[0] = serdeProvider.getSerializerDeserializer(secondaryKeyType);
+        tokenKeyPairTypeTraits[0] = tokenTypeTraits[0];
+        tokenKeyPairComparatorFactories[0] = NonTaggedFormatUtil.getTokenBinaryComparatorFactory(secondaryKeyType);
+        int pkOff = 1;
+        if (isPartitioned) {
+            tokenKeyPairFields[1] = ShortSerializerDeserializer.INSTANCE;
+            tokenKeyPairTypeTraits[1] = tokenTypeTraits[1];
+            tokenKeyPairComparatorFactories[1] = PointableBinaryComparatorFactory.of(ShortPointable.FACTORY);
+            pkOff = 2;
+        }
+        if (numPrimaryKeys > 0) {
+            tokenKeyPairFields[pkOff] = primaryRecDesc.getFields()[0];
+            tokenKeyPairTypeTraits[pkOff] = primaryRecDesc.getTypeTraits()[0];
+            tokenKeyPairComparatorFactories[pkOff] = primaryComparatorFactories[0];
+        }
+        if (numFilterFields > 0) {
+            tokenKeyPairFields[numPrimaryKeys + pkOff] = secondaryRecFields[numPrimaryKeys + numSecondaryKeys];
+        }
+        tokenKeyPairRecDesc = new RecordDescriptor(tokenKeyPairFields, tokenKeyPairTypeTraits);
+        if (filterFieldName != null) {
+            invertedIndexFields = new int[numTokenKeyPairFields];
+            for (int i = 0; i < invertedIndexFields.length; i++) {
+                invertedIndexFields[i] = i;
+            }
+            secondaryFilterFieldsForNonBulkLoadOps = new int[numFilterFields];
+            secondaryFilterFieldsForNonBulkLoadOps[0] = numSecondaryKeys + numPrimaryKeys;
+            invertedIndexFieldsForNonBulkLoadOps = new int[numSecondaryKeys + numPrimaryKeys];
+            for (int i = 0; i < invertedIndexFieldsForNonBulkLoadOps.length; i++) {
+                invertedIndexFieldsForNonBulkLoadOps[i] = i;
+            }
+        }
+
+    }
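
    The offset bookkeeping above describes the <token, primary key> record used for tokenizing, sorting
    and loading: one token field, an optional length-partitioning short (only for the LENGTH_PARTITIONED
    index types), then the primary key(s), then any filter field. A minimal sketch of that layout math,
    with toy parameter values rather than real dataset metadata:

    public class TokenKeyPairLayoutSketch {
        // Mirrors numTokenKeyPairFields and pkOff as computed in the method above.
        static void describe(boolean isPartitioned, int numPrimaryKeys, int numFilterFields) {
            int numTokenKeyPairFields = (!isPartitioned ? 1 : 2) + numPrimaryKeys;
            int pkOff = isPartitioned ? 2 : 1;
            System.out.println("partitioned=" + isPartitioned
                    + "  token..pk fields=" + numTokenKeyPairFields
                    + "  first pk at offset " + pkOff
                    + "  filter (if any) at offset " + (numPrimaryKeys + pkOff)
                    + "  total fields incl. filter=" + (numTokenKeyPairFields + numFilterFields));
        }

        public static void main(String[] args) {
            describe(false, 1, 0); // e.g. word index: [token, pk]
            describe(true, 1, 1);  // e.g. length-partitioned: [token, length, pk, filter]
        }
    }
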
+
+    @Override
+    protected int getNumSecondaryKeys() {
+        return numTokenKeyPairFields - numPrimaryKeys;
+    }
+
+    @Override
+    public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
+        ILocalResourceMetadata localResourceMetadata = new LSMInvertedIndexLocalResourceMetadata(invListsTypeTraits,
+                primaryComparatorFactories, tokenTypeTraits, tokenComparatorFactories, tokenizerFactory, isPartitioned,
+                dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties, filterTypeTraits,
+                filterCmpFactories, invertedIndexFields, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
+                invertedIndexFieldsForNonBulkLoadOps);
+        ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
+                localResourceMetadata, LocalResource.LSMInvertedIndexResource);
+
+        IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
+        LSMInvertedIndexCreateOperatorDescriptor invIndexCreateOp = new LSMInvertedIndexCreateOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, tokenTypeTraits, tokenComparatorFactories,
+                invListsTypeTraits, primaryComparatorFactories, tokenizerFactory, dataflowHelperFactory,
+                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, invIndexCreateOp,
+                secondaryPartitionConstraint);
+        spec.addRoot(invIndexCreateOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    @Override
+    public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        // Create dummy key provider for feeding the primary index scan.
+        AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
+
+        // Create primary index scan op.
+        BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
+
+        AbstractOperatorDescriptor sourceOp = primaryScanOp;
+        if (isEnforcingKeyTypes) {
+            sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
+            spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
+        }
+        AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, sourceOp, numSecondaryKeys);
+
+        // If any of the secondary fields are nullable, then add a select op
+        // that filters nulls.
+        AlgebricksMetaOperatorDescriptor selectOp = null;
+        if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+            selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
+        }
+
+        // Create a tokenizer op.
+        AbstractOperatorDescriptor tokenizerOp = createTokenizerOp(spec);
+
+        // Sort by token + primary keys.
+        ExternalSortOperatorDescriptor sortOp = createSortOp(spec, tokenKeyPairComparatorFactories, tokenKeyPairRecDesc);
+
+        // Create secondary inverted index bulk load op.
+        LSMInvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = createInvertedIndexBulkLoadOp(spec);
+
+        AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
+                new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] {});
+        // Connect the operators.
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
+        if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+            spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
+            spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, tokenizerOp, 0);
+        } else {
+            spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, tokenizerOp, 0);
+        }
+        spec.connect(new OneToOneConnectorDescriptor(spec), tokenizerOp, 0, sortOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, invIndexBulkLoadOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), invIndexBulkLoadOp, 0, metaOp, 0);
+        spec.addRoot(metaOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
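
    The loading job above wires scan -> (cast) -> assign -> (null filter) -> tokenize -> sort by
    token + primary key -> bulk load. As a conceptual analogue only (plain Java streams over toy data,
    not the Hyracks operators or the Asterix tokenizer), the same shape looks roughly like this:

    import java.util.*;
    import java.util.stream.*;

    public class InvertedIndexLoadSketch {
        public static void main(String[] args) {
            // Toy "primary index": pk -> text field to be indexed. Stand-ins only.
            Map<Integer, String> primary = new LinkedHashMap<>();
            primary.put(1, "big data systems");
            primary.put(2, "data management");
            primary.put(3, null); // nullable secondary key, filtered out below

            // tokenize -> filter nulls -> sort by (token, pk) -> group into inverted lists ("bulk load")
            Map<String, List<Integer>> invertedIndex = primary.entrySet().stream()
                    .filter(e -> e.getValue() != null)
                    .flatMap(e -> Arrays.stream(e.getValue().split("\\s+"))
                            .map(tok -> Map.entry(tok, e.getKey())))
                    .sorted(Map.Entry.<String, Integer>comparingByKey()
                            .thenComparing(Map.Entry.comparingByValue()))
                    .collect(Collectors.groupingBy(Map.Entry::getKey, LinkedHashMap::new,
                            Collectors.mapping(Map.Entry::getValue, Collectors.toList())));

            invertedIndex.forEach((token, pks) -> System.out.println(token + " -> " + pks));
            // big -> [1], data -> [1, 2], management -> [2], systems -> [1]
        }
    }
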
+
+    private AbstractOperatorDescriptor createTokenizerOp(JobSpecification spec) throws AlgebricksException {
+        int docField = 0;
+        int[] primaryKeyFields = new int[numPrimaryKeys + numFilterFields];
+        for (int i = 0; i < primaryKeyFields.length; i++) {
+            primaryKeyFields[i] = numSecondaryKeys + i;
+        }
+        BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec,
+                tokenKeyPairRecDesc, tokenizerFactory, docField, primaryKeyFields, isPartitioned, false);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tokenizerOp,
+                primaryPartitionConstraint);
+        return tokenizerOp;
+    }
+
+    @Override
+    protected ExternalSortOperatorDescriptor createSortOp(JobSpecification spec,
+            IBinaryComparatorFactory[] secondaryComparatorFactories, RecordDescriptor secondaryRecDesc) {
+        // Sort on token and primary keys.
+        int[] sortFields = new int[numTokenKeyPairFields];
+        for (int i = 0; i < numTokenKeyPairFields; i++) {
+            sortFields[i] = i;
+        }
+        ExternalSortOperatorDescriptor sortOp = new ExternalSortOperatorDescriptor(spec,
+                physOptConf.getMaxFramesExternalSort(), sortFields, tokenKeyPairComparatorFactories, secondaryRecDesc);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sortOp, primaryPartitionConstraint);
+        return sortOp;
+    }
+
+    private LSMInvertedIndexBulkLoadOperatorDescriptor createInvertedIndexBulkLoadOp(JobSpecification spec) {
+        int[] fieldPermutation = new int[numTokenKeyPairFields + numFilterFields];
+        for (int i = 0; i < fieldPermutation.length; i++) {
+            fieldPermutation[i] = i;
+        }
+        IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
+        LSMInvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = new LSMInvertedIndexBulkLoadOperatorDescriptor(
+                spec, secondaryRecDesc, fieldPermutation, false, numElementsHint, false,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, tokenTypeTraits, tokenComparatorFactories,
+                invListsTypeTraits, primaryComparatorFactories, tokenizerFactory, dataflowHelperFactory);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, invIndexBulkLoadOp,
+                secondaryPartitionConstraint);
+        return invIndexBulkLoadOp;
+    }
+
+    private IIndexDataflowHelperFactory createDataflowHelperFactory() {
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        if (!isPartitioned) {
+            return new LSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                    dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
+                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+                    storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+                    filterCmpFactories, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
+                    invertedIndexFieldsForNonBulkLoadOps, !temp);
+        } else {
+            return new PartitionedLSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                    dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
+                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+                    storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+                    filterCmpFactories, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
+                    invertedIndexFieldsForNonBulkLoadOps, !temp);
+        }
+    }
+
+    @Override
+    public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
+        LSMInvertedIndexCompactOperator compactOp = new LSMInvertedIndexCompactOperator(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, tokenTypeTraits, tokenComparatorFactories,
+                invListsTypeTraits, primaryComparatorFactories, tokenizerFactory, dataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                secondaryPartitionConstraint);
+
+        spec.addRoot(compactOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+}


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
new file mode 100644
index 0000000..7bfa056
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
@@ -0,0 +1,1498 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.base.Clause;
+import edu.uci.ics.asterix.aql.base.Expression;
+import edu.uci.ics.asterix.aql.base.Expression.Kind;
+import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.CompactStatement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedPolicyStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
+import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
+import edu.uci.ics.asterix.aql.expression.CreatePrimaryFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateSecondaryFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DatasetDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DistinctClause;
+import edu.uci.ics.asterix.aql.expression.DropStatement;
+import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
+import edu.uci.ics.asterix.aql.expression.FeedPolicyDropStatement;
+import edu.uci.ics.asterix.aql.expression.FieldAccessor;
+import edu.uci.ics.asterix.aql.expression.FieldBinding;
+import edu.uci.ics.asterix.aql.expression.ForClause;
+import edu.uci.ics.asterix.aql.expression.FunctionDecl;
+import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
+import edu.uci.ics.asterix.aql.expression.GbyVariableExpressionPair;
+import edu.uci.ics.asterix.aql.expression.GroupbyClause;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aql.expression.IfExpr;
+import edu.uci.ics.asterix.aql.expression.IndexAccessor;
+import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
+import edu.uci.ics.asterix.aql.expression.JoinClause;
+import edu.uci.ics.asterix.aql.expression.LetClause;
+import edu.uci.ics.asterix.aql.expression.LimitClause;
+import edu.uci.ics.asterix.aql.expression.ListConstructor;
+import edu.uci.ics.asterix.aql.expression.ListConstructor.Type;
+import edu.uci.ics.asterix.aql.expression.LiteralExpr;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
+import edu.uci.ics.asterix.aql.expression.MetaVariableClause;
+import edu.uci.ics.asterix.aql.expression.MetaVariableExpr;
+import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
+import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
+import edu.uci.ics.asterix.aql.expression.OperatorExpr;
+import edu.uci.ics.asterix.aql.expression.OperatorType;
+import edu.uci.ics.asterix.aql.expression.OrderbyClause;
+import edu.uci.ics.asterix.aql.expression.OrderbyClause.OrderModifier;
+import edu.uci.ics.asterix.aql.expression.OrderedListTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.QuantifiedExpression;
+import edu.uci.ics.asterix.aql.expression.QuantifiedExpression.Quantifier;
+import edu.uci.ics.asterix.aql.expression.QuantifiedPair;
+import edu.uci.ics.asterix.aql.expression.Query;
+import edu.uci.ics.asterix.aql.expression.RecordConstructor;
+import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.SetStatement;
+import edu.uci.ics.asterix.aql.expression.TypeDecl;
+import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
+import edu.uci.ics.asterix.aql.expression.TypeReferenceExpression;
+import edu.uci.ics.asterix.aql.expression.UnaryExpr;
+import edu.uci.ics.asterix.aql.expression.UnaryExpr.Sign;
+import edu.uci.ics.asterix.aql.expression.UnionExpr;
+import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.UpdateClause;
+import edu.uci.ics.asterix.aql.expression.UpdateStatement;
+import edu.uci.ics.asterix.aql.expression.VariableExpr;
+import edu.uci.ics.asterix.aql.expression.WhereClause;
+import edu.uci.ics.asterix.aql.expression.WriteStatement;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlPlusExpressionVisitor;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.asterix.formats.base.IDataFormat;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.FileSplitDataSink;
+import edu.uci.ics.asterix.metadata.declared.FileSplitSinkId;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.AsterixFunctionInfo;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation.BroadcastSide;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+
+/**
+ * Each visit returns a pair of an operator and a variable. The variable
+ * corresponds to the new column, if any, added to the tuple flow. E.g., for
+ * Unnest, the column is the variable bound to the elements in the list; for
+ * Subplan it is null. The first argument of a visit method is the expression
+ * which is translated. The second argument of a visit method is the tuple
+ * source for the current subtree.
+ */
+
+public class AqlPlusExpressionToPlanTranslator extends AbstractAqlTranslator implements
+        IAqlPlusExpressionVisitor<Pair<ILogicalOperator, LogicalVariable>, Mutable<ILogicalOperator>> {
+
+    private static final Logger LOGGER = Logger.getLogger(AqlPlusExpressionToPlanTranslator.class.getName());
+
+    private class MetaScopeLogicalVariable {
+        private HashMap<Identifier, LogicalVariable> map = new HashMap<Identifier, LogicalVariable>();
+
+        public VariableReferenceExpression getVariableReferenceExpression(Identifier id) throws AsterixException {
+            LogicalVariable var = map.get(id);
+            LOGGER.fine("get:" + id + ":" + var);
+            if (var == null) {
+                throw new AsterixException("Identifier " + id + " not found in AQL+ meta-scope.");
+            }
+            return new VariableReferenceExpression(var);
+        }
+
+        public void put(Identifier id, LogicalVariable var) {
+            LOGGER.fine("put:" + id + ":" + var);
+            map.put(id, var);
+        }
+    }
+
+    private class MetaScopeILogicalOperator {
+        private HashMap<Identifier, ILogicalOperator> map = new HashMap<Identifier, ILogicalOperator>();
+
+        public ILogicalOperator get(Identifier id) throws AsterixException {
+            ILogicalOperator op = map.get(id);
+            if (op == null) {
+                throw new AsterixException("Identifier " + id + " not found in AQL+ meta-scope.");
+            }
+            return op;
+        }
+
+        public void put(Identifier id, ILogicalOperator op) {
+            LOGGER.fine("put:" + id + ":" + op);
+            map.put(id, op);
+        }
+    }
+
+    private final JobId jobId;
+    private TranslationContext context;
+    private String outputDatasetName;
+    private ICompiledDmlStatement stmt;
+    private AqlMetadataProvider metadataProvider;
+
+    private MetaScopeLogicalVariable metaScopeExp = new MetaScopeLogicalVariable();
+    private MetaScopeILogicalOperator metaScopeOp = new MetaScopeILogicalOperator();
+    private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
+
+    public AqlPlusExpressionToPlanTranslator(JobId jobId, AqlMetadataProvider metadataProvider,
+            Counter currentVarCounter, String outputDatasetName, ICompiledDmlStatement stmt) {
+        this.jobId = jobId;
+        this.metadataProvider = metadataProvider;
+        this.context = new TranslationContext(currentVarCounter);
+        this.outputDatasetName = outputDatasetName;
+        this.stmt = stmt;
+        this.context.setTopFlwor(false);
+    }
+
+    public int getVarCounter() {
+        return context.getVarCounter();
+    }
+
+    public ILogicalPlan translate(Query expr) throws AlgebricksException, AsterixException {
+        return translate(expr, null);
+    }
+
+    public ILogicalPlan translate(Query expr, AqlMetadataProvider metadata) throws AlgebricksException,
+            AsterixException {
+        IDataFormat format = metadata.getFormat();
+        if (format == null) {
+            throw new AlgebricksException("Data format has not been set.");
+        }
+        format.registerRuntimeFunctions();
+        Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, new MutableObject<ILogicalOperator>(
+                new EmptyTupleSourceOperator()));
+
+        ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
+
+        boolean isTransactionalWrite = false;
+        ILogicalOperator topOp = p.first;
+        ProjectOperator project = (ProjectOperator) topOp;
+        LogicalVariable resVar = project.getVariables().get(0);
+        if (outputDatasetName == null) {
+            List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
+            writeExprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
+            FileSplitSinkId fssi = new FileSplitSinkId(metadata.getOutputFile());
+            FileSplitDataSink sink = new FileSplitDataSink(fssi, null);
+            topOp = new WriteOperator(writeExprList, sink);
+            topOp.getInputs().add(new MutableObject<ILogicalOperator>(project));
+        } else {
+            Dataset dataset = metadata.findDataset(stmt.getDataverseName(), outputDatasetName);
+            if (dataset == null) {
+                throw new AlgebricksException("Cannot find dataset " + outputDatasetName);
+            }
+            if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+                throw new AlgebricksException("Cannot write output to an external dataset.");
+            }
+            ARecordType itemType = (ARecordType) metadata.findType(dataset.getDataverseName(),
+                    dataset.getItemTypeName());
+            List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
+            ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
+            ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
+            List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
+            for (List<String> partitioningKey : partitioningKeys) {
+                Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioner = format
+                        .partitioningEvaluatorFactory(itemType, partitioningKey);
+                AbstractFunctionCallExpression f = partitioner.second.cloneExpression();
+                f.substituteVar(METADATA_DUMMY_VAR, resVar);
+                exprs.add(new MutableObject<ILogicalExpression>(f));
+                LogicalVariable v = context.newVar();
+                vars.add(v);
+                varRefsForLoading.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
+            }
+            AssignOperator assign = new AssignOperator(vars, exprs);
+            assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
+        }
+
+        globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
+        ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
+        return plan;
+    }
+
+    public ILogicalPlan translate(List<Clause> clauses) throws AlgebricksException, AsterixException {
+
+        if (clauses == null) {
+            return null;
+        }
+
+        Mutable<ILogicalOperator> opRef = new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator());
+        Pair<ILogicalOperator, LogicalVariable> p = null;
+        for (Clause c : clauses) {
+            p = c.accept(this, opRef);
+            opRef = new MutableObject<ILogicalOperator>(p.first);
+        }
+
+        ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
+
+        ILogicalOperator topOp = p.first;
+
+        globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
+        ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
+        return plan;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitForClause(ForClause fc, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        LogicalVariable v = context.newVar(fc.getVarExpr());
+
+        Expression inExpr = fc.getInExpr();
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(inExpr, tupSource);
+        ILogicalOperator returnedOp;
+
+        if (fc.getPosVarExpr() == null) {
+            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)));
+        } else {
+            LogicalVariable pVar = context.newVar(fc.getPosVarExpr());
+            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)),
+                    pVar, BuiltinType.AINT32, new AqlPositionWriter());
+        }
+        returnedOp.getInputs().add(eo.second);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
+    }
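
    The for-clause above becomes an UnnestOperator; when a positional variable is present, the operator
    also binds an AINT32 running position via AqlPositionWriter. A rough plain-Java analogue of that
    behavior (positions shown 1-based here for illustration; the exact numbering is defined by the
    runtime, not by this sketch):

    import java.util.Arrays;
    import java.util.List;

    public class UnnestWithPositionSketch {
        public static void main(String[] args) {
            // Rough analogue of: for $x at $p in [ "a", "b", "c" ] return { "p": $p, "x": $x }
            List<String> inExpr = Arrays.asList("a", "b", "c");
            for (int p = 1; p <= inExpr.size(); p++) {
                String x = inExpr.get(p - 1);
                System.out.println("{ \"p\": " + p + ", \"x\": \"" + x + "\" }");
            }
        }
    }
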
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLetClause(LetClause lc, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        LogicalVariable v;
+        ILogicalOperator returnedOp;
+
+        switch (lc.getBindingExpr().getKind()) {
+            case VARIABLE_EXPRESSION: {
+                v = context.newVar(lc.getVarExpr());
+                LogicalVariable prev = context.getVar(((VariableExpr) lc.getBindingExpr()).getVar().getId());
+                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(
+                        new VariableReferenceExpression(prev)));
+                returnedOp.getInputs().add(tupSource);
+                break;
+            }
+            default: {
+                Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(lc.getBindingExpr(),
+                        tupSource);
+                v = context.newVar(lc.getVarExpr());
+                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(eo.first));
+                returnedOp.getInputs().add(eo.second);
+                break;
+            }
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFlworExpression(FLWOGRExpression flwor,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Mutable<ILogicalOperator> flworPlan = tupSource;
+        boolean isTop = context.isTopFlwor();
+        if (isTop) {
+            context.setTopFlwor(false);
+        }
+        for (Clause c : flwor.getClauseList()) {
+            Pair<ILogicalOperator, LogicalVariable> pC = c.accept(this, flworPlan);
+            flworPlan = new MutableObject<ILogicalOperator>(pC.first);
+        }
+
+        Expression r = flwor.getReturnExpr();
+        boolean noFlworClause = flwor.noForClause();
+
+        if (r.getKind() == Kind.VARIABLE_EXPRESSION) {
+            VariableExpr v = (VariableExpr) r;
+            LogicalVariable var = context.getVar(v.getVar().getId());
+
+            return produceFlwrResult(noFlworClause, isTop, flworPlan, var);
+
+        } else {
+            Mutable<ILogicalOperator> baseOp = new MutableObject<ILogicalOperator>(flworPlan.getValue());
+            Pair<ILogicalOperator, LogicalVariable> rRes = r.accept(this, baseOp);
+            ILogicalOperator rOp = rRes.first;
+            ILogicalOperator resOp;
+            if (expressionNeedsNoNesting(r)) {
+                baseOp.setValue(flworPlan.getValue());
+                resOp = rOp;
+            } else {
+                SubplanOperator s = new SubplanOperator(rOp);
+                s.getInputs().add(flworPlan);
+                resOp = s;
+                baseOp.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
+            }
+            Mutable<ILogicalOperator> resOpRef = new MutableObject<ILogicalOperator>(resOp);
+            return produceFlwrResult(noFlworClause, isTop, resOpRef, rRes.second);
+        }
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFieldAccessor(FieldAccessor fa,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(fa.getExpr(), tupSource);
+        LogicalVariable v = context.newVar();
+        AbstractFunctionCallExpression fldAccess = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME));
+        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+        ILogicalExpression faExpr = new ConstantExpression(new AsterixConstantValue(new AString(fa.getIdent()
+                .getValue())));
+        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(faExpr));
+        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(fldAccess));
+        a.getInputs().add(p.second);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
+
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitIndexAccessor(IndexAccessor ia,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(ia.getExpr(), tupSource);
+        LogicalVariable v = context.newVar();
+        AbstractFunctionCallExpression f;
+        if (ia.isAny()) {
+            f = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.ANY_COLLECTION_MEMBER));
+            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+        } else {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> indexPair = aqlExprToAlgExpression(ia.getIndexExpr(),
+                    tupSource);
+            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM));
+            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+            f.getArguments().add(new MutableObject<ILogicalExpression>(indexPair.first));
+        }
+        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
+        a.getInputs().add(p.second);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCallExpr(CallExpr fcall, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        LogicalVariable v = context.newVar();
+        FunctionSignature signature = fcall.getFunctionSignature();
+        List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        for (Expression expr : fcall.getExprList()) {
+            switch (expr.getKind()) {
+                case VARIABLE_EXPRESSION: {
+                    LogicalVariable var = context.getVar(((VariableExpr) expr).getVar().getId());
+                    args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
+                    break;
+                }
+                case LITERAL_EXPRESSION: {
+                    LiteralExpr val = (LiteralExpr) expr;
+                    args.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                            ConstantHelper.objectFromLiteral(val.getValue())))));
+                    break;
+                }
+                default: {
+                    Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
+                    AbstractLogicalOperator o1 = (AbstractLogicalOperator) eo.second.getValue();
+                    args.add(new MutableObject<ILogicalExpression>(eo.first));
+                    if (o1 != null && !(o1.getOperatorTag() == LogicalOperatorTag.ASSIGN && hasOnlyChild(o1, topOp))) {
+                        topOp = eo.second;
+                    }
+                    break;
+                }
+            }
+        }
+
+        FunctionIdentifier fi = new FunctionIdentifier(AlgebricksBuiltinFunctions.ALGEBRICKS_NS, signature.getName());
+        AsterixFunctionInfo afi = AsterixBuiltinFunctions.lookupFunction(fi);
+        FunctionIdentifier builtinAquafi = afi == null ? null : afi.getFunctionIdentifier();
+
+        if (builtinAquafi != null) {
+            fi = builtinAquafi;
+        } else {
+            fi = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, signature.getName());
+            FunctionIdentifier builtinAsterixFi = AsterixBuiltinFunctions.getBuiltinFunctionIdentifier(fi);
+            if (builtinAsterixFi != null) {
+                fi = builtinAsterixFi;
+            }
+        }
+        AbstractFunctionCallExpression f;
+        if (AsterixBuiltinFunctions.isBuiltinAggregateFunction(fi)) {
+            f = AsterixBuiltinFunctions.makeAggregateFunctionExpression(fi, args);
+        } else if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(fi)) {
+            UnnestingFunctionCallExpression ufce = new UnnestingFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(fi), args);
+            ufce.setReturnsUniqueValues(AsterixBuiltinFunctions.returnsUniqueValues(fi));
+            f = ufce;
+        } else {
+            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fi), args);
+        }
+        AssignOperator op = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
+        if (topOp != null) {
+            op.getInputs().add(topOp);
+        }
+
+        return new Pair<ILogicalOperator, LogicalVariable>(op, v);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDecl(FunctionDecl fd,
+            Mutable<ILogicalOperator> tupSource) {
+        // TODO Auto-generated method stub
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitGroupbyClause(GroupbyClause gc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        GroupByOperator gOp = new GroupByOperator();
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (GbyVariableExpressionPair ve : gc.getGbyPairList()) {
+            LogicalVariable v;
+            VariableExpr vexpr = ve.getVar();
+            if (vexpr != null) {
+                v = context.newVar(vexpr);
+            } else {
+                v = context.newVar();
+            }
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
+            gOp.addGbyExpression(v, eo.first);
+            topOp = eo.second;
+        }
+        for (GbyVariableExpressionPair ve : gc.getDecorPairList()) {
+            LogicalVariable v;
+            VariableExpr vexpr = ve.getVar();
+            if (vexpr != null) {
+                v = context.newVar(vexpr);
+            } else {
+                v = context.newVar();
+            }
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
+            gOp.addDecorExpression(v, eo.first);
+            topOp = eo.second;
+        }
+        gOp.getInputs().add(topOp);
+
+        for (VariableExpr var : gc.getWithVarList()) {
+            LogicalVariable aggVar = context.newVar();
+            LogicalVariable oldVar = context.getVar(var);
+            List<Mutable<ILogicalExpression>> flArgs = new ArrayList<Mutable<ILogicalExpression>>(1);
+            flArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldVar)));
+            AggregateFunctionCallExpression fListify = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
+                    AsterixBuiltinFunctions.LISTIFY, flArgs);
+            AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(aggVar),
+                    (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fListify)));
+            agg.getInputs().add(
+                    new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
+                            new MutableObject<ILogicalOperator>(gOp))));
+            ILogicalPlan plan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(agg));
+            gOp.getNestedPlans().add(plan);
+            // Hide the variable that was part of the "with", replacing it with
+            // the one bound by the aggregation op.
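+            // For example, after "group by ... with $e", downstream uses of $e
+            // refer to the listified values of $e within each group.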
+            context.setVar(var, aggVar);
+        }
+
+        gOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, gc.hasHashGroupByHint());
+        return new Pair<ILogicalOperator, LogicalVariable>(gOp, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitIfExpr(IfExpr ifexpr, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        // In the most general case, IfThenElse is translated in the following
+        // way.
+        //
+        // We assign the result of the condition to one variable varCond.
+        // We create one subplan which contains the plan for the "then" branch,
+        // on top of which there is a selection whose condition is varCond.
+        // Similarly, we create one subplan for the "else" branch, in which the
+        // selection is not(varCond).
+        // Finally, we concatenate the results (via concat-non-null).
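+        //
+        // Roughly, the resulting plan looks like this (names are illustrative only):
+        //
+        //   ASSIGN resV := concat-non-null(thenVar, elseVar)
+        //     SUBPLAN { SELECT(varCond) over the "then" plan,
+        //               SELECT(not(varCond)) over the "else" plan }
+        //       <plan that computes varCond>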
+
+        Pair<ILogicalOperator, LogicalVariable> pCond = ifexpr.getCondExpr().accept(this, tupSource);
+        ILogicalOperator opCond = pCond.first;
+        LogicalVariable varCond = pCond.second;
+
+        SubplanOperator sp = new SubplanOperator();
+        Mutable<ILogicalOperator> nestedSource = new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
+                new MutableObject<ILogicalOperator>(sp)));
+
+        Pair<ILogicalOperator, LogicalVariable> pThen = ifexpr.getThenExpr().accept(this, nestedSource);
+        SelectOperator sel1 = new SelectOperator(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                varCond)), false, null);
+        sel1.getInputs().add(new MutableObject<ILogicalOperator>(pThen.first));
+
+        Pair<ILogicalOperator, LogicalVariable> pElse = ifexpr.getElseExpr().accept(this, nestedSource);
+        AbstractFunctionCallExpression notVarCond = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), new MutableObject<ILogicalExpression>(
+                        new VariableReferenceExpression(varCond)));
+        SelectOperator sel2 = new SelectOperator(new MutableObject<ILogicalExpression>(notVarCond), false, null);
+        sel2.getInputs().add(new MutableObject<ILogicalOperator>(pElse.first));
+
+        ILogicalPlan p1 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel1));
+        sp.getNestedPlans().add(p1);
+        ILogicalPlan p2 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel2));
+        sp.getNestedPlans().add(p2);
+
+        Mutable<ILogicalOperator> opCondRef = new MutableObject<ILogicalOperator>(opCond);
+        sp.getInputs().add(opCondRef);
+
+        LogicalVariable resV = context.newVar();
+        AbstractFunctionCallExpression concatNonNull = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CONCAT_NON_NULL),
+                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pThen.second)),
+                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pElse.second)));
+        AssignOperator a = new AssignOperator(resV, new MutableObject<ILogicalExpression>(concatNonNull));
+        a.getInputs().add(new MutableObject<ILogicalOperator>(sp));
+
+        return new Pair<ILogicalOperator, LogicalVariable>(a, resV);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLiteralExpr(LiteralExpr l, Mutable<ILogicalOperator> tupSource) {
+        LogicalVariable var = context.newVar();
+        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(new ConstantExpression(
+                new AsterixConstantValue(ConstantHelper.objectFromLiteral(l.getValue())))));
+        if (tupSource != null) {
+            a.getInputs().add(tupSource);
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitOperatorExpr(OperatorExpr op,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        ArrayList<OperatorType> ops = op.getOpList();
+        int nOps = ops.size();
+
+        if (nOps > 0 && (ops.get(0) == OperatorType.AND || ops.get(0) == OperatorType.OR)) {
+            return visitAndOrOperator(op, tupSource);
+        }
+
+        ArrayList<Expression> exprs = op.getExprList();
+
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        ILogicalExpression currExpr = null;
+        for (int i = 0; i <= nOps; i++) {
+
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
+            topOp = p.second;
+            ILogicalExpression e = p.first;
+            // now look at the operator
+            if (i < nOps) {
+                if (OperatorExpr.opIsComparison(ops.get(i))) {
+                    AbstractFunctionCallExpression c = createComparisonExpression(ops.get(i));
+
+                    // chain the operators
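+                    // e.g., "a < b <= c" ends up as le(lt(a, b), c)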
+                    if (i == 0) {
+                        c.getArguments().add(new MutableObject<ILogicalExpression>(e));
+                        currExpr = c;
+                        if (op.isBroadcastOperand(i)) {
+                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
+                            bcast.setObject(BroadcastSide.LEFT);
+                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
+                        }
+                    } else {
+                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
+                                new MutableObject<ILogicalExpression>(e));
+                        c.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
+                        currExpr = c;
+                        if (i == 1 && op.isBroadcastOperand(i)) {
+                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
+                            bcast.setObject(BroadcastSide.RIGHT);
+                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
+                        }
+                    }
+                } else {
+                    AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(ops.get(i));
+
+                    if (i == 0) {
+                        f.getArguments().add(new MutableObject<ILogicalExpression>(e));
+                        currExpr = f;
+                    } else {
+                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
+                                new MutableObject<ILogicalExpression>(e));
+                        f.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
+                        currExpr = f;
+                    }
+                }
+            } else { // don't forget the last expression...
+                ((AbstractFunctionCallExpression) currExpr).getArguments()
+                        .add(new MutableObject<ILogicalExpression>(e));
+                if (i == 1 && op.isBroadcastOperand(i)) {
+                    BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
+                    bcast.setObject(BroadcastSide.RIGHT);
+                    ((AbstractFunctionCallExpression) currExpr).getAnnotations().put(
+                            BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
+                }
+            }
+        }
+
+        LogicalVariable assignedVar = context.newVar();
+        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(currExpr));
+
+        a.getInputs().add(topOp);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitOrderbyClause(OrderbyClause oc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+
+        OrderOperator ord = new OrderOperator();
+        Iterator<OrderModifier> modifIter = oc.getModifierList().iterator();
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (Expression e : oc.getOrderbyList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(e, topOp);
+            OrderModifier m = modifIter.next();
+            OrderOperator.IOrder comp = (m == OrderModifier.ASC) ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;
+            ord.getOrderExpressions()
+                    .add(new Pair<IOrder, Mutable<ILogicalExpression>>(comp, new MutableObject<ILogicalExpression>(
+                            p.first)));
+            topOp = p.second;
+        }
+        ord.getInputs().add(topOp);
+        if (oc.getNumTuples() > 0) {
+            ord.getAnnotations().put(OperatorAnnotations.CARDINALITY, oc.getNumTuples());
+        }
+        if (oc.getNumFrames() > 0) {
+            ord.getAnnotations().put(OperatorAnnotations.MAX_NUMBER_FRAMES, oc.getNumFrames());
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(ord, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitQuantifiedExpression(QuantifiedExpression qe,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        ILogicalOperator firstOp = null;
+        Mutable<ILogicalOperator> lastOp = null;
+
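+        // Rough sketch of the translation (illustrative only):
+        //   "some  $x in $l satisfies p($x)" -> AGGREGATE non-empty-stream()
+        //                                         over SELECT p($x) over UNNEST $x
+        //   "every $x in $l satisfies p($x)" -> AGGREGATE empty-stream()
+        //                                         over SELECT not(p($x)) over UNNEST $x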
+        for (QuantifiedPair qt : qe.getQuantifiedList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(qt.getExpr(), topOp);
+            topOp = eo1.second;
+            LogicalVariable uVar = context.newVar(qt.getVarExpr());
+            ILogicalOperator u = new UnnestOperator(uVar, new MutableObject<ILogicalExpression>(
+                    makeUnnestExpression(eo1.first)));
+
+            if (firstOp == null) {
+                firstOp = u;
+            }
+            if (lastOp != null) {
+                u.getInputs().add(lastOp);
+            }
+            lastOp = new MutableObject<ILogicalOperator>(u);
+        }
+
+        // We make all the unnests corresponding to the quantified variables sit on
+        // top, in the hope of enabling joins and other optimizations.
+        firstOp.getInputs().add(topOp);
+        topOp = lastOp;
+
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
+
+        AggregateFunctionCallExpression fAgg;
+        SelectOperator s;
+        if (qe.getQuantifier() == Quantifier.SOME) {
+            s = new SelectOperator(new MutableObject<ILogicalExpression>(eo2.first), false, null);
+            s.getInputs().add(eo2.second);
+            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.NON_EMPTY_STREAM,
+                    new ArrayList<Mutable<ILogicalExpression>>());
+        } else { // EVERY
+            List<Mutable<ILogicalExpression>> satExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
+            satExprList.add(new MutableObject<ILogicalExpression>(eo2.first));
+            s = new SelectOperator(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), satExprList)), false, null);
+            s.getInputs().add(eo2.second);
+            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.EMPTY_STREAM,
+                    new ArrayList<Mutable<ILogicalExpression>>());
+        }
+        LogicalVariable qeVar = context.newVar();
+        AggregateOperator a = new AggregateOperator(mkSingletonArrayList(qeVar),
+                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fAgg)));
+        a.getInputs().add(new MutableObject<ILogicalOperator>(s));
+        return new Pair<ILogicalOperator, LogicalVariable>(a, qeVar);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitQuery(Query q, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        return q.getBody().accept(this, tupSource);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitRecordConstructor(RecordConstructor rc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR));
+        LogicalVariable v1 = context.newVar();
+        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (FieldBinding fb : rc.getFbList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(fb.getLeftExpr(), topOp);
+            f.getArguments().add(new MutableObject<ILogicalExpression>(eo1.first));
+            topOp = eo1.second;
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(fb.getRightExpr(), topOp);
+            f.getArguments().add(new MutableObject<ILogicalExpression>(eo2.first));
+            topOp = eo2.second;
+        }
+        a.getInputs().add(topOp);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitListConstructor(ListConstructor lc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        FunctionIdentifier fid = (lc.getType() == Type.ORDERED_LIST_CONSTRUCTOR) ? AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR
+                : AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR;
+        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
+        LogicalVariable v1 = context.newVar();
+        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (Expression expr : lc.getExprList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
+            f.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
+            topOp = eo.second;
+        }
+        a.getInputs().add(topOp);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUnaryExpr(UnaryExpr u, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        Expression expr = u.getExpr();
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, tupSource);
+        LogicalVariable v1 = context.newVar();
+        AssignOperator a;
+        if (u.getSign() == Sign.POSITIVE) {
+            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(eo.first));
+        } else {
+            AbstractFunctionCallExpression m = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NUMERIC_UNARY_MINUS));
+            m.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
+            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(m));
+        }
+        a.getInputs().add(eo.second);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitVariableExpr(VariableExpr v, Mutable<ILogicalOperator> tupSource) {
+        // Should we ever get to this method?
+        LogicalVariable var = context.newVar();
+        LogicalVariable oldV = context.getVar(v.getVar().getId());
+        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(
+                new VariableReferenceExpression(oldV)));
+        a.getInputs().add(tupSource);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitWhereClause(WhereClause w, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(w.getWhereExpr(), tupSource);
+        SelectOperator s = new SelectOperator(new MutableObject<ILogicalExpression>(p.first), false, null);
+        s.getInputs().add(p.second);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(s, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLimitClause(LimitClause lc, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getLimitExpr(), tupSource);
+        LimitOperator opLim;
+        Expression offset = lc.getOffset();
+        if (offset != null) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p2 = aqlExprToAlgExpression(offset, p1.second);
+            opLim = new LimitOperator(p1.first, p2.first);
+            opLim.getInputs().add(p2.second);
+        } else {
+            opLim = new LimitOperator(p1.first);
+            opLim.getInputs().add(p1.second);
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(opLim, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
+        Mutable<ILogicalOperator> input = null;
+        for (Expression expr : dc.getDistinctByExpr()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(expr, tupSource);
+            exprList.add(new MutableObject<ILogicalExpression>(p.first));
+            input = p.second;
+        }
+        DistinctOperator opDistinct = new DistinctOperator(exprList);
+        opDistinct.getInputs().add(input);
+        return new Pair<ILogicalOperator, LogicalVariable>(opDistinct, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUnionExpr(UnionExpr unionExpr,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Mutable<ILogicalOperator> ts = tupSource;
+        ILogicalOperator lastOp = null;
+        LogicalVariable lastVar = null;
+        boolean first = true;
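+        // Each input branch is unnested; consecutive branches are combined with
+        // UNION ALL, and the final result is listified again into a single list.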
+        for (Expression e : unionExpr.getExprs()) {
+            if (first) {
+                first = false;
+            } else {
+                ts = new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator());
+            }
+            Pair<ILogicalOperator, LogicalVariable> p1 = e.accept(this, ts);
+            if (lastOp == null) {
+                lastOp = p1.first;
+                lastVar = p1.second;
+            } else {
+                LogicalVariable unnestVar1 = context.newVar();
+                UnnestOperator unnest1 = new UnnestOperator(unnestVar1, new MutableObject<ILogicalExpression>(
+                        makeUnnestExpression(new VariableReferenceExpression(lastVar))));
+                unnest1.getInputs().add(new MutableObject<ILogicalOperator>(lastOp));
+                LogicalVariable unnestVar2 = context.newVar();
+                UnnestOperator unnest2 = new UnnestOperator(unnestVar2, new MutableObject<ILogicalExpression>(
+                        makeUnnestExpression(new VariableReferenceExpression(p1.second))));
+                unnest2.getInputs().add(new MutableObject<ILogicalOperator>(p1.first));
+                List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(
+                        1);
+                LogicalVariable resultVar = context.newVar();
+                Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
+                        unnestVar1, unnestVar2, resultVar);
+                varMap.add(triple);
+                UnionAllOperator unionOp = new UnionAllOperator(varMap);
+                unionOp.getInputs().add(new MutableObject<ILogicalOperator>(unnest1));
+                unionOp.getInputs().add(new MutableObject<ILogicalOperator>(unnest2));
+                lastVar = resultVar;
+                lastOp = unionOp;
+            }
+        }
+        LogicalVariable aggVar = context.newVar();
+        ArrayList<LogicalVariable> aggregVars = new ArrayList<LogicalVariable>(1);
+        aggregVars.add(aggVar);
+        List<Mutable<ILogicalExpression>> afcExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
+        afcExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(lastVar)));
+        AggregateFunctionCallExpression afc = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
+                AsterixBuiltinFunctions.LISTIFY, afcExprs);
+        ArrayList<Mutable<ILogicalExpression>> aggregExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
+        aggregExprs.add(new MutableObject<ILogicalExpression>(afc));
+        AggregateOperator agg = new AggregateOperator(aggregVars, aggregExprs);
+        agg.getInputs().add(new MutableObject<ILogicalOperator>(lastOp));
+        return new Pair<ILogicalOperator, LogicalVariable>(agg, aggVar);
+    }
+
+    private AbstractFunctionCallExpression createComparisonExpression(OperatorType t) {
+        FunctionIdentifier fi = operatorTypeToFunctionIdentifier(t);
+        IFunctionInfo finfo = FunctionUtils.getFunctionInfo(fi);
+        return new ScalarFunctionCallExpression(finfo);
+    }
+
+    private FunctionIdentifier operatorTypeToFunctionIdentifier(OperatorType t) {
+        switch (t) {
+            case EQ: {
+                return AlgebricksBuiltinFunctions.EQ;
+            }
+            case NEQ: {
+                return AlgebricksBuiltinFunctions.NEQ;
+            }
+            case GT: {
+                return AlgebricksBuiltinFunctions.GT;
+            }
+            case GE: {
+                return AlgebricksBuiltinFunctions.GE;
+            }
+            case LT: {
+                return AlgebricksBuiltinFunctions.LT;
+            }
+            case LE: {
+                return AlgebricksBuiltinFunctions.LE;
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    private AbstractFunctionCallExpression createFunctionCallExpressionForBuiltinOperator(OperatorType t)
+            throws AsterixException {
+
+        FunctionIdentifier fid = null;
+        switch (t) {
+            case PLUS: {
+                fid = AlgebricksBuiltinFunctions.NUMERIC_ADD;
+                break;
+            }
+            case MINUS: {
+                fid = AsterixBuiltinFunctions.NUMERIC_SUBTRACT;
+                break;
+            }
+            case MUL: {
+                fid = AsterixBuiltinFunctions.NUMERIC_MULTIPLY;
+                break;
+            }
+            case DIV: {
+                fid = AsterixBuiltinFunctions.NUMERIC_DIVIDE;
+                break;
+            }
+            case MOD: {
+                fid = AsterixBuiltinFunctions.NUMERIC_MOD;
+                break;
+            }
+            case IDIV: {
+                fid = AsterixBuiltinFunctions.NUMERIC_IDIV;
+                break;
+            }
+            case CARET: {
+                fid = AsterixBuiltinFunctions.CARET;
+                break;
+            }
+            case AND: {
+                fid = AlgebricksBuiltinFunctions.AND;
+                break;
+            }
+            case OR: {
+                fid = AlgebricksBuiltinFunctions.OR;
+                break;
+            }
+            case FUZZY_EQ: {
+                fid = AsterixBuiltinFunctions.FUZZY_EQ;
+                break;
+            }
+
+            default: {
+                throw new NotImplementedException("Operator " + t + " is not yet implemented");
+            }
+        }
+        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
+    }
+
+    private static boolean hasOnlyChild(ILogicalOperator parent, Mutable<ILogicalOperator> childCandidate) {
+        List<Mutable<ILogicalOperator>> inp = parent.getInputs();
+        if (inp == null || inp.size() != 1) {
+            return false;
+        }
+        return inp.get(0) == childCandidate;
+    }
+
+    private Pair<ILogicalExpression, Mutable<ILogicalOperator>> aqlExprToAlgExpression(Expression expr,
+            Mutable<ILogicalOperator> topOp) throws AsterixException {
+        switch (expr.getKind()) {
+            case VARIABLE_EXPRESSION: {
+                VariableReferenceExpression ve = new VariableReferenceExpression(context.getVar(((VariableExpr) expr)
+                        .getVar().getId()));
+                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(ve, topOp);
+            }
+            case METAVARIABLE_EXPRESSION: {
+                ILogicalExpression le = metaScopeExp.getVariableReferenceExpression(((VariableExpr) expr).getVar());
+                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(le, topOp);
+            }
+            case LITERAL_EXPRESSION: {
+                LiteralExpr val = (LiteralExpr) expr;
+                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new ConstantExpression(
+                        new AsterixConstantValue(ConstantHelper.objectFromLiteral(val.getValue()))), topOp);
+            }
+            default: {
+                // Mutable<ILogicalExpression> src = new
+                // Mutable<ILogicalExpression>();
+                // Mutable<ILogicalExpression> src = topOp;
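+                // Either inline the expression produced by the translated assign,
+                // or wrap the translated plan in a subplan and refer to its
+                // result variable.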
+                if (expressionNeedsNoNesting(expr)) {
+                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, topOp);
+                    ILogicalExpression exp = ((AssignOperator) p.first).getExpressions().get(0).getValue();
+                    return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(exp, p.first.getInputs().get(0));
+                } else {
+                    Mutable<ILogicalOperator> src = new MutableObject<ILogicalOperator>();
+
+                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, src);
+
+                    if (((AbstractLogicalOperator) p.first).getOperatorTag() == LogicalOperatorTag.SUBPLAN) {
+                        // src.setOperator(topOp.getOperator());
+                        Mutable<ILogicalOperator> top2 = new MutableObject<ILogicalOperator>(p.first);
+                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
+                                p.second), top2);
+                    } else {
+                        SubplanOperator s = new SubplanOperator();
+                        s.getInputs().add(topOp);
+                        src.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
+                        Mutable<ILogicalOperator> planRoot = new MutableObject<ILogicalOperator>(p.first);
+                        s.setRootOp(planRoot);
+                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
+                                p.second), new MutableObject<ILogicalOperator>(s));
+                    }
+                }
+            }
+        }
+
+    }
+
+    private Pair<ILogicalOperator, LogicalVariable> produceFlwrResult(boolean noForClause, boolean isTop,
+            Mutable<ILogicalOperator> resOpRef, LogicalVariable resVar) {
+        if (isTop) {
+            ProjectOperator pr = new ProjectOperator(resVar);
+            pr.getInputs().add(resOpRef);
+            return new Pair<ILogicalOperator, LogicalVariable>(pr, resVar);
+
+        } else if (noForClause) {
+            return new Pair<ILogicalOperator, LogicalVariable>(resOpRef.getValue(), resVar);
+        } else {
+            return aggListify(resVar, resOpRef, false);
+        }
+    }
+
+    private Pair<ILogicalOperator, LogicalVariable> aggListify(LogicalVariable var, Mutable<ILogicalOperator> opRef,
+            boolean bProject) {
+        AggregateFunctionCallExpression funAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
+                AsterixBuiltinFunctions.LISTIFY, new ArrayList<Mutable<ILogicalExpression>>());
+        funAgg.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
+        LogicalVariable varListified = context.newVar();
+        AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(varListified),
+                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(funAgg)));
+        agg.getInputs().add(opRef);
+        ILogicalOperator res;
+        if (bProject) {
+            ProjectOperator pr = new ProjectOperator(varListified);
+            pr.getInputs().add(new MutableObject<ILogicalOperator>(agg));
+            res = pr;
+        } else {
+            res = agg;
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(res, varListified);
+    }
+
+    private Pair<ILogicalOperator, LogicalVariable> visitAndOrOperator(OperatorExpr op,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        ArrayList<OperatorType> ops = op.getOpList();
+        int nOps = ops.size();
+
+        ArrayList<Expression> exprs = op.getExprList();
+
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        OperatorType opLogical = ops.get(0);
+        AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(opLogical);
+
+        for (int i = 0; i <= nOps; i++) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
+            topOp = p.second;
+            // now look at the operator
+            if (i < nOps) {
+                if (ops.get(i) != opLogical) {
+                    throw new TranslationException("Unexpected operator " + ops.get(i)
+                            + " in an OperatorExpr starting with " + opLogical);
+                }
+            }
+            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+        }
+
+        LogicalVariable assignedVar = context.newVar();
+        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(f));
+        a.getInputs().add(topOp);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
+
+    }
+
+    private static boolean expressionNeedsNoNesting(Expression expr) {
+        Kind k = expr.getKind();
+        return k == Kind.LITERAL_EXPRESSION || k == Kind.LIST_CONSTRUCTOR_EXPRESSION
+                || k == Kind.RECORD_CONSTRUCTOR_EXPRESSION || k == Kind.VARIABLE_EXPRESSION
+                || k == Kind.CALL_EXPRESSION || k == Kind.OP_EXPRESSION || k == Kind.FIELD_ACCESSOR_EXPRESSION
+                || k == Kind.INDEX_ACCESSOR_EXPRESSION || k == Kind.UNARY_EXPRESSION;
+    }
+
+    private <T> ArrayList<T> mkSingletonArrayList(T item) {
+        ArrayList<T> array = new ArrayList<T>(1);
+        array.add(item);
+        return array;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitTypeDecl(TypeDecl td, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitRecordTypeDefiniton(RecordTypeDefinition tre,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitTypeReferenceExpression(TypeReferenceExpression tre,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitNodegroupDecl(NodegroupDecl ngd, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLoadStatement(LoadStatement stmtLoad,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDropStatement(DropStatement del, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateIndexStatement(CreateIndexStatement cis,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitOrderedListTypeDefiniton(OrderedListTypeDefinition olte,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUnorderedListTypeDefiniton(UnorderedListTypeDefinition ulte,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitMetaVariableClause(MetaVariableClause mc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        return new Pair<ILogicalOperator, LogicalVariable>(metaScopeOp.get(mc.getVar()), null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitJoinClause(JoinClause jc, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        // Pair<ILogicalOperator, LogicalVariable> leftSide =
+        // jc.getLeftExpr().accept(this, tupSource);
+        Mutable<ILogicalOperator> opRef = tupSource;
+        Pair<ILogicalOperator, LogicalVariable> leftSide = null;
+        for (Clause c : jc.getLeftClauses()) {
+            leftSide = c.accept(this, opRef);
+            opRef = new MutableObject<ILogicalOperator>(leftSide.first);
+        }
+
+        // Pair<ILogicalOperator, LogicalVariable> rightSide =
+        // jc.getRightExpr().accept(this, tupSource);
+        opRef = tupSource;
+        Pair<ILogicalOperator, LogicalVariable> rightSide = null;
+        for (Clause c : jc.getRightClauses()) {
+            rightSide = c.accept(this, opRef);
+            opRef = new MutableObject<ILogicalOperator>(rightSide.first);
+        }
+
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> whereCond = aqlExprToAlgExpression(jc.getWhereExpr(),
+                tupSource);
+
+        AbstractBinaryJoinOperator join;
+        switch (jc.getKind()) {
+            case INNER: {
+                join = new InnerJoinOperator(new MutableObject<ILogicalExpression>(whereCond.first));
+                break;
+            }
+            case LEFT_OUTER: {
+                join = new LeftOuterJoinOperator(new MutableObject<ILogicalExpression>(whereCond.first));
+                break;
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+        join.getInputs().add(new MutableObject<ILogicalOperator>(leftSide.first));
+        join.getInputs().add(new MutableObject<ILogicalOperator>(rightSide.first));
+        return new Pair<ILogicalOperator, LogicalVariable>(join, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitMetaVariableExpr(MetaVariableExpr me,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        LogicalVariable var = context.newVar();
+        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(
+                metaScopeExp.getVariableReferenceExpression(me.getVar())));
+        a.getInputs().add(tupSource);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
+    }
+
+    public void addOperatorToMetaScope(Identifier id, ILogicalOperator op) {
+        metaScopeOp.put(id, op);
+    }
+
+    public void addVariableToMetaScope(Identifier id, LogicalVariable var) {
+        metaScopeExp.put(id, var);
+    }
+
+    private ILogicalExpression makeUnnestExpression(ILogicalExpression expr) {
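+        // Variables and non-unnesting function calls are wrapped in scan-collection;
+        // unnesting calls and all other expressions are returned unchanged.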
+        switch (expr.getExpressionTag()) {
+            case VARIABLE: {
+                return new UnnestingFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
+                        new MutableObject<ILogicalExpression>(expr));
+            }
+            case FUNCTION_CALL: {
+                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+                if (fce.getKind() == FunctionKind.UNNEST) {
+                    return expr;
+                } else {
+                    return new UnnestingFunctionCallExpression(
+                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
+                            new MutableObject<ILogicalExpression>(expr));
+                }
+            }
+            default: {
+                return expr;
+            }
+        }
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitInsertStatement(InsertStatement insert,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDeleteStatement(DeleteStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUpdateStatement(UpdateStatement update,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUpdateClause(UpdateClause del, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDecl(DataverseDecl dv, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDatasetDecl(DatasetDecl dd, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitSetStatement(SetStatement ss, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitWriteStatement(WriteStatement ws, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateDataverseStatement(CreateDataverseStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitIndexDropStatement(IndexDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitNodeGroupDropStatement(NodeGroupDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDropStatement(DataverseDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitTypeDropStatement(TypeDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDisconnectFeedStatement(DisconnectFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visit(CreateFunctionStatement cfs, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDropStatement(FunctionDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitConnectFeedStatement(ConnectFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedStatement(FeedDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCompactStatement(CompactStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreatePrimaryFeedStatement(CreatePrimaryFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateSecondaryFeedStatement(CreateSecondaryFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateFeedPolicyStatement(CreateFeedPolicyStatement cfps,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedPolicyStatement(FeedPolicyDropStatement dfs,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPositionWriter.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPositionWriter.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPositionWriter.java
new file mode 100644
index 0000000..da66fe4
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPositionWriter.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.Serializable;
+
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IUnnestingPositionWriter;
+
+public class AqlPositionWriter implements IUnnestingPositionWriter, Serializable {
+    private static final long serialVersionUID = 1L;
+
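+    // Writes the position as a tagged AsterixDB int64 value: the AINT64 type-tag
+    // byte followed by the 8-byte position.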
+    @Override
+    public void write(DataOutput dataOutput, long position) throws IOException {
+        dataOutput.writeByte(BuiltinType.AINT64.getTypeTag().serialize());
+        dataOutput.writeLong(position);
+    }
+
+}


[31/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
new file mode 100644
index 0000000..42b60d5
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -0,0 +1,598 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Stack;
+
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlIndex;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AOrderedList;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
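+        // The rule matches a SINK whose input is an INSERT_DELETE on an internal dataset.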
+        AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
+        if (op0.getOperatorTag() != LogicalOperatorTag.SINK) {
+            return false;
+        }
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) op0.getInputs().get(0).getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+            return false;
+        }
+
+        FunctionIdentifier fid = null;
+        /** find the record variable */
+        InsertDeleteOperator insertOp = (InsertDeleteOperator) op1;
+        ILogicalExpression recordExpr = insertOp.getPayloadExpression().getValue();
+        List<LogicalVariable> recordVar = new ArrayList<LogicalVariable>();
+        /** assume the payload is always a single variable expression */
+        recordExpr.getUsedVariables(recordVar);
+
+        /**
+         * op2 is the assign operator which extracts primary keys from the record
+         * variable.
+         */
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
+
+        if (recordVar.size() == 0) {
+            /**
+             * For the case where the primary-key assignment expressions are constant
+             * expressions, find the assign op that creates the record to be
+             * inserted/deleted.
+             */
+            while (fid != AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR) {
+                if (op2.getInputs().size() == 0) {
+                    return false;
+                }
+                op2 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
+                if (op2.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+                    continue;
+                }
+                AssignOperator assignOp = (AssignOperator) op2;
+                ILogicalExpression assignExpr = assignOp.getExpressions().get(0).getValue();
+                if (assignExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) assignOp.getExpressions()
+                            .get(0).getValue();
+                    fid = funcExpr.getFunctionIdentifier();
+                }
+            }
+            AssignOperator assignOp2 = (AssignOperator) op2;
+            recordVar.addAll(assignOp2.getVariables());
+        }
+        AqlDataSource datasetSource = (AqlDataSource) insertOp.getDataSource();
+        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+        String dataverseName = datasetSource.getId().getDataverseName();
+        String datasetName = datasetSource.getId().getDatasourceName();
+        Dataset dataset = mp.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
+        }
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            return false;
+        }
+
+        // Create operators for secondary index insert/delete.
+        String itemTypeName = dataset.getItemTypeName();
+        IAType itemType = mp.findType(dataset.getDataverseName(), itemTypeName);
+        if (itemType.getTypeTag() != ATypeTag.RECORD) {
+            throw new AlgebricksException("Only record types can be indexed.");
+        }
+        ARecordType recType = (ARecordType) itemType;
+        List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
+        ILogicalOperator currentTop = op1;
+        boolean hasSecondaryIndex = false;
+
+        // Handle n-gram and keyword indexes in a later stage of the index update,
+        // since a TokenizeOperator needs to be involved.
+        Collections.sort(indexes, new Comparator<Index>() {
+            @Override
+            public int compare(Index o1, Index o2) {
+                return o1.getIndexType().ordinal() - o2.getIndexType().ordinal();
+            }
+
+        });
+
+        // Count the secondary indexes on the dataset
+        int secondaryIndexTotalCnt = 0;
+        for (Index index : indexes) {
+            if (index.isSecondaryIndex())
+                secondaryIndexTotalCnt++;
+        }
+
+        // Initialize inputs to the SINK operator
+        if (secondaryIndexTotalCnt > 0) {
+            op0.getInputs().clear();
+        }
+
+        // The ReplicateOperator is applied only when bulk-loading.
+        AbstractLogicalOperator replicateOp = null;
+
+        if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload()) {
+            // Split the logical plan into "each secondary index update branch"
+            // to replicate each <PK,RECORD> pair.
+            replicateOp = new ReplicateOperator(secondaryIndexTotalCnt);
+            replicateOp.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+            replicateOp.setExecutionMode(ExecutionMode.PARTITIONED);
+            context.computeAndSetTypeEnvironmentForOperator(replicateOp);
+            currentTop = replicateOp;
+        }
+
+        // Prepare filtering field information
+        List<String> additionalFilteringField = ((InternalDatasetDetails) dataset.getDatasetDetails()).getFilterField();
+        List<LogicalVariable> additionalFilteringVars = null;
+        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
+        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
+        AssignOperator additionalFilteringAssign = null;
+
+        if (additionalFilteringField != null) {
+            additionalFilteringVars = new ArrayList<LogicalVariable>();
+            additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            prepareVarAndExpression(additionalFilteringField, recType.getFieldNames(), recordVar.get(0),
+                    additionalFilteringAssignExpressions, additionalFilteringVars, context);
+            additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
+                    additionalFilteringAssignExpressions);
+            for (LogicalVariable var : additionalFilteringVars) {
+                additionalFilteringExpressions.add(new MutableObject<ILogicalExpression>(
+                        new VariableReferenceExpression(var)));
+            }
+        }
+
+        // Iterate over each secondary index and apply the index-update operations.
+        for (Index index : indexes) {
+            List<LogicalVariable> projectVars = new ArrayList<LogicalVariable>();
+            VariableUtilities.getUsedVariables(op1, projectVars);
+            if (!index.isSecondaryIndex()) {
+                continue;
+            }
+            LogicalVariable enforcedRecordVar = recordVar.get(0);
+            hasSecondaryIndex = true;
+            //if the index is enforcing field types
+            if (index.isEnforcingKeyFileds()) {
+                try {
+                    DatasetDataSource ds = (DatasetDataSource) (insertOp.getDataSource());
+                    ARecordType insertRecType = (ARecordType) ds.getSchemaTypes()[ds.getSchemaTypes().length - 1];
+                    LogicalVariable castVar = context.newVar();
+                    ARecordType enforcedType = createEnforcedType(insertRecType, index);
+                    //introduce casting to enforced type
+                    AbstractFunctionCallExpression castFunc = new ScalarFunctionCallExpression(
+                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CAST_RECORD));
+
+                    castFunc.getArguments().add(
+                            new MutableObject<ILogicalExpression>(insertOp.getPayloadExpression().getValue()));
+                    TypeComputerUtilities.setRequiredAndInputTypes(castFunc, enforcedType, insertRecType);
+                    AssignOperator newAssignOperator = new AssignOperator(castVar,
+                            new MutableObject<ILogicalExpression>(castFunc));
+                    newAssignOperator.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+                    currentTop = newAssignOperator;
+                    //project out casted record
+                    projectVars.add(castVar);
+                    enforcedRecordVar = castVar;
+                    context.computeAndSetTypeEnvironmentForOperator(newAssignOperator);
+                    context.computeAndSetTypeEnvironmentForOperator(currentTop);
+                    recType = enforcedType;
+                } catch (AsterixException e) {
+                    throw new AlgebricksException(e);
+                }
+            }
+
+            List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
+            List<IAType> secondaryKeyTypes = index.getKeyFieldTypes();
+            List<LogicalVariable> secondaryKeyVars = new ArrayList<LogicalVariable>();
+            List<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();
+            List<Mutable<ILogicalExpression>> secondaryExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+
+            for (List<String> secondaryKey : secondaryKeyFields) {
+                prepareVarAndExpression(secondaryKey, recType.getFieldNames(), enforcedRecordVar, expressions,
+                        secondaryKeyVars, context);
+            }
+
+            AssignOperator assign = new AssignOperator(secondaryKeyVars, expressions);
+            ProjectOperator project = new ProjectOperator(projectVars);
+
+            if (additionalFilteringAssign != null) {
+                additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(project));
+                assign.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
+            } else {
+                assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
+            }
+
+            // Only apply replicate operator when doing bulk-load
+            if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload())
+                project.getInputs().add(new MutableObject<ILogicalOperator>(replicateOp));
+            else
+                project.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+
+            context.computeAndSetTypeEnvironmentForOperator(project);
+
+            if (additionalFilteringAssign != null) {
+                context.computeAndSetTypeEnvironmentForOperator(additionalFilteringAssign);
+            }
+
+            context.computeAndSetTypeEnvironmentForOperator(assign);
+            currentTop = assign;
+
+            // BTree, Keyword, or n-gram index case
+            if (index.getIndexType() == IndexType.BTREE
+                    || index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
+                    || index.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
+                    || index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
+                    || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
+                for (LogicalVariable secondaryKeyVar : secondaryKeyVars) {
+                    secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                            secondaryKeyVar)));
+                }
+                Mutable<ILogicalExpression> filterExpression = createFilterExpression(secondaryKeyVars,
+                        context.getOutputTypeEnvironment(currentTop), false);
+                AqlIndex dataSourceIndex = new AqlIndex(index, dataverseName, datasetName, mp);
+
+                // Introduce the TokenizeOperator only when bulk-loading and
+                // the index type is keyword or n-gram.
+                if (index.getIndexType() != IndexType.BTREE && insertOp.isBulkload()) {
+
+                    // Check whether the index is length-partitioned or not.
+                    // If partitioned, [input variables to TokenizeOperator,
+                    // token, number of tokens] pairs will be generated and
+                    // fed into the IndexInsertDeleteOperator.
+                    // If not, [input variables, token] pairs will be generated
+                    // and fed into the IndexInsertDeleteOperator.
+                    // Input variables are passed along since TokenizeOperator is
+                    // not a filtering operator.
+                    boolean isPartitioned = false;
+                    if (index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
+                            || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)
+                        isPartitioned = true;
+
+                    // Create a new logical variable - token
+                    List<LogicalVariable> tokenizeKeyVars = new ArrayList<LogicalVariable>();
+                    List<Mutable<ILogicalExpression>> tokenizeKeyExprs = new ArrayList<Mutable<ILogicalExpression>>();
+                    LogicalVariable tokenVar = context.newVar();
+                    tokenizeKeyVars.add(tokenVar);
+                    tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(
+                            new VariableReferenceExpression(tokenVar)));
+
+                    // Check the field type of the secondary key.
+                    IAType secondaryKeyType = null;
+                    Pair<IAType, Boolean> keyPairType = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0),
+                            recType);
+                    secondaryKeyType = keyPairType.first;
+
+                    List<Object> varTypes = new ArrayList<Object>();
+                    varTypes.add(NonTaggedFormatUtil.getTokenType(secondaryKeyType));
+
+                    // If the index is length-partitioned, then create an
+                    // additional variable - the number of tokens.
+                    // We use a special type for the length-partitioned index.
+                    // The type is short, and it does not carry type info.
+                    if (isPartitioned) {
+                        LogicalVariable lengthVar = context.newVar();
+                        tokenizeKeyVars.add(lengthVar);
+                        tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                                lengthVar)));
+                        varTypes.add(BuiltinType.SHORTWITHOUTTYPEINFO);
+                    }
+
+                    // TokenizeOperator to tokenize [SK, PK] pairs
+                    TokenizeOperator tokenUpdate = new TokenizeOperator(dataSourceIndex,
+                            insertOp.getPrimaryKeyExpressions(), secondaryExpressions, tokenizeKeyVars,
+                            filterExpression, insertOp.getOperation(), insertOp.isBulkload(), isPartitioned, varTypes);
+                    tokenUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+                    context.computeAndSetTypeEnvironmentForOperator(tokenUpdate);
+
+                    IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
+                            insertOp.getPrimaryKeyExpressions(), tokenizeKeyExprs, filterExpression,
+                            insertOp.getOperation(), insertOp.isBulkload());
+                    indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                    indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(tokenUpdate));
+
+                    context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
+
+                    currentTop = indexUpdate;
+                    op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+
+                } else {
+                    // When TokenizeOperator is not needed
+                    IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
+                            insertOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression,
+                            insertOp.getOperation(), insertOp.isBulkload());
+                    indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                    indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+
+                    currentTop = indexUpdate;
+                    context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
+
+                    if (insertOp.isBulkload())
+                        op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+
+                }
+
+            } else if (index.getIndexType() == IndexType.RTREE) {
+                Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
+                        secondaryKeyFields.get(0), recType);
+                IAType spatialType = keyPairType.first;
+                int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+                int numKeys = dimension * 2;
+                List<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
+                List<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
+                for (int i = 0; i < numKeys; i++) {
+                    LogicalVariable keyVar = context.newVar();
+                    keyVarList.add(keyVar);
+                    AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
+                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_MBR));
+                    createMBR.getArguments().add(
+                            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVars
+                                    .get(0))));
+                    createMBR.getArguments().add(
+                            new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                                    new AInt32(dimension)))));
+                    createMBR.getArguments().add(
+                            new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                                    new AInt32(i)))));
+                    keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
+                }
+                for (LogicalVariable secondaryKeyVar : keyVarList) {
+                    secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                            secondaryKeyVar)));
+                }
+                AssignOperator assignCoordinates = new AssignOperator(keyVarList, keyExprList);
+                assignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+                context.computeAndSetTypeEnvironmentForOperator(assignCoordinates);
+                // We must enforce the filter if the originating spatial type is
+                // nullable.
+                boolean forceFilter = keyPairType.second;
+                Mutable<ILogicalExpression> filterExpression = createFilterExpression(keyVarList,
+                        context.getOutputTypeEnvironment(assignCoordinates), forceFilter);
+                AqlIndex dataSourceIndex = new AqlIndex(index, dataverseName, datasetName, mp);
+                IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
+                        insertOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression,
+                        insertOp.getOperation(), insertOp.isBulkload());
+                indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
+                currentTop = indexUpdate;
+                context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
+
+                if (insertOp.isBulkload())
+                    op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+
+            }
+
+        }
+        if (!hasSecondaryIndex) {
+            return false;
+        }
+
+        if (!insertOp.isBulkload()) {
+            op0.getInputs().clear();
+            op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+        }
+        return true;
+    }
+
+    public static ARecordType createEnforcedType(ARecordType initialType, Index index) throws AsterixException,
+            AlgebricksException {
+        ARecordType enforcedType = initialType;
+        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
+            try {
+                Stack<Pair<ARecordType, String>> nestedTypeStack = new Stack<Pair<ARecordType, String>>();
+                List<String> splits = index.getKeyFieldNames().get(i);
+                ARecordType nestedFieldType = enforcedType;
+                boolean openRecords = false;
+                String bridgeName = nestedFieldType.getTypeName();
+                int j;
+                //Build the stack for the enforced type
+                for (j = 1; j < splits.size(); j++) {
+                    nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
+                    bridgeName = nestedFieldType.getTypeName();
+                    nestedFieldType = (ARecordType) enforcedType.getSubFieldType(splits.subList(0, j));
+                    if (nestedFieldType == null) {
+                        openRecords = true;
+                        break;
+                    }
+                }
+                if (openRecords) {
+                    //create the smallest record
+                    enforcedType = new ARecordType(splits.get(splits.size() - 2), new String[] { splits.get(splits
+                            .size() - 1) }, new IAType[] { AUnionType.createNullableType(index.getKeyFieldTypes()
+                            .get(i)) }, true);
+                    //create the open part of the nested field
+                    for (int k = splits.size() - 3; k > (j - 2); k--) {
+                        enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
+                                new IAType[] { AUnionType.createNullableType(enforcedType) }, true);
+                    }
+                    //Bridge the gap
+                    Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
+                    ARecordType parent = gapPair.first;
+
+                    IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
+                            new IAType[] { AUnionType.createNullableType(enforcedType) });
+                    enforcedType = new ARecordType(bridgeName, ArrayUtils.addAll(parent.getFieldNames(),
+                            enforcedType.getTypeName()), parentFieldTypes, true);
+
+                } else {
+                    //Schema is closed all the way to the field
+                    //enforced fields are either null or strongly typed
+                    enforcedType = new ARecordType(nestedFieldType.getTypeName(), ArrayUtils.addAll(
+                            nestedFieldType.getFieldNames(), splits.get(splits.size() - 1)), ArrayUtils.addAll(
+                            nestedFieldType.getFieldTypes(),
+                            AUnionType.createNullableType(index.getKeyFieldTypes().get(i))), nestedFieldType.isOpen());
+                }
+
+                //Create the enforced type for the nested fields in the schema, from the ground up
+                if (nestedTypeStack.size() > 0) {
+                    while (!nestedTypeStack.isEmpty()) {
+                        Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
+                        ARecordType nestedRecType = nestedTypePair.first;
+                        IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
+                        nestedRecTypeFieldTypes[nestedRecType.findFieldPosition(nestedTypePair.second)] = enforcedType;
+                        enforcedType = new ARecordType(nestedRecType.getTypeName(), nestedRecType.getFieldNames(),
+                                nestedRecTypeFieldTypes, nestedRecType.isOpen());
+                    }
+                }
+
+            } catch (AsterixException e) {
+                throw new AlgebricksException("Cannot enforce typed fields "
+                        + StringUtils.join(index.getKeyFieldNames()), e);
+            } catch (IOException e) {
+                throw new AsterixException(e);
+            }
+        }
+        return enforcedType;
+    }
+
+    @SuppressWarnings("unchecked")
+    private void prepareVarAndExpression(List<String> field, String[] fieldNames, LogicalVariable recordVar,
+            List<Mutable<ILogicalExpression>> expressions, List<LogicalVariable> vars, IOptimizationContext context)
+            throws AlgebricksException {
+        Mutable<ILogicalExpression> varRef = new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                recordVar));
+        int pos = -1;
+        if (field.size() == 1) {
+            for (int j = 0; j < fieldNames.length; j++) {
+                if (fieldNames[j].equals(field.get(0))) {
+                    pos = j;
+                    break;
+                }
+            }
+        }
+        if (pos == -1) {
+            AbstractFunctionCallExpression func;
+            if (field.size() > 1) {
+                AOrderedList fieldList = new AOrderedList(new AOrderedListType(BuiltinType.ASTRING, null));
+                for (int i = 0; i < field.size(); i++) {
+                    fieldList.add(new AString(field.get(i)));
+                }
+                Mutable<ILogicalExpression> fieldRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
+                        new AsterixConstantValue(fieldList)));
+                //Create an expression for the nested case
+                func = new ScalarFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED), varRef, fieldRef);
+            } else {
+                Mutable<ILogicalExpression> fieldRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
+                        new AsterixConstantValue(new AString(field.get(0)))));
+                //Create an expression for the open field case (By name)
+                func = new ScalarFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME), varRef, fieldRef);
+            }
+            expressions.add(new MutableObject<ILogicalExpression>(func));
+            LogicalVariable newVar = context.newVar();
+            vars.add(newVar);
+        } else {
+            // Assumes the indexed field is in the closed portion of the type.
+            Mutable<ILogicalExpression> indexRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
+                    new AsterixConstantValue(new AInt32(pos))));
+            AbstractFunctionCallExpression func = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX), varRef, indexRef);
+            expressions.add(new MutableObject<ILogicalExpression>(func));
+            LogicalVariable newVar = context.newVar();
+            vars.add(newVar);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private Mutable<ILogicalExpression> createFilterExpression(List<LogicalVariable> secondaryKeyVars,
+            IVariableTypeEnvironment typeEnv, boolean forceFilter) throws AlgebricksException {
+        List<Mutable<ILogicalExpression>> filterExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+        // Add 'is not null' to all nullable secondary index keys as a filtering
+        // condition.
+        for (LogicalVariable secondaryKeyVar : secondaryKeyVars) {
+            IAType secondaryKeyType = (IAType) typeEnv.getVarType(secondaryKeyVar);
+            if (!NonTaggedFormatUtil.isOptional(secondaryKeyType) && !forceFilter) {
+                continue;
+            }
+            ScalarFunctionCallExpression isNullFuncExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.IS_NULL),
+                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
+            ScalarFunctionCallExpression notFuncExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT), new MutableObject<ILogicalExpression>(
+                            isNullFuncExpr));
+            filterExpressions.add(new MutableObject<ILogicalExpression>(notFuncExpr));
+        }
+        // No nullable secondary keys.
+        if (filterExpressions.isEmpty()) {
+            return null;
+        }
+        Mutable<ILogicalExpression> filterExpression = null;
+        if (filterExpressions.size() > 1) {
+            // Create a conjunctive condition.
+            filterExpression = new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.AND), filterExpressions));
+        } else {
+            filterExpression = filterExpressions.get(0);
+        }
+        return filterExpression;
+    }
+}
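
For illustration only, here is a minimal, self-contained sketch of the nullable-key filtering that createFilterExpression in the rule above produces: a conjunction of not(is-null(key)) checks over the nullable secondary-key positions, or no filter at all when nothing is nullable. It uses plain java.util.function.Predicate in place of Algebricks expressions; every name below is invented for the sketch and is not an AsterixDB API.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

public class NullableKeyFilterSketch {

    // Builds a conjunctive "is not null" filter over the secondary-key positions whose
    // type is nullable (or over all positions when forceFilter is set), returning null
    // when no key needs filtering, just as the rule returns a null filter expression.
    static Predicate<Object[]> buildFilter(boolean[] nullableKey, boolean forceFilter) {
        List<Predicate<Object[]>> conjuncts = new ArrayList<>();
        for (int i = 0; i < nullableKey.length; i++) {
            if (!nullableKey[i] && !forceFilter) {
                continue; // non-nullable keys contribute no conjunct
            }
            final int pos = i;
            conjuncts.add(keys -> keys[pos] != null); // not(is-null(key))
        }
        if (conjuncts.isEmpty()) {
            return null; // no nullable secondary keys: no filter expression
        }
        Predicate<Object[]> filter = conjuncts.get(0);
        for (int i = 1; i < conjuncts.size(); i++) {
            filter = filter.and(conjuncts.get(i)); // AND over all conjuncts
        }
        return filter;
    }

    public static void main(String[] args) {
        Predicate<Object[]> filter = buildFilter(new boolean[] { true, false }, false);
        System.out.println(filter.test(new Object[] { "music", 1 })); // true: key row reaches the index
        System.out.println(filter.test(new Object[] { null, 1 }));    // false: null key is filtered out
    }
}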

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
new file mode 100644
index 0000000..6419fc1
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.optimizer.rules.typecast.StaticTypeCastUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Statically cast a constant from its type to a specified required type, in a
+ * recursive way. It enables: 1. bag-based fields in a record, 2. bidirectional
+ * casting between an open field and a matching closed field, and 3. filling in
+ * null fields when necessary. It should be fired before the constant folding rule.
+ * This rule is not responsible for type casting between primitive types.
+ * Here is an example: A record { "hobby": {{"music", "coding"}}, "id": "001",
+ * "name": "Person Three"} which conforms to the closed type ( id: string, name:
+ * string, hobby: {{string}}? ) can be cast to an open type (id: string ), or
+ * vice versa.
+ * Implementation-wise: first, we match the record's type against its target
+ * dataset type to see whether it is "cast-able"; second, if the types are
+ * cast-able, we embed the required type into the original producer expression.
+ * If the types are not cast-able, we throw a compile-time exception.
+ * Then, at runtime (not in this rule), the corresponding record/list
+ * constructors know what to do by checking the required output type.
+ * TODO: right now record/list constructor of the cast result is not done in the
+ * ConstantFoldingRule and has to go to the runtime, because the
+ * ConstantFoldingRule uses ARecordSerializerDeserializer which seems to have
+ * some problem.
+ */
+public class IntroduceStaticTypeCastForInsertRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        /**
+         * pattern match: sink/insert/assign. The record type is propagated from
+         * the insert data source to the record-constructor expression.
+         */
+        if (context.checkIfInDontApplySet(this, opRef.getValue()))
+            return false;
+        context.addToDontApplySet(this, opRef.getValue());
+
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        List<LogicalVariable> producedVariables = new ArrayList<LogicalVariable>();
+        LogicalVariable oldRecordVariable;
+
+        if (op1.getOperatorTag() != LogicalOperatorTag.SINK)
+            return false;
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
+        if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE)
+            return false;
+        InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
+        if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
+            return false;
+        /**
+         * get required record type
+         */
+        InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) op2;
+        AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
+        IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
+        IAType requiredRecordType = schemaTypes[schemaTypes.length - 1];
+
+        List<LogicalVariable> usedVariables = new ArrayList<LogicalVariable>();
+        insertDeleteOperator.getPayloadExpression().getValue().getUsedVariables(usedVariables);
+
+        // the used variables should contain the record that will be inserted,
+        // but in many cases nothing fails even if the used-variable set is
+        // empty
+        if (usedVariables.size() == 0)
+            return false;
+
+        oldRecordVariable = usedVariables.get(0);
+        LogicalVariable inputRecordVar = usedVariables.get(0);
+        IVariableTypeEnvironment env = insertDeleteOperator.computeOutputTypeEnvironment(context);
+        IAType inputRecordType = (IAType) env.getVarType(inputRecordVar);
+
+        AbstractLogicalOperator currentOperator = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
+        /**
+         * find the assign operator for the "input record" to the insert_delete
+         * operator
+         */
+        do {
+            context.addToDontApplySet(this, currentOperator);
+            if (currentOperator.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                AssignOperator assignOp = (AssignOperator) currentOperator;
+                producedVariables.clear();
+                VariableUtilities.getProducedVariables(currentOperator, producedVariables);
+                int position = producedVariables.indexOf(oldRecordVariable);
+
+                /**
+                 * set the top-down propagated type
+                 */
+                if (position >= 0) {
+                    AssignOperator originalAssign = (AssignOperator) currentOperator;
+                    List<Mutable<ILogicalExpression>> expressionRefs = originalAssign.getExpressions();
+                    ILogicalExpression expr = expressionRefs.get(position).getValue();
+                    if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+                        // the expression has already been rewritten; do not fail,
+                        // just return false
+                        if (TypeComputerUtilities.getRequiredType(funcExpr) != null) {
+                            context.computeAndSetTypeEnvironmentForOperator(assignOp);
+                            return false;
+                        }
+                        IVariableTypeEnvironment assignEnv = assignOp.computeOutputTypeEnvironment(context);
+                        StaticTypeCastUtil.rewriteFuncExpr(funcExpr, requiredRecordType, inputRecordType, assignEnv);
+                    }
+                    context.computeAndSetTypeEnvironmentForOperator(originalAssign);
+                }
+            }
+            if (currentOperator.getInputs().size() > 0)
+                currentOperator = (AbstractLogicalOperator) currentOperator.getInputs().get(0).getValue();
+            else
+                break;
+        } while (currentOperator != null);
+        return true;
+    }
+
+}
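
For illustration only, a hedged, self-contained analogue of the cast described in the Javadoc above, written over plain java.util.Map records instead of ADM values: required fields must be present (nullable ones are filled with null), and a field outside the required closed schema makes the record not cast-able. The names castToClosed, requiredFields and nullableFields are invented for this sketch and are not AsterixDB APIs.

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

public class StaticRecordCastSketch {

    // "Casts" an open record to a closed required type: every required field must be
    // present (missing nullable fields are filled with null, as the rule's Javadoc
    // describes), and extra fields make the record not cast-able, which the rule
    // reports as a compile-time exception.
    static Map<String, Object> castToClosed(Map<String, Object> record,
                                            Set<String> requiredFields,
                                            Set<String> nullableFields) {
        Map<String, Object> result = new LinkedHashMap<>();
        for (String f : requiredFields) {
            if (record.containsKey(f)) {
                result.put(f, record.get(f));
            } else if (nullableFields.contains(f)) {
                result.put(f, null); // put in a null field when necessary
            } else {
                throw new IllegalArgumentException("not cast-able: missing field " + f);
            }
        }
        for (String f : record.keySet()) {
            if (!requiredFields.contains(f)) {
                throw new IllegalArgumentException("not cast-able: extra field " + f);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, Object> person = new LinkedHashMap<>();
        person.put("id", "001");
        person.put("name", "Person Three");
        // "hobby" ({{string}}?) is nullable and absent, so the cast fills it with null.
        System.out.println(castToClosed(person,
                new LinkedHashSet<>(Arrays.asList("id", "name", "hobby")),
                Collections.singleton("hobby")));
    }
}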

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
new file mode 100644
index 0000000..3984fe8
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceTransactionCommitByAssignOpRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator selectOperator = (SelectOperator) op;
+
+        Mutable<ILogicalOperator> childOfSelect = selectOperator.getInputs().get(0);
+
+        //[Direction] SelectOp(cond1)<--ChildOps... ==> SelectOp(booleanValue of cond1)<--NewAssignOp(cond1)<--ChildOps...
+        //#. Create an assign-operator with a new local variable and the condition of the select-operator.
+        //#. Set the input(child operator) of the new assign-operator to input(child operator) of the select-operator.
+        //   (Later, the newly created assign-operator will apply the condition on behalf of the select-operator,
+        //    and set the variable of the assign-operator to a boolean value according to the condition evaluation.)
+        //#. Give the select-operator the result boolean value created by the newly created child assign-operator.
+
+        //create an assignOp with a variable and the condition of the select-operator.
+        LogicalVariable v = context.newVar();
+        AssignOperator assignOperator = new AssignOperator(v, new MutableObject<ILogicalExpression>(selectOperator
+                .getCondition().getValue()));
+
+        //set the input of the new assign-operator to the input of the select-operator.
+        assignOperator.getInputs().add(childOfSelect);
+
+        //set the condition of the select-operator to the boolean variable produced by the assign-operator
+        selectOperator.getCondition().setValue(new VariableReferenceExpression(v));
+        selectOperator.getInputs().set(0, new MutableObject<ILogicalOperator>(assignOperator));
+
+        context.computeAndSetTypeEnvironmentForOperator(assignOperator);
+
+        //Once this rule is fired, don't apply again.
+        context.addToDontApplySet(this, selectOperator);
+        return true;
+    }
+}
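
For illustration only, a tiny self-contained model of the rewrite described in the comments above: the select condition is evaluated by a newly created assign that produces a boolean variable, and the select afterwards tests only that variable. The types and names below are invented for the sketch and are unrelated to the Algebricks operator model.

import java.util.function.Function;
import java.util.function.Predicate;

public class CommitByAssignSketch {

    public static void main(String[] args) {
        Predicate<int[]> cond = t -> t[0] > 10;              // condition of the select-operator

        // Before the rule: select(cond) evaluates the condition itself.
        Predicate<int[]> selectBefore = cond;

        // After the rule: assign $v := cond(t), then select($v) only inspects the boolean.
        Function<int[], Boolean> assign = t -> cond.test(t); // the new child assign-operator
        Predicate<int[]> selectAfter = t -> assign.apply(t); // select tests the boolean variable

        int[] tuple = { 42 };
        // The rewrite preserves the select's result for every tuple.
        System.out.println(selectBefore.test(tuple) == selectAfter.test(tuple)); // true
    }
}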

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnionRule.java
new file mode 100644
index 0000000..050a2cd
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnionRule.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * @author kereno, ecarm002, ildar.absalyamov
+ *         Generates a union operator and substitutes it for "assign <- [function-call: asterix:union]"
+ *         Before rule:
+ *         ============
+ *         assign [var] <- [asterix:union(left_branch, right_branch)]
+ *         join (TRUE)
+ *         left_branch
+ *         right_branch
+ *         After rule:
+ *         ============
+ *         union (left_branch, right_branch, result_var)
+ *         left_branch
+ *         right_branch
+ */
+public class IntroduceUnionRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        if (!opRef.getValue().getOperatorTag().equals(LogicalOperatorTag.ASSIGN)) {
+            return false;
+        }
+
+        AssignOperator assignUnion = (AssignOperator) opRef.getValue();
+
+        if (assignUnion.getExpressions().get(0).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL)
+            return false;
+
+        AbstractFunctionCallExpression u = (AbstractFunctionCallExpression) assignUnion.getExpressions().get(0)
+                .getValue();
+        if (!AsterixBuiltinFunctions.UNION.equals(u.getFunctionIdentifier())) {
+            return false;
+        }
+
+        //Retrieving the logical variables for the union from the two aggregates which are inputs to the join
+        Mutable<ILogicalOperator> join = assignUnion.getInputs().get(0);
+
+        LogicalOperatorTag tag1 = join.getValue().getOperatorTag();
+        if (tag1 != LogicalOperatorTag.INNERJOIN && tag1 != LogicalOperatorTag.LEFTOUTERJOIN) {
+            return false;
+        }
+        AbstractBinaryJoinOperator join1 = (AbstractBinaryJoinOperator) join.getValue();
+        ILogicalExpression cond1 = join1.getCondition().getValue();
+        // don't try to push a product down
+        if (!OperatorPropertiesUtil.isAlwaysTrueCond(cond1)) {
+            return false;
+        }
+
+        List<Mutable<ILogicalOperator>> joinInputs = join.getValue().getInputs();
+
+        Mutable<ILogicalOperator> left_branch = joinInputs.get(0);
+        Mutable<ILogicalOperator> right_branch = joinInputs.get(1);
+
+        List<LogicalVariable> input1Var = new ArrayList<LogicalVariable>();
+        VariableUtilities.getProducedVariables(left_branch.getValue(), input1Var);
+
+        List<LogicalVariable> input2Var = new ArrayList<LogicalVariable>();
+        VariableUtilities.getProducedVariables(right_branch.getValue(), input2Var);
+
+        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(
+                1);
+        Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
+                input1Var.get(0), input2Var.get(0), assignUnion.getVariables().get(0));
+        varMap.add(triple);
+        UnionAllOperator unionOp = new UnionAllOperator(varMap);
+
+        unionOp.getInputs().add(left_branch);
+        unionOp.getInputs().add(right_branch);
+
+        context.computeAndSetTypeEnvironmentForOperator(unionOp);
+
+        opRef.setValue(unionOp);
+
+        return true;
+
+    }
+}
\ No newline at end of file
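
For illustration only, a minimal sketch of the semantics that the rewrite above establishes: after the rule fires, the result variable of the union simply ranges over the concatenation of the two branch outputs, with no join-on-TRUE or assign of asterix:union in between. Plain Java collections stand in for the branch outputs; nothing here is an Algebricks API.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class UnionAllSketch {

    public static void main(String[] args) {
        List<String> leftBranch = Arrays.asList("a", "b"); // tuples produced by left_branch
        List<String> rightBranch = Arrays.asList("c");     // tuples produced by right_branch

        // union (left_branch, right_branch, result_var): result_var is bound to the
        // tuples of both branches, mapped positionally as in the Triple above.
        List<String> resultVar = Stream.concat(leftBranch.stream(), rightBranch.stream())
                .collect(Collectors.toList());

        System.out.println(resultVar); // [a, b, c]
    }
}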

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java
new file mode 100644
index 0000000..5943911
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule introduces an unnest operator for the collection-to-sequence function (if the input to the function is a collection).
+ *
+ * @author yingyib
+ */
+public class IntroduceUnnestForCollectionToSequenceRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator assign = (AssignOperator) op;
+        List<Mutable<ILogicalExpression>> exprs = assign.getExpressions();
+        if (exprs.size() != 1) {
+            return false;
+        }
+        ILogicalExpression expr = exprs.get(0).getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression func = (AbstractFunctionCallExpression) expr;
+        if (func.getFunctionIdentifier() != AsterixBuiltinFunctions.COLLECTION_TO_SEQUENCE) {
+            return false;
+        }
+
+        IVariableTypeEnvironment env = assign.computeInputTypeEnvironment(context);
+        ILogicalExpression argExpr = func.getArguments().get(0).getValue();
+        IAType outerExprType = (IAType) env.getType(expr);
+        IAType innerExprType = (IAType) env.getType(argExpr);
+        if (outerExprType.equals(innerExprType)) {
+            /** the collection-to-sequence function changes nothing, so remove the function call */
+            assign.getExpressions().set(0, new MutableObject<ILogicalExpression>(argExpr));
+            return true;
+        }
+        /** change the assign operator to an unnest operator */
+        LogicalVariable var = assign.getVariables().get(0);
+        @SuppressWarnings("unchecked")
+        UnnestOperator unnest = new UnnestOperator(var, new MutableObject<ILogicalExpression>(
+                new UnnestingFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
+                        new MutableObject<ILogicalExpression>(argExpr))));
+        unnest.getInputs().addAll(assign.getInputs());
+        opRef.setValue(unnest);
+        context.computeAndSetTypeEnvironmentForOperator(unnest);
+        return true;
+    }
+}
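
For illustration only, a small self-contained analogue of collection-to-sequence as handled by the rule above: when the argument evaluates to a collection, its items are unnested into a sequence of values; otherwise the call is a no-op and can be dropped. Plain Java stand-ins; none of the names below are AsterixDB runtime APIs.

import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class CollectionToSequenceSketch {

    // Collection input behaves like unnest(scan-collection(value));
    // a non-collection value passes through unchanged.
    static Stream<Object> collectionToSequence(Object value) {
        if (value instanceof Collection) {
            return ((Collection<?>) value).stream().map(o -> (Object) o);
        }
        return Stream.of(value);
    }

    public static void main(String[] args) {
        System.out.println(collectionToSequence(Arrays.asList(1, 2, 3)).collect(Collectors.toList())); // [1, 2, 3]
        System.out.println(collectionToSequence("x").collect(Collectors.toList()));                    // [x]
    }
}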

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
new file mode 100644
index 0000000..54e470c
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
@@ -0,0 +1,368 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
+import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class LoadRecordFieldsRule implements IAlgebraicRewriteRule {
+
+    private ExtractFieldLoadExpressionVisitor exprVisitor = new ExtractFieldLoadExpressionVisitor();
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op1)) {
+            return false;
+        }
+
+        if (op1.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator a1 = (AssignOperator) op1;
+            ILogicalExpression expr = getFirstExpr(a1);
+            if (AnalysisUtil.isAccessToFieldRecord(expr)) {
+                boolean res = findAndEliminateRedundantFieldAccess(a1);
+                context.addToDontApplySet(this, op1);
+                return res;
+            }
+        }
+        exprVisitor.setTopOp(op1);
+        exprVisitor.setContext(context);
+        boolean res = op1.acceptExpressionTransform(exprVisitor);
+        if (!res) {
+            context.addToDontApplySet(this, op1);
+        }
+        if (res && op1.getOperatorTag() == LogicalOperatorTag.SELECT) {
+            // checking if we can annotate a Selection as using just one field
+            // access
+            SelectOperator sigma = (SelectOperator) op1;
+            LinkedList<LogicalVariable> vars = new LinkedList<LogicalVariable>();
+            VariableUtilities.getUsedVariables(sigma, vars);
+            if (vars.size() == 1) {
+                // we can annotate Selection
+                AssignOperator assign1 = (AssignOperator) op1.getInputs().get(0).getValue();
+                AbstractLogicalExpression expr1 = (AbstractLogicalExpression) getFirstExpr(assign1);
+                if (expr1.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr1;
+                    // f should be a call to a field/data access kind of
+                    // function
+                    sigma.getAnnotations().put(AsterixOperatorAnnotations.FIELD_ACCESS, f.getArguments().get(0));
+                }
+            }
+        }
+
+        // TODO: avoid having to recompute type env. here
+        if (res) {
+            OperatorPropertiesUtil.typeOpRec(opRef, context);
+        }
+        return res;
+    }
+
+    private static boolean pushFieldLoads(Mutable<ILogicalExpression> exprRef, AbstractLogicalOperator topOp,
+            IOptimizationContext context) throws AlgebricksException {
+        ILogicalExpression expr = exprRef.getValue();
+        if (expr == null) {
+            return false;
+        }
+        switch (expr.getExpressionTag()) {
+            case FUNCTION_CALL: {
+                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
+                FunctionIdentifier fi = f.getFunctionIdentifier();
+                if (AlgebricksBuiltinFunctions.isComparisonFunction(fi)) {
+                    boolean b1 = pushFieldLoads(f.getArguments().get(0), topOp, context);
+                    boolean b2 = pushFieldLoads(f.getArguments().get(1), topOp, context);
+                    return b1 || b2;
+                }
+                if (fi.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
+                    if (AnalysisUtil.numberOfVarsInExpr(f) == 0) {
+                        return false;
+                    }
+                    // create an assign
+                    LogicalVariable v = context.newVar();
+                    AssignOperator a2 = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
+                    pushFieldAssign(a2, topOp, context);
+                    context.computeAndSetTypeEnvironmentForOperator(a2);
+                    ILogicalExpression arg = f.getArguments().get(0).getValue();
+                    if (arg.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                        VariableReferenceExpression ref = (VariableReferenceExpression) arg;
+                        LogicalVariable var = ref.getVariableReference();
+                        List<LogicalVariable> keys = context.findPrimaryKey(var);
+                        if (keys != null) {
+                            List<LogicalVariable> tail = new ArrayList<LogicalVariable>();
+                            tail.add(v);
+                            FunctionalDependency pk = new FunctionalDependency(keys, tail);
+                            context.addPrimaryKey(pk);
+                        }
+                    }
+                    exprRef.setValue(new VariableReferenceExpression(v));
+                    return true;
+                } else {
+                    boolean pushed = false;
+                    for (Mutable<ILogicalExpression> argRef : f.getArguments()) {
+                        if (pushFieldLoads(argRef, topOp, context)) {
+                            pushed = true;
+                        }
+                    }
+                    return pushed;
+                }
+            }
+            case CONSTANT:
+            case VARIABLE: {
+                return false;
+            }
+            default: {
+                assert false;
+                throw new IllegalArgumentException();
+            }
+        }
+    }
+
+    private static void pushFieldAssign(AssignOperator a2, AbstractLogicalOperator topOp, IOptimizationContext context)
+            throws AlgebricksException {
+        if (topOp.getInputs().size() == 1 && !topOp.hasNestedPlans()) {
+            Mutable<ILogicalOperator> topChild = topOp.getInputs().get(0);
+            // plugAccessAboveOp(a2, topChild, context);
+            List<Mutable<ILogicalOperator>> a2InptList = a2.getInputs();
+            a2InptList.clear();
+            a2InptList.add(topChild);
+            // and link it as child in the op. tree
+            topOp.getInputs().set(0, new MutableObject<ILogicalOperator>(a2));
+            findAndEliminateRedundantFieldAccess(a2);
+        } else { // e.g., a join
+            LinkedList<LogicalVariable> usedInAccess = new LinkedList<LogicalVariable>();
+            VariableUtilities.getUsedVariables(a2, usedInAccess);
+
+            LinkedList<LogicalVariable> produced2 = new LinkedList<LogicalVariable>();
+            VariableUtilities.getProducedVariables(topOp, produced2);
+
+            if (OperatorPropertiesUtil.disjoint(produced2, usedInAccess)) {
+                for (Mutable<ILogicalOperator> inp : topOp.getInputs()) {
+                    HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
+                    VariableUtilities.getLiveVariables(inp.getValue(), v2);
+                    if (!OperatorPropertiesUtil.disjoint(usedInAccess, v2)) {
+                        pushAccessAboveOpRef(a2, inp, context);
+                        return;
+                    }
+                }
+                if (topOp.hasNestedPlans()) {
+                    AbstractOperatorWithNestedPlans nestedOp = (AbstractOperatorWithNestedPlans) topOp;
+                    for (ILogicalPlan plan : nestedOp.getNestedPlans()) {
+                        for (Mutable<ILogicalOperator> root : plan.getRoots()) {
+                            HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
+                            VariableUtilities.getLiveVariables(root.getValue(), v2);
+                            if (!OperatorPropertiesUtil.disjoint(usedInAccess, v2)) {
+                                pushAccessAboveOpRef(a2, root, context);
+                                return;
+                            }
+                        }
+                    }
+                }
+                throw new AsterixRuntimeException("Field access " + getFirstExpr(a2)
+                        + " does not correspond to any input of operator " + topOp);
+            }
+        }
+    }
+
+    /**
+     * Pushes one field-access assignment above toPushThroughChildRef
+     * 
+     * @param toPush
+     * @param toPushThroughChildRef
+     */
+    private static void pushAccessAboveOpRef(AssignOperator toPush, Mutable<ILogicalOperator> toPushThroughChildRef,
+            IOptimizationContext context) throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> tpInpList = toPush.getInputs();
+        tpInpList.clear();
+        tpInpList.add(new MutableObject<ILogicalOperator>(toPushThroughChildRef.getValue()));
+        toPushThroughChildRef.setValue(toPush);
+        findAndEliminateRedundantFieldAccess(toPush);
+    }
+
+    /**
+     * Rewrite
+     * assign $x := field-access($y, "field")
+     * assign $y := record-constructor { "field": Expr, ... }
+     * into
+     * assign $x := Expr
+     * assign $y := record-constructor { "field": Expr, ... }
+     * 
+     * @param toPush
+     */
+    private static boolean findAndEliminateRedundantFieldAccess(AssignOperator assign) throws AlgebricksException {
+        ILogicalExpression expr = getFirstExpr(assign);
+        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
+        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
+        if (arg0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+        VariableReferenceExpression vre = (VariableReferenceExpression) arg0;
+        LogicalVariable recordVar = vre.getVariableReference();
+        ILogicalExpression arg1 = f.getArguments().get(1).getValue();
+        if (arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            return false;
+        }
+        ConstantExpression ce = (ConstantExpression) arg1;
+        if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
+            String fldName = ((AString) ((AsterixConstantValue) ce.getValue()).getObject()).getStringValue();
+            ILogicalExpression fldExpr = findFieldExpression(assign, recordVar, fldName);
+
+            if (fldExpr != null) {
+                // check the liveness of the new expression
+                List<LogicalVariable> usedVariables = new ArrayList<LogicalVariable>();
+                fldExpr.getUsedVariables(usedVariables);
+                List<LogicalVariable> liveInputVars = new ArrayList<LogicalVariable>();
+                VariableUtilities.getLiveVariables(assign, liveInputVars);
+                usedVariables.removeAll(liveInputVars);
+                if (usedVariables.size() == 0) {
+                    assign.getExpressions().get(0).setValue(fldExpr);
+                    return true;
+                } else {
+                    return false;
+                }
+            } else {
+                return false;
+            }
+        } else if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
+            // int fldIdx = ((IntegerLiteral) ce.getValue()).getValue();
+            // TODO
+            return false;
+        } else if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED)) {
+            return false;
+        } else {
+            throw new IllegalStateException();
+        }
+    }
+
+    private static ILogicalExpression findFieldExpression(AbstractLogicalOperator op, LogicalVariable recordVar,
+            String fldName) {
+        for (Mutable<ILogicalOperator> child : op.getInputs()) {
+            AbstractLogicalOperator opChild = (AbstractLogicalOperator) child.getValue();
+            if (opChild.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                AssignOperator op2 = (AssignOperator) opChild;
+                int i = 0;
+                for (LogicalVariable var : op2.getVariables()) {
+                    if (var == recordVar) {
+                        AbstractLogicalExpression constr = (AbstractLogicalExpression) op2.getExpressions().get(i)
+                                .getValue();
+                        if (constr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                            return null;
+                        }
+                        AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) constr;
+                        if (!fce.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)
+                                && !fce.getFunctionIdentifier().equals(
+                                        AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
+                            return null;
+                        }
+                        Iterator<Mutable<ILogicalExpression>> fldIter = fce.getArguments().iterator();
+                        while (fldIter.hasNext()) {
+                            ILogicalExpression fldExpr = fldIter.next().getValue();
+                            if (fldExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+                                ConstantExpression ce = (ConstantExpression) fldExpr;
+                                String f2 = ((AString) ((AsterixConstantValue) ce.getValue()).getObject())
+                                        .getStringValue();
+                                if (fldName.equals(f2)) {
+                                    return fldIter.next().getValue();
+                                }
+                            }
+                            fldIter.next();
+                        }
+                        return null;
+                    }
+                    i++;
+                }
+            } else if (opChild.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
+                NestedTupleSourceOperator nts = (NestedTupleSourceOperator) opChild;
+                AbstractLogicalOperator opBelowNestedPlan = (AbstractLogicalOperator) nts.getDataSourceReference()
+                        .getValue().getInputs().get(0).getValue();
+                ILogicalExpression expr1 = findFieldExpression(opBelowNestedPlan, recordVar, fldName);
+                if (expr1 != null) {
+                    return expr1;
+                }
+            }
+            ILogicalExpression expr2 = findFieldExpression(opChild, recordVar, fldName);
+            if (expr2 != null) {
+                return expr2;
+            }
+        }
+        return null;
+    }
+
+    private final class ExtractFieldLoadExpressionVisitor implements ILogicalExpressionReferenceTransform {
+
+        private AbstractLogicalOperator topOp;
+        private IOptimizationContext context;
+
+        public void setTopOp(AbstractLogicalOperator topOp) {
+            this.topOp = topOp;
+        }
+
+        public void setContext(IOptimizationContext context) {
+            this.context = context;
+        }
+
+        @Override
+        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
+            return pushFieldLoads(exprRef, topOp, context);
+        }
+
+    }
+
+    private static ILogicalExpression getFirstExpr(AssignOperator assign) {
+        return assign.getExpressions().get(0).getValue();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java
new file mode 100644
index 0000000..3ae35bc
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
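+/**
+ * Moves a subplan whose only input is the collection listified by the group-by directly
+ * below it (consumed through a scan-collection unnest of the listified variable) into
+ * that group-by as an additional nested plan, so its aggregation runs over the group's
+ * tuples instead of over the listified collection.
+ */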
+public class NestGroupByRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
+            return false;
+        }
+        SubplanOperator subplan = (SubplanOperator) op1;
+        if (subplan.getNestedPlans().size() != 1) {
+            return false;
+        }
+        ILogicalPlan p = subplan.getNestedPlans().get(0);
+        if (p.getRoots().size() != 1) {
+            return false;
+        }
+
+        Set<LogicalVariable> free = new HashSet<LogicalVariable>();
+        OperatorPropertiesUtil.getFreeVariablesInSubplans(subplan, free);
+        if (free.size() != 1) {
+            return false;
+        }
+        LogicalVariable fVar = null;
+        for (LogicalVariable v : free) {
+            fVar = v;
+            break;
+        }
+
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
+        if (op2.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        GroupByOperator gby = (GroupByOperator) op2;
+        if (gby.getNestedPlans().size() != 1) {
+            return false;
+        }
+        ILogicalPlan p2 = gby.getNestedPlans().get(0);
+        if (p2.getRoots().size() != 1) {
+            return false;
+        }
+        Mutable<ILogicalOperator> r2 = p2.getRoots().get(0);
+        AbstractLogicalOperator opr2 = (AbstractLogicalOperator) r2.getValue();
+        if (opr2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AggregateOperator aggOuter = (AggregateOperator) opr2;
+        int posInAggList = aggOuter.getVariables().indexOf(fVar);
+        if (posInAggList < 0) {
+            return false;
+        }
+        AbstractLogicalOperator outerAggSon = (AbstractLogicalOperator) aggOuter.getInputs().get(0).getValue();
+        if (outerAggSon.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
+            return false;
+        }
+        ILogicalExpression eAgg = aggOuter.getExpressions().get(posInAggList).getValue();
+        if (eAgg.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression listifyCall = (AbstractFunctionCallExpression) eAgg;
+        if (listifyCall.getFunctionIdentifier() != AsterixBuiltinFunctions.LISTIFY) {
+            return false;
+        }
+        ILogicalExpression argListify = listifyCall.getArguments().get(0).getValue();
+        if (argListify.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+
+        Mutable<ILogicalOperator> r = p.getRoots().get(0);
+        AbstractLogicalOperator opInS = (AbstractLogicalOperator) r.getValue();
+        if (opInS.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AggregateOperator aggInner = (AggregateOperator) opInS;
+        do {
+            opInS = (AbstractLogicalOperator) opInS.getInputs().get(0).getValue();
+        } while (opInS.getOperatorTag() == LogicalOperatorTag.ASSIGN);
+        if (opInS.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        AbstractLogicalOperator unnestParent = opInS;
+        AbstractLogicalOperator opUnder = (AbstractLogicalOperator) opInS.getInputs().get(0).getValue();
+        // skip Assigns
+        while (opUnder.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            unnestParent = opUnder;
+            opUnder = (AbstractLogicalOperator) opUnder.getInputs().get(0).getValue();
+        }
+        if (opUnder.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+            return false;
+        }
+        UnnestOperator unnest = (UnnestOperator) opUnder;
+        AbstractLogicalOperator unnestSon = (AbstractLogicalOperator) unnest.getInputs().get(0).getValue();
+        if (unnestSon.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
+            return false;
+        }
+        NestedTupleSourceOperator innerNts = (NestedTupleSourceOperator) unnestSon;
+
+        ILogicalExpression eUnnest = unnest.getExpressionRef().getValue();
+        if (eUnnest.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression uf = (AbstractFunctionCallExpression) eUnnest;
+        if (uf.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
+            return false;
+        }
+        ILogicalExpression scanArg = uf.getArguments().get(0).getValue();
+        if (scanArg.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+        if (((VariableReferenceExpression) scanArg).getVariableReference() != fVar) {
+            return false;
+        }
+        LogicalVariable uVar = unnest.getVariable();
+        GroupByOperator innerGby = (GroupByOperator) opInS;
+        Set<LogicalVariable> freeInInnerGby = new HashSet<LogicalVariable>();
+        OperatorPropertiesUtil.getFreeVariablesInSubplans(innerGby, freeInInnerGby);
+        for (LogicalVariable v : freeInInnerGby) {
+            if (v != uVar) {
+                return false;
+            }
+        }
+
+        unnestParent.getInputs().get(0).setValue(innerNts);
+        LogicalVariable listifiedVar = ((VariableReferenceExpression) argListify).getVariableReference();
+        substInSubplan(aggInner, uVar, listifiedVar, context);
+        gby.getNestedPlans().add(p);
+        innerNts.getDataSourceReference().setValue(gby);
+        opRef.setValue(gby);
+        OperatorPropertiesUtil.typePlan(p, context);
+        OperatorPropertiesUtil.typePlan(p2, context);
+        context.computeAndSetTypeEnvironmentForOperator(gby);
+        return true;
+
+    }
+
+    private void substInSubplan(AggregateOperator aggInner, LogicalVariable v1, LogicalVariable v2,
+            IOptimizationContext context) throws AlgebricksException {
+        ILogicalOperator op = aggInner;
+        while (op.getInputs().size() == 1) {
+            VariableUtilities.substituteVariables(op, v1, v2, context);
+            op = op.getInputs().get(0).getValue();
+        }
+    }
+}



http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java b/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java
new file mode 100644
index 0000000..736a270
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.DataverseDecl;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aql.expression.SubscribeFeedStatement;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest.ConnectionStatus;
+import edu.uci.ics.asterix.common.feeds.api.IFeedWork;
+import edu.uci.ics.asterix.common.feeds.api.IFeedWorkEventListener;
+import edu.uci.ics.asterix.feeds.FeedCollectInfo;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.api.job.JobId;
+
+/**
+ * A collection of feed-management-related tasks, each represented as an implementation of {@code IFeedWork}.
+ */
+public class FeedWorkCollection {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedWorkCollection.class.getName());
+
+    /**
+     * The task of subscribing to a feed to obtain data.
+     */
+    public static class SubscribeFeedWork implements IFeedWork {
+
+        private final Runnable runnable;
+
+        private final FeedConnectionRequest request;
+
+        @Override
+        public Runnable getRunnable() {
+            return runnable;
+        }
+
+        public SubscribeFeedWork(String[] locations, FeedConnectionRequest request) {
+            this.runnable = new SubscribeFeedWorkRunnable(locations, request);
+            this.request = request;
+        }
+
+        private static class SubscribeFeedWorkRunnable implements Runnable {
+
+            private final FeedConnectionRequest request;
+            private final String[] locations;
+
+            public SubscribeFeedWorkRunnable(String[] locations, FeedConnectionRequest request) {
+                this.request = request;
+                this.locations = locations;
+            }
+
+            @Override
+            public void run() {
+                try {
+                    PrintWriter writer = new PrintWriter(System.out, true);
+                    SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+                    DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(request.getReceivingFeedId()
+                            .getDataverse()));
+                    SubscribeFeedStatement subscribeStmt = new SubscribeFeedStatement(locations, request);
+                    List<Statement> statements = new ArrayList<Statement>();
+                    statements.add(dataverseDecl);
+                    statements.add(subscribeStmt);
+                    AqlTranslator translator = new AqlTranslator(statements, pc);
+                    translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, AqlTranslator.ResultDelivery.SYNC);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Submitted connection requests for execution: " + request);
+                    }
+                } catch (Exception e) {
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Exception in executing " + request);
+                    }
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+
+        public static class FeedSubscribeWorkEventListener implements IFeedWorkEventListener {
+
+            @Override
+            public void workFailed(IFeedWork work, Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request
+                            + " failed with exception " + e);
+                }
+            }
+
+            @Override
+            public void workCompleted(IFeedWork work) {
+                ((SubscribeFeedWork) work).request.setSubscriptionStatus(ConnectionStatus.ACTIVE);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(" Feed subscription request " + ((SubscribeFeedWork) work).request + " completed ");
+                }
+            }
+
+        }
+
+        public FeedConnectionRequest getRequest() {
+            return request;
+        }
+
+        @Override
+        public String toString() {
+            return "SubscribeFeedWork for [" + request + "]";
+        }
+
+    }
+
+    /**
+     * The task of activating a set of feeds.
+     */
+    public static class ActivateFeedWork implements IFeedWork {
+
+        private final Runnable runnable;
+
+        @Override
+        public Runnable getRunnable() {
+            return runnable;
+        }
+
+        public ActivateFeedWork(List<FeedCollectInfo> feedsToRevive) {
+            this.runnable = new FeedsActivateRunnable(feedsToRevive);
+        }
+
+        public ActivateFeedWork() {
+            this.runnable = new FeedsActivateRunnable();
+        }
+
+        private static class FeedsActivateRunnable implements Runnable {
+
+            private List<FeedCollectInfo> feedsToRevive;
+            private Mode mode;
+
+            public enum Mode {
+                REVIVAL_POST_NODE_REJOIN
+            }
+
+            public FeedsActivateRunnable(List<FeedCollectInfo> feedsToRevive) {
+                this.feedsToRevive = feedsToRevive;
+                // this constructor is only used to revive feeds after a node rejoins
+                this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
+            }
+
+            public FeedsActivateRunnable() {
+            }
+
+            @Override
+            public void run() {
+                if (mode == null) {
+                    // no revival mode was requested; nothing to do
+                    return;
+                }
+                switch (mode) {
+                    case REVIVAL_POST_NODE_REJOIN:
+                        try {
+                            Thread.sleep(10000);
+                        } catch (InterruptedException e1) {
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Attempt to resume feed interrupted");
+                            }
+                            throw new IllegalStateException(e1.getMessage());
+                        }
+                        for (FeedCollectInfo finfo : feedsToRevive) {
+                            try {
+                                JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
+                                if (LOGGER.isLoggable(Level.INFO)) {
+                                    LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
+                                    LOGGER.info("Job:" + finfo.jobSpec);
+                                }
+                            } catch (Exception e) {
+                                if (LOGGER.isLoggable(Level.WARNING)) {
+                                    LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " "
+                                            + e.getMessage());
+                                }
+                            }
+                        }
+                        }
+                        break;
+                }
+
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/Job.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/Job.java b/asterix-app/src/main/java/org/apache/asterix/api/common/Job.java
new file mode 100644
index 0000000..f45a6c0
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/Job.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class Job {
+
+    public enum SubmissionMode {
+        SYNCHRONOUS,
+        ASYNCHRONOUS
+    }
+
+    private final JobSpecification jobSpec;
+    private final SubmissionMode submissionMode;
+
+    public Job(JobSpecification jobSpecification, SubmissionMode submissionMode) {
+        this.jobSpec = jobSpecification;
+        this.submissionMode = submissionMode;
+    }
+
+    public Job(JobSpecification jobSpec) {
+        this.jobSpec = jobSpec;
+        this.submissionMode = SubmissionMode.SYNCHRONOUS;
+    }
+
+    public JobSpecification getJobSpec() {
+        return jobSpec;
+    }
+
+    public SubmissionMode getSubmissionMode() {
+        return submissionMode;
+    }
+
+}
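
For reference, a minimal usage sketch of the Job holder introduced above (not part of the
change set); the empty JobSpecification is only a placeholder and assumes the no-argument
JobSpecification constructor from Hyracks:

    import edu.uci.ics.asterix.api.common.Job;
    import edu.uci.ics.asterix.api.common.Job.SubmissionMode;
    import edu.uci.ics.hyracks.api.job.JobSpecification;

    public class JobUsageSketch {
        public static void main(String[] args) {
            // Placeholder spec; in practice this comes out of the AQL compiler.
            JobSpecification spec = new JobSpecification();
            Job asyncJob = new Job(spec, SubmissionMode.ASYNCHRONOUS);
            Job defaultJob = new Job(spec); // submission mode defaults to SYNCHRONOUS
            System.out.println(asyncJob.getSubmissionMode());   // ASYNCHRONOUS
            System.out.println(defaultJob.getSubmissionMode()); // SYNCHRONOUS
        }
    }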

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/SessionConfig.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/SessionConfig.java b/asterix-app/src/main/java/org/apache/asterix/api/common/SessionConfig.java
new file mode 100644
index 0000000..27d981e
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/SessionConfig.java
@@ -0,0 +1,201 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.PrintWriter;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * SessionConfig captures several different parameters for controlling
+ * the execution of an APIFramework call.
+ * <li> It specifies how the execution will proceed (for instance,
+ * whether to optimize, or whether to execute at all).
+ * <li> It allows you to specify where the primary execution output will
+ * be sent.
+ * <li> It also allows you to request additional output for optional
+ * out-of-band data about the execution (query plan, etc).
+ * <li> It allows you to specify the output format for the primary
+ * execution output - JSON, CSV, etc.
+ * <li> It allows you to specify output format-specific parameters.
+ */
+
+public class SessionConfig {
+    /**
+     * Used to specify the output format for the primary execution.
+     */
+    public enum OutputFormat {
+        ADM,
+        JSON,
+        CSV
+    };
+
+    /**
+     * Produce out-of-band output for Hyracks Job.
+     */
+    public static final String OOB_HYRACKS_JOB = "oob-hyracks-job";
+
+    /**
+     * Produce out-of-band output for Expression Tree.
+     */
+    public static final String OOB_EXPR_TREE = "oob-expr-tree";
+
+    /**
+     * Produce out-of-band output for Rewritten Expression Tree.
+     */
+    public static final String OOB_REWRITTEN_EXPR_TREE = "oob-rewritten-expr-tree";
+
+    /**
+     * Produce out-of-band output for Logical Plan.
+     */
+    public static final String OOB_LOGICAL_PLAN = "oob-logical-plan";
+
+    /**
+     * Produce out-of-band output for Optimized Logical Plan.
+     */
+    public static final String OOB_OPTIMIZED_LOGICAL_PLAN = "oob-optimized-logical-plan";
+
+    /**
+     * Format flag: print only physical ops (for optimizer tests).
+     */
+    public static final String FORMAT_ONLY_PHYSICAL_OPS = "format-only-physical-ops";
+
+    /**
+     * Format flag: wrap out-of-band data in HTML.
+     */
+    public static final String FORMAT_HTML = "format-html";
+
+    /**
+     * Format flag: print CSV header line.
+     */
+    public static final String FORMAT_CSV_HEADER = "format-csv-header";
+
+    // Standard execution flags.
+    private final boolean executeQuery;
+    private final boolean generateJobSpec;
+    private final boolean optimize;
+
+    // Output path for primary execution.
+    private final PrintWriter out;
+
+    // Output format.
+    private final OutputFormat fmt;
+
+    // Flags.
+    private final Map<String,Boolean> flags;
+
+    /**
+     * Create a SessionConfig object with all default values:
+     *
+     * - All format flags set to "false".
+     * - All out-of-band outputs set to "null".
+     * - "Optimize" set to "true".
+     * - "Execute Query" set to "true".
+     * - "Generate Job Spec" set to "true".
+     * @param out PrintWriter for execution output.
+     * @param fmt Output format for execution output.
+     */
+    public SessionConfig(PrintWriter out, OutputFormat fmt) {
+        this(out, fmt, true, true, true);
+    }
+
+    /**
+     * Create a SessionConfig object with all optional values set to defaults:
+     *
+     * - All format flags set to "false".
+     * - All out-of-band outputs set to "false".
+     * @param out PrintWriter for execution output.
+     * @param fmt Output format for execution output.
+     * @param optimize Whether to optimize the execution.
+     * @param executeQuery Whether to execute the query or not.
+     * @param generateJobSpec Whether to generate the Hyracks job specification (if
+     *    false, job cannot be executed).
+     */
+    public SessionConfig(PrintWriter out, OutputFormat fmt, boolean optimize, boolean executeQuery, boolean generateJobSpec) {
+        this.out = out;
+        this.fmt = fmt;
+        this.optimize = optimize;
+        this.executeQuery = executeQuery;
+        this.generateJobSpec = generateJobSpec;
+        this.flags = new HashMap<String,Boolean>();
+    }
+
+    /**
+     * Retrieve the PrintWriter to produce output to.
+     */
+    public PrintWriter out() {
+        return this.out;
+    }
+
+    /**
+     * Retrieve the OutputFormat for this execution.
+     */
+    public OutputFormat fmt() {
+        return this.fmt;
+    }
+
+    /**
+     * Retrieve the value of the "execute query" flag.
+     */
+    public boolean isExecuteQuery() {
+        return executeQuery;
+    }
+
+    /**
+     * Retrieve the value of the "optimize" flag.
+     */
+    public boolean isOptimize() {
+        return optimize;
+    }
+
+    /**
+     * Retrieve the value of the "generate job spec" flag.
+     */
+    public boolean isGenerateJobSpec() {
+        return generateJobSpec;
+    }
+
+    /**
+     * Specify all out-of-band settings at once. For convenience of older code.
+     */
+    public void setOOBData(boolean expr_tree, boolean rewritten_expr_tree,
+                           boolean logical_plan, boolean optimized_logical_plan,
+                           boolean hyracks_job) {
+        this.set(OOB_EXPR_TREE, expr_tree);
+        this.set(OOB_REWRITTEN_EXPR_TREE, rewritten_expr_tree);
+        this.set(OOB_LOGICAL_PLAN, logical_plan);
+        this.set(OOB_OPTIMIZED_LOGICAL_PLAN, optimized_logical_plan);
+        this.set(OOB_HYRACKS_JOB, hyracks_job);
+    }
+
+    /**
+     * Specify a flag.
+     * @param flag One of the OOB_ or FORMAT_ constants from this class.
+     * @param value Value for the flag (all flags default to "false").
+     */
+    public void set(String flag, boolean value) {
+        flags.put(flag, Boolean.valueOf(value));
+    }
+
+    /**
+     * Retrieve the setting of a format-specific flag.
+     * @param flag One of the FORMAT_ constants from this class.
+     * @returns true or false (all flags default to "false").
+     * @return true or false (all flags default to "false").
+    public boolean is(String flag) {
+        Boolean value = flags.get(flag);
+        return value == null ? false : value.booleanValue();
+    }
+}
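
A small usage sketch of the SessionConfig API above; the JSON output format and the
particular flags chosen here are illustrative only:

    import java.io.PrintWriter;

    import edu.uci.ics.asterix.api.common.SessionConfig;
    import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;

    public class SessionConfigSketch {
        public static void main(String[] args) {
            // Primary output goes to stdout as JSON; optimize, execute-query and
            // generate-job-spec keep their defaults (all true).
            PrintWriter out = new PrintWriter(System.out, true);
            SessionConfig conf = new SessionConfig(out, OutputFormat.JSON);

            // Request the optimized logical plan out-of-band and HTML-wrapped output.
            conf.set(SessionConfig.OOB_OPTIMIZED_LOGICAL_PLAN, true);
            conf.set(SessionConfig.FORMAT_HTML, true);

            System.out.println(conf.is(SessionConfig.FORMAT_HTML));       // true
            System.out.println(conf.is(SessionConfig.FORMAT_CSV_HEADER)); // false (default)
        }
    }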

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java
new file mode 100644
index 0000000..4984741
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.awt.image.BufferedImage;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.List;
+import java.util.logging.Level;
+
+import javax.imageio.ImageIO;
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.parser.AQLParser;
+import edu.uci.ics.asterix.aql.parser.ParseException;
+import edu.uci.ics.asterix.aql.parser.TokenMgrError;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.result.ResultReader;
+import edu.uci.ics.asterix.result.ResultUtils;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
+
+public class APIServlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
+
+    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
+
+    @Override
+    public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
+        OutputFormat format;
+        boolean csv_and_header = false;
+        String output = request.getParameter("output-format");
+        if ("ADM".equals(output)) {
+            format = OutputFormat.ADM;
+        } else if ("CSV".equals(output)) {
+            format = OutputFormat.CSV;
+        } else if ("CSV-Header".equals(output)) {
+            format = OutputFormat.CSV;
+            csv_and_header = true;
+        } else {
+            // Default output format, also used when the parameter is missing
+            format = OutputFormat.JSON;
+        }
+
+        String query = request.getParameter("query");
+        String printExprParam = request.getParameter("print-expr-tree");
+        String printRewrittenExprParam = request.getParameter("print-rewritten-expr-tree");
+        String printLogicalPlanParam = request.getParameter("print-logical-plan");
+        String printOptimizedLogicalPlanParam = request.getParameter("print-optimized-logical-plan");
+        String printJob = request.getParameter("print-job");
+        String executeQuery = request.getParameter("execute-query");
+        response.setCharacterEncoding("utf-8");
+        response.setContentType("text/html");
+        PrintWriter out = response.getWriter();
+        ServletContext context = getServletContext();
+        IHyracksClientConnection hcc;
+        IHyracksDataset hds;
+
+        try {
+            synchronized (context) {
+                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
+
+                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
+                if (hds == null) {
+                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
+                }
+            }
+            AQLParser parser = new AQLParser(query);
+            List<Statement> aqlStatements = parser.parse();
+            SessionConfig sessionConfig = new SessionConfig(out, format, true, isSet(executeQuery), true);
+            sessionConfig.set(SessionConfig.FORMAT_HTML, true);
+            sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csv_and_header);
+            sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam),
+                                     isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam),
+                                     isSet(printJob));
+            MetadataManager.INSTANCE.init();
+            AqlTranslator aqlTranslator = new AqlTranslator(aqlStatements, sessionConfig);
+            double duration = 0;
+            long startTime = System.currentTimeMillis();
+            aqlTranslator.compileAndExecute(hcc, hds, AqlTranslator.ResultDelivery.SYNC);
+            long endTime = System.currentTimeMillis();
+            duration = (endTime - startTime) / 1000.00;
+            out.println("<PRE>Duration of all jobs: " + duration + " sec</PRE>");
+        } catch (ParseException | TokenMgrError | edu.uci.ics.asterix.aqlplus.parser.TokenMgrError pe) {
+            GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
+            ResultUtils.webUIParseExceptionHandler(out, pe, query);
+        } catch (Exception e) {
+            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+            ResultUtils.webUIErrorHandler(out, e);
+        }
+    }
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+        String resourcePath = null;
+        String requestURI = request.getRequestURI();
+
+        if (requestURI.equals("/")) {
+            response.setContentType("text/html");
+            resourcePath = "/webui/querytemplate.html";
+        } else {
+            resourcePath = requestURI;
+        }
+
+        InputStream is = APIServlet.class.getResourceAsStream(resourcePath);
+        if (is == null) {
+            response.sendError(HttpServletResponse.SC_NOT_FOUND);
+            return;
+        }
+
+        // Special handler for font files and .png resources
+        if (resourcePath.endsWith(".png")) {
+
+            BufferedImage img = ImageIO.read(is);
+            OutputStream outputStream = response.getOutputStream();
+            String formatName = "png";
+            response.setContentType("image/png");
+            ImageIO.write(img, formatName, outputStream);
+            outputStream.close();
+            return;
+
+        }
+
+        response.setCharacterEncoding("utf-8");
+        InputStreamReader isr = new InputStreamReader(is);
+        StringBuilder sb = new StringBuilder();
+        BufferedReader br = new BufferedReader(isr);
+        String line = br.readLine();
+
+        while (line != null) {
+            sb.append(line);
+            line = br.readLine();
+        }
+
+        PrintWriter out = response.getWriter();
+        out.println(sb.toString());
+    }
+
+    private static boolean isSet(String requestParameter) {
+        return (requestParameter != null && requestParameter.equals("true"));
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/AQLAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/AQLAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/AQLAPIServlet.java
new file mode 100644
index 0000000..60e1261
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/AQLAPIServlet.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+
+public class AQLAPIServlet extends RESTAPIServlet {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final String AQL_STMT_PARAM_NAME = "aql";
+
+    private static final List<Kind> allowedStatements = new ArrayList<>();
+
+    static {
+        for (Kind k : Kind.values()) {
+            allowedStatements.add(k);
+        }
+    }
+
+    @Override
+    protected String getQueryParameter(HttpServletRequest request) {
+        return request.getParameter(AQL_STMT_PARAM_NAME);
+    }
+
+    @Override
+    protected List<Kind> getAllowedStatements() {
+        return allowedStatements;
+    }
+
+    @Override
+    protected String getErrorMessage() {
+        throw new IllegalStateException();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
new file mode 100644
index 0000000..267aac5
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.List;
+import java.util.Map;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.feeds.CentralFeedManager;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+
+/***
+ * The REST API that takes a dataverse name and a dataset name as the input
+ * and returns an array of file splits (IP, file-path) of the dataset in JSON.
+ * It is mostly used by external runtime, e.g., Pregelix or IMRU to pull data
+ * in parallel from existing AsterixDB datasets.
+ *
+ * @author yingyi
+ */
+public class ConnectorAPIServlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+        response.setContentType("text/html");
+        response.setCharacterEncoding("utf-8");
+        PrintWriter out = response.getWriter();
+        try {
+            JSONObject jsonResponse = new JSONObject();
+            String dataverseName = request.getParameter("dataverseName");
+            String datasetName = request.getParameter("datasetName");
+            if (dataverseName == null || datasetName == null) {
+                jsonResponse.put("error", "Parameter dataverseName or datasetName is null.");
+                out.write(jsonResponse.toString());
+                out.flush();
+                return;
+            }
+            ServletContext context = getServletContext();
+
+            IHyracksClientConnection hcc = null;
+            synchronized (context) {
+                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
+            }
+
+            // Metadata transaction begins.
+            MetadataManager.INSTANCE.init();
+            MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+
+            // Retrieves file splits of the dataset.
+            AqlMetadataProvider metadataProvider = new AqlMetadataProvider(null, CentralFeedManager.getInstance());
+            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+            if (dataset == null) {
+                jsonResponse.put("error", "Dataset " + datasetName + " does not exist in " + "dataverse "
+                        + dataverseName);
+                out.write(jsonResponse.toString());
+                out.flush();
+                return;
+            }
+            boolean temp = dataset.getDatasetDetails().isTemp();
+            FileSplit[] fileSplits = metadataProvider.splitsForDataset(mdTxnCtx, dataverseName, datasetName,
+                    datasetName, temp);
+            ARecordType recordType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+            List<List<String>> primaryKeys = DatasetUtils.getPartitioningKeys(dataset);
+            StringBuilder pkStrBuf = new StringBuilder();
+            for (List<String> keys : primaryKeys) {
+                for (String key : keys) {
+                    pkStrBuf.append(key).append(",");
+                }
+            }
+            pkStrBuf.delete(pkStrBuf.length() - 1, pkStrBuf.length());
+
+            // Constructs the returned json object.
+            formResponseObject(jsonResponse, fileSplits, recordType, pkStrBuf.toString(), hcc.getNodeControllerInfos());
+            // Metadata transaction commits.
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            // Writes file splits.
+            out.write(jsonResponse.toString());
+            out.flush();
+        } catch (Exception e) {
+            e.printStackTrace();
+            out.println(e.getMessage());
+            out.flush();
+            e.printStackTrace(out);
+        }
+    }
+
+    private void formResponseObject(JSONObject jsonResponse, FileSplit[] fileSplits, ARecordType recordType,
+            String primaryKeys, Map<String, NodeControllerInfo> nodeMap) throws Exception {
+        JSONArray partitions = new JSONArray();
+        // Adds a primary key.
+        jsonResponse.put("keys", primaryKeys);
+        // Adds record type.
+        jsonResponse.put("type", recordType.toJSON());
+        // Generates file partitions.
+        for (FileSplit split : fileSplits) {
+            String ipAddress = nodeMap.get(split.getNodeName()).getNetworkAddress().getAddress().toString();
+            String path = split.getLocalFile().getFile().getAbsolutePath();
+            FilePartition partition = new FilePartition(ipAddress, path);
+            partitions.put(partition.toJSONObject());
+        }
+        // Generates the response object which contains the splits.
+        jsonResponse.put("splits", partititons);
+    }
+}
+
+class FilePartition {
+    private final String ipAddress;
+    private final String path;
+
+    public FilePartition(String ipAddress, String path) {
+        this.ipAddress = ipAddress;
+        this.path = path;
+    }
+
+    public String getIPAddress() {
+        return ipAddress;
+    }
+
+    public String getPath() {
+        return path;
+    }
+
+    @Override
+    public String toString() {
+        return ipAddress + ":" + path;
+    }
+
+    public JSONObject toJSONObject() throws JSONException {
+        JSONObject partition = new JSONObject();
+        partition.put("ip", ipAddress);
+        partition.put("path", path);
+        return partition;
+    }
+}
\ No newline at end of file
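
For reference, a minimal client sketch for the connector servlet above. The servlet mapping and port (here /connector on 19002) and the dataverse/dataset names are assumptions, not part of this hunk; only the request parameters and the "keys"/"type"/"splits" fields of the JSON response come from the code shown.

    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import java.util.Scanner;

    import org.json.JSONArray;
    import org.json.JSONObject;

    public class ConnectorClientSketch {
        public static void main(String[] args) throws Exception {
            // Assumed endpoint; adjust to the actual servlet mapping of the deployment.
            String base = "http://localhost:19002/connector";
            String query = "dataverseName=" + URLEncoder.encode("TinySocial", "UTF-8")
                    + "&datasetName=" + URLEncoder.encode("FacebookUsers", "UTF-8");
            HttpURLConnection conn = (HttpURLConnection) new URL(base + "?" + query).openConnection();
            try (InputStream in = conn.getInputStream();
                    Scanner sc = new Scanner(in, StandardCharsets.UTF_8.name())) {
                String body = sc.useDelimiter("\\A").next();
                JSONObject response = new JSONObject(body);
                // "keys", "type" and "splits" mirror what formResponseObject() puts into the JSON.
                System.out.println("primary keys: " + response.getString("keys"));
                JSONArray splits = response.getJSONArray("splits");
                for (int i = 0; i < splits.length(); i++) {
                    JSONObject split = splits.getJSONObject(i);
                    System.out.println(split.getString("ip") + " -> " + split.getString("path"));
                }
            }
        }
    }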

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/DDLAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/DDLAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/DDLAPIServlet.java
new file mode 100644
index 0000000..e43a15a
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/DDLAPIServlet.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.util.Arrays;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+
+public class DDLAPIServlet extends RESTAPIServlet {
+    private static final long serialVersionUID = 1L;
+
+    protected String getQueryParameter(HttpServletRequest request) {
+        return request.getParameter("ddl");
+    }
+
+    protected List<Statement.Kind> getAllowedStatements() {
+        Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DATAVERSE_DROP, Kind.DATASET_DECL, Kind.NODEGROUP_DECL,
+                Kind.NODEGROUP_DROP, Kind.TYPE_DECL, Kind.TYPE_DROP, Kind.CREATE_INDEX, Kind.INDEX_DECL,
+                Kind.CREATE_DATAVERSE, Kind.DATASET_DROP, Kind.INDEX_DROP, Kind.CREATE_FUNCTION, Kind.FUNCTION_DROP,
+                Kind.CREATE_PRIMARY_FEED, Kind.CREATE_SECONDARY_FEED, Kind.DROP_FEED, Kind.CREATE_FEED_POLICY, Kind.DROP_FEED_POLICY };
+        return Arrays.asList(statementsArray);
+    }
+
+    protected String getErrorMessage() {
+        return "Invalid statement: Non-DDL statement %s to the DDL API.";
+    }
+}
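
DDLAPIServlet is one of several thin subclasses of RESTAPIServlet (added later in this commit) that only choose the request parameter carrying the statement text, the whitelist of allowed statement kinds, and the error format string. A further endpoint would follow the same pattern; the class below is a hypothetical illustration of that template, not part of the commit, and its kind list is only an example.

    package edu.uci.ics.asterix.api.http.servlet;

    import java.util.Arrays;
    import java.util.List;

    import javax.servlet.http.HttpServletRequest;

    import edu.uci.ics.asterix.aql.base.Statement;
    import edu.uci.ics.asterix.aql.base.Statement.Kind;

    // Hypothetical endpoint that accepts only queries plus SET/WRITE directives.
    public class ReadOnlyAPIServlet extends RESTAPIServlet {
        private static final long serialVersionUID = 1L;

        protected String getQueryParameter(HttpServletRequest request) {
            // The statement text arrives in a request parameter chosen by the subclass.
            return request.getParameter("query");
        }

        protected List<Statement.Kind> getAllowedStatements() {
            Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.QUERY, Kind.SET, Kind.WRITE };
            return Arrays.asList(statementsArray);
        }

        protected String getErrorMessage() {
            return "Invalid statement: statement %s is not accepted by this read-only API.";
        }
    }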

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
new file mode 100644
index 0000000..3fe1ff7
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.awt.image.BufferedImage;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.PrintWriter;
+import java.util.Collection;
+
+import javax.imageio.ImageIO;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import edu.uci.ics.asterix.common.feeds.FeedActivity;
+import edu.uci.ics.asterix.common.feeds.FeedActivity.FeedActivityDetails;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedId;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.feeds.CentralFeedManager;
+
+public class FeedServlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+        String resourcePath = null;
+        String requestURI = request.getRequestURI();
+
+        if (requestURI.equals("/")) {
+            response.setContentType("text/html");
+            resourcePath = "/feed/home.html";
+        } else {
+            resourcePath = requestURI;
+        }
+
+        InputStream is = FeedServlet.class.getResourceAsStream(resourcePath);
+        if (is == null) {
+            response.sendError(HttpServletResponse.SC_NOT_FOUND);
+            return;
+        }
+
+        // Special handler for font files and .png resources
+        if (resourcePath.endsWith(".png")) {
+
+            BufferedImage img = ImageIO.read(is);
+            OutputStream outputStream = response.getOutputStream();
+            String formatName = "png";
+            response.setContentType("image/png");
+            ImageIO.write(img, formatName, outputStream);
+            outputStream.close();
+            return;
+
+        }
+
+        response.setCharacterEncoding("utf-8");
+        InputStreamReader isr = new InputStreamReader(is);
+        StringBuilder sb = new StringBuilder();
+        BufferedReader br = new BufferedReader(isr);
+        String line = br.readLine();
+
+        while (line != null) {
+            sb.append(line + "\n");
+            line = br.readLine();
+        }
+
+        String outStr = null;
+        if (requestURI.startsWith("/webui/static")) {
+            outStr = sb.toString();
+        } else {
+            Collection<FeedActivity> lfa = CentralFeedManager.getInstance().getFeedLoadManager().getFeedActivities();
+            StringBuilder ldStr = new StringBuilder();
+            ldStr.append("<br />");
+            ldStr.append("<br />");
+            if (lfa == null || lfa.isEmpty()) {
+                ldStr.append("Currently there are no active feeds in AsterixDB");
+            } else {
+                ldStr.append("Active Feeds");
+            }
+            insertTable(ldStr, lfa);
+            outStr = String.format(sb.toString(), ldStr.toString());
+
+        }
+
+        PrintWriter out = response.getWriter();
+        out.println(outStr);
+    }
+
+    private void insertTable(StringBuilder html, Collection<FeedActivity> list) {
+        html.append("<table style=\"width:100%\">");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_FEED_NAME + "</th>");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_DATASET_NAME + "</th>");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_ACTIVE_SINCE + "</th>");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_INTAKE_STAGE + "</th>");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_COMPUTE_STAGE + "</th>");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_STORE_STAGE + "</th>");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_INTAKE_RATE + "</th>");
+        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_STORE_RATE + "</th>");
+        for (FeedActivity activity : list) {
+            insertRow(html, activity);
+        }
+        html.append("</table>");
+    }
+
+    private void insertRow(StringBuilder html, FeedActivity activity) {
+        String intake = activity.getFeedActivityDetails().get(FeedActivityDetails.INTAKE_LOCATIONS);
+        String compute = activity.getFeedActivityDetails().get(FeedActivityDetails.COMPUTE_LOCATIONS);
+        String store = activity.getFeedActivityDetails().get(FeedActivityDetails.STORAGE_LOCATIONS);
+
+        IFeedLoadManager loadManager = CentralFeedManager.getInstance().getFeedLoadManager();
+        FeedConnectionId connectionId = new FeedConnectionId(new FeedId(activity.getDataverseName(),
+                activity.getFeedName()), activity.getDatasetName());
+        int intakeRate = loadManager.getOutflowRate(connectionId, FeedRuntimeType.COLLECT) * intake.split(",").length;
+        int storeRate = loadManager.getOutflowRate(connectionId, FeedRuntimeType.STORE) * store.split(",").length;
+
+        html.append("<tr>");
+        html.append("<td>" + activity.getFeedName() + "</td>");
+        html.append("<td>" + activity.getDatasetName() + "</td>");
+        html.append("<td>" + activity.getConnectTimestamp() + "</td>");
+        //html.append("<td>" + insertLink(html, FeedDashboardServlet.getParameterizedURL(activity), "Details") + "</td>");
+        html.append("<td>" + intake + "</td>");
+        html.append("<td>" + compute + "</td>");
+        html.append("<td>" + store + "</td>");
+        String color = "black";
+        if (intakeRate > storeRate) {
+            color = "red";
+        }
+        if (intakeRate < 0) {
+            html.append("<td>" + "UNKNOWN" + "</td>");
+        } else {
+            html.append("<td>" + insertColoredText("" + intakeRate, color) + " rec/sec" + "</td>");
+        }
+        if (storeRate < 0) {
+            html.append("<td>" + "UNKNOWN" + "</td>");
+        } else {
+            html.append("<td>" + insertColoredText("" + storeRate, color) + " rec/sec" + "</td>");
+        }
+        html.append("</tr>");
+    }
+
+    private String insertLink(StringBuilder html, String url, String displayText) {
+        return ("<a href=\"" + url + "\">" + displayText + "</a>");
+    }
+
+    private String insertColoredText(String s, String color) {
+        return "<font color=\"" + color + "\">" + s + "</font>";
+    }
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
new file mode 100644
index 0000000..76e2614
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
+import edu.uci.ics.asterix.metadata.feeds.RemoteSocketMessageListener;
+
+public class FeedServletUtil {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedServletUtil.class.getName());
+    private static final char EOL = (char) "\n".getBytes()[0];
+
+    public static final class Constants {
+        public static final String TABLE_HEADER_FEED_NAME = "Feed";
+        public static final String TABLE_HEADER_DATASET_NAME = "Dataset";
+        public static final String TABLE_HEADER_ACTIVE_SINCE = "Timestamp";
+        public static final String TABLE_HEADER_INTAKE_STAGE = "Intake Stage";
+        public static final String TABLE_HEADER_COMPUTE_STAGE = "Compute Stage";
+        public static final String TABLE_HEADER_STORE_STAGE = "Store Stage";
+        public static final String TABLE_HEADER_INTAKE_RATE = "Intake";
+        public static final String TABLE_HEADER_STORE_RATE = "Store";
+    }
+
+    public static void initiateSubscription(FeedConnectionId feedId, String host, int port) throws IOException {
+        LinkedBlockingQueue<String> outbox = new LinkedBlockingQueue<String>();
+        int subscriptionPort = port + 1;
+        Socket sc = new Socket(host, subscriptionPort);
+        InputStream in = sc.getInputStream();
+
+        CharBuffer buffer = CharBuffer.allocate(50);
+        char ch = 0;
+        while (ch != EOL) {
+            buffer.put(ch);
+            ch = (char) in.read();
+        }
+        buffer.flip();
+        String s = new String(buffer.array());
+        int feedSubscriptionPort = Integer.parseInt(s.trim());
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Response from Super Feed Manager Report Service " + port + " will connect at " + host + " "
+                    + port);
+        }
+
+        // register the feed subscription queue with FeedLifecycleListener
+        FeedLifecycleListener.INSTANCE.registerFeedReportQueue(feedId, outbox);
+        RemoteSocketMessageListener listener = new RemoteSocketMessageListener(host, feedSubscriptionPort, outbox);
+        listener.start();
+    }
+}
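
The handshake in initiateSubscription is implicit in the code: the client connects to port + 1, reads characters up to a newline, and parses that line as the port on which the RemoteSocketMessageListener should connect. A minimal stand-in for the responding side, useful for exercising the client logic in isolation, could look like the sketch below; it is a test mock, not the actual Super Feed Manager report service, and both port numbers are arbitrary examples.

    import java.io.OutputStream;
    import java.net.ServerSocket;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    // Test stand-in: accepts one connection and announces a (hypothetical) subscription port.
    public class MockFeedReportService {
        public static void main(String[] args) throws Exception {
            int listenPort = 9001;      // corresponds to "port + 1" on the client side
            int announcedPort = 9090;   // port the client will hand to RemoteSocketMessageListener
            try (ServerSocket server = new ServerSocket(listenPort);
                    Socket client = server.accept();
                    OutputStream out = client.getOutputStream()) {
                // The client reads until '\n' and parses the trimmed line as an integer.
                out.write((announcedPort + "\n").getBytes(StandardCharsets.UTF_8));
                out.flush();
            }
        }
    }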

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/HyracksProperties.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/HyracksProperties.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/HyracksProperties.java
new file mode 100644
index 0000000..2805d7f
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/HyracksProperties.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+public class HyracksProperties {
+    private final InputStream is;
+
+    private final Properties properties;
+
+    private static final String HYRACKS_IP = "127.0.0.1";
+
+    private static final int HYRACKS_PORT = 1098;
+
+    public HyracksProperties() throws IOException {
+        is = HyracksProperties.class.getClassLoader().getResourceAsStream("hyracks-deployment.properties");
+        properties = new Properties();
+        properties.load(is);
+    }
+
+    public String getHyracksIPAddress() {
+        String strIP = properties.getProperty("cc.ip");
+        if (strIP == null) {
+            strIP = HYRACKS_IP;
+        }
+        return strIP;
+    }
+
+    public int getHyracksPort() {
+        String strPort = properties.getProperty("cc.port");
+        int port = HYRACKS_PORT;
+        if (strPort != null) {
+            port = Integer.parseInt(strPort);
+        }
+        return port;
+    }
+}
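
HyracksProperties layers the defaults 127.0.0.1:1098 over the optional cc.ip and cc.port keys of hyracks-deployment.properties. A brief usage sketch; it assumes the properties file is on the classpath, since the constructor loads it unconditionally:

    import java.io.IOException;

    import edu.uci.ics.asterix.api.http.servlet.HyracksProperties;

    // Prints the cluster-controller address; defaults apply only to missing keys.
    public class HyracksPropertiesExample {
        public static void main(String[] args) throws IOException {
            HyracksProperties hp = new HyracksProperties();
            System.out.println("CC at " + hp.getHyracksIPAddress() + ":" + hp.getHyracksPort());
        }
    }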

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryAPIServlet.java
new file mode 100644
index 0000000..c3dc1f9
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryAPIServlet.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.util.Arrays;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+
+public class QueryAPIServlet extends RESTAPIServlet {
+    private static final long serialVersionUID = 1L;
+
+    protected String getQueryParameter(HttpServletRequest request) {
+        return request.getParameter("query");
+    }
+
+    protected List<Statement.Kind> getAllowedStatements() {
+        Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.FUNCTION_DECL, Kind.QUERY, Kind.SET, Kind.WRITE, Kind.RUN };
+        return Arrays.asList(statementsArray);
+    }
+
+    protected String getErrorMessage() {
+        return "Invalid statement: Non-query statement %s to the query API.";
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryResultAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryResultAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryResultAPIServlet.java
new file mode 100644
index 0000000..3caed6b
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryResultAPIServlet.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.result.ResultReader;
+import edu.uci.ics.asterix.result.ResultUtils;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
+
+public class QueryResultAPIServlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
+
+    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+        response.setContentType("text/html");
+        response.setCharacterEncoding("utf-8");
+        String strHandle = request.getParameter("handle");
+        PrintWriter out = response.getWriter();
+        ServletContext context = getServletContext();
+        IHyracksClientConnection hcc;
+        IHyracksDataset hds;
+
+        try {
+            HyracksProperties hp = new HyracksProperties();
+            String strIP = hp.getHyracksIPAddress();
+            int port = hp.getHyracksPort();
+
+            synchronized (context) {
+                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
+                if (hcc == null) {
+                    hcc = new HyracksConnection(strIP, port);
+                    context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
+                }
+
+                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
+                if (hds == null) {
+                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
+                }
+            }
+            JSONObject handleObj = new JSONObject(strHandle);
+            JSONArray handle = handleObj.getJSONArray("handle");
+            JobId jobId = new JobId(handle.getLong(0));
+            ResultSetId rsId = new ResultSetId(handle.getLong(1));
+
+            ResultReader resultReader = new ResultReader(hcc, hds);
+            resultReader.open(jobId, rsId);
+
+            // QQQ The output format is determined by the initial
+            // query and cannot be modified here, so calling back to
+            // initResponse() is really an error. We need to find a
+            // way to send the same OutputFormat value here as was
+            // originally determined there. Need to save this value on
+            // some object that we can obtain here.
+            SessionConfig sessionConfig = RESTAPIServlet.initResponse(request, response);
+            ResultUtils.displayResults(resultReader, sessionConfig);
+
+        } catch (Exception e) {
+            out.println(e.getMessage());
+            e.printStackTrace(out);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryStatusAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryStatusAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryStatusAPIServlet.java
new file mode 100644
index 0000000..d793bfb
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryStatusAPIServlet.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.result.ResultReader;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
+
+public class QueryStatusAPIServlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
+
+    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+        response.setContentType("text/html");
+        response.setCharacterEncoding("utf-8");
+        String strHandle = request.getParameter("handle");
+        PrintWriter out = response.getWriter();
+        ServletContext context = getServletContext();
+        IHyracksClientConnection hcc;
+        IHyracksDataset hds;
+
+        try {
+            synchronized (context) {
+                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
+
+                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
+                if (hds == null) {
+                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
+                }
+            }
+            JSONObject handleObj = new JSONObject(strHandle);
+            JSONArray handle = handleObj.getJSONArray("handle");
+            JobId jobId = new JobId(handle.getLong(0));
+            ResultSetId rsId = new ResultSetId(handle.getLong(1));
+
+            /* TODO(madhusudancs): We need to find a way to JSON serialize default format obtained from
+             * metadataProvider in the AQLTranslator and store it as part of the result handle.
+             */
+            ResultReader resultReader = new ResultReader(hcc, hds);
+            resultReader.open(jobId, rsId);
+
+            JSONObject jsonResponse = new JSONObject();
+            String status;
+            switch (resultReader.getStatus()) {
+                case RUNNING:
+                    status = "RUNNING";
+                    break;
+                case FAILED:
+                    status = "ERROR";
+                    break;
+                case SUCCESS:
+                    status = "SUCCESS";
+                    break;
+                default:
+                    status = "ERROR";
+                    break;
+            }
+            jsonResponse.put("status", status);
+            out.write(jsonResponse.toString());
+
+        } catch (Exception e) {
+            out.println(e.getMessage());
+            e.printStackTrace(out);
+        }
+    }
+}
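
Taken together with the query servlet above, the status and result servlets support an asynchronous flow: submit a query with mode=asynchronous (handled by RESTAPIServlet, next file), keep the returned handle, poll the status endpoint until it reports SUCCESS, and then fetch the result. The handle shape {"handle": [jobId, resultSetId]} is inferred from what these two servlets parse; the /query, /query/status and /query/result mappings and the port are assumptions, as is the query text.

    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import java.util.Scanner;

    import org.json.JSONObject;

    // Illustrative asynchronous flow against assumed servlet mappings.
    public class AsyncQueryFlowSketch {

        private static String httpGet(String url) throws Exception {
            HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
            try (InputStream in = conn.getInputStream();
                    Scanner sc = new Scanner(in, StandardCharsets.UTF_8.name())) {
                return sc.useDelimiter("\\A").hasNext() ? sc.next() : "";
            }
        }

        public static void main(String[] args) throws Exception {
            String base = "http://localhost:19002";                    // assumed API port
            String aql = "for $u in dataset FacebookUsers return $u";  // example query
            // 1. Submit with mode=asynchronous; the response is assumed to be the handle JSON
            //    of the form {"handle": [jobId, resultSetId]}.
            String handle = httpGet(base + "/query?query=" + URLEncoder.encode(aql, "UTF-8")
                    + "&mode=asynchronous").trim();
            // 2. Poll the status servlet until the job leaves the RUNNING state.
            String status;
            do {
                Thread.sleep(500);
                status = new JSONObject(httpGet(base + "/query/status?handle="
                        + URLEncoder.encode(handle, "UTF-8"))).getString("status");
            } while ("RUNNING".equals(status));
            // 3. Fetch the result body once the status servlet reports SUCCESS.
            if ("SUCCESS".equals(status)) {
                System.out.println(httpGet(base + "/query/result?handle="
                        + URLEncoder.encode(handle, "UTF-8")));
            }
        }
    }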

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/RESTAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/RESTAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/RESTAPIServlet.java
new file mode 100644
index 0000000..dd19c97
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/RESTAPIServlet.java
@@ -0,0 +1,197 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.logging.Level;
+
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.io.IOUtils;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+import edu.uci.ics.asterix.aql.parser.AQLParser;
+import edu.uci.ics.asterix.aql.parser.ParseException;
+import edu.uci.ics.asterix.aql.parser.TokenMgrError;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.result.ResultReader;
+import edu.uci.ics.asterix.result.ResultUtils;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
+
+abstract class RESTAPIServlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
+
+    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
+
+    /**
+     * Initialize the Content-Type of the response, and construct a
+     * SessionConfig with the appropriate output writer and output-format
+     * based on the Accept: header and other servlet parameters.
+     */
+    static SessionConfig initResponse(HttpServletRequest request, HttpServletResponse response)
+        throws IOException {
+        response.setCharacterEncoding("utf-8");
+
+        // JSON output is the default; most generally useful for a
+        // programmatic HTTP API
+        OutputFormat format = OutputFormat.JSON;
+
+        // First check the "output" servlet parameter.
+        String output = request.getParameter("output");
+        String accept = request.getHeader("Accept");
+        if (output != null) {
+            if (output.equals("CSV")) {
+                format = OutputFormat.CSV;
+            }
+            else if (output.equals("ADM")) {
+                format = OutputFormat.ADM;
+            }
+        }
+        else {
+            // Second check the Accept: HTTP header.
+            if (accept != null) {
+                if (accept.contains("application/x-adm")) {
+                    format = OutputFormat.ADM;
+                } else if (accept.contains("text/csv")) {
+                    format = OutputFormat.CSV;
+                }
+            }
+        }
+
+        SessionConfig sessionConfig = new SessionConfig(response.getWriter(), format);
+
+        // Now that format is set, output the content-type
+        switch (format) {
+            case ADM:
+                response.setContentType("application/x-adm");
+                break;
+            case JSON:
+                response.setContentType("application/json");
+                break;
+            case CSV: {
+                // Check for header parameter or in Accept:.
+                if ("present".equals(request.getParameter("header")) ||
+                    (accept != null && accept.contains("header=present"))) {
+                    response.setContentType("text/csv; header=present");
+                    sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, true);
+                }
+                else {
+                    response.setContentType("text/csv; header=absent");
+                }
+            }
+        }
+
+        return sessionConfig;
+    }
+
+    @Override
+    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException,
+            IOException {
+        StringWriter sw = new StringWriter();
+        IOUtils.copy(request.getInputStream(), sw, StandardCharsets.UTF_8.name());
+        String query = sw.toString();
+        handleRequest(request, response, query);
+    }
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+        String query = getQueryParameter(request);
+        handleRequest(request, response, query);
+    }
+
+    public void handleRequest(HttpServletRequest request, HttpServletResponse response, String query)
+            throws IOException {
+        SessionConfig sessionConfig = initResponse(request, response);
+        AqlTranslator.ResultDelivery resultDelivery = whichResultDelivery(request);
+
+        ServletContext context = getServletContext();
+        IHyracksClientConnection hcc;
+        IHyracksDataset hds;
+
+        try {
+            synchronized (context) {
+                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
+                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
+                if (hds == null) {
+                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
+                }
+            }
+
+            AQLParser parser = new AQLParser(query);
+            List<Statement> aqlStatements = parser.parse();
+            if (!containsForbiddenStatements(aqlStatements)) {
+                MetadataManager.INSTANCE.init();
+                AqlTranslator aqlTranslator = new AqlTranslator(aqlStatements, sessionConfig);
+                aqlTranslator.compileAndExecute(hcc, hds, resultDelivery);
+            }
+        } catch (ParseException | TokenMgrError | edu.uci.ics.asterix.aqlplus.parser.TokenMgrError pe) {
+            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, pe.getMessage(), pe);
+            String errorMessage = ResultUtils.buildParseExceptionMessage(pe, query);
+            JSONObject errorResp = ResultUtils.getErrorResponse(2, errorMessage, "", "");
+            sessionConfig.out().write(errorResp.toString());
+            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+        } catch (Exception e) {
+            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+            ResultUtils.apiErrorHandler(sessionConfig.out(), e);
+            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+        }
+    }
+
+    private boolean containsForbiddenStatements(List<Statement> aqlStatements) throws AsterixException {
+        for (Statement st : aqlStatements) {
+            if (!getAllowedStatements().contains(st.getKind())) {
+                throw new AsterixException(String.format(getErrorMessage(), st.getKind()));
+            }
+        }
+        return false;
+    }
+
+    protected AqlTranslator.ResultDelivery whichResultDelivery(HttpServletRequest request) {
+        String mode = request.getParameter("mode");
+        if (mode != null) {
+            if (mode.equals("asynchronous")) {
+                return AqlTranslator.ResultDelivery.ASYNC;
+            } else if (mode.equals("asynchronous-deferred")) {
+                return AqlTranslator.ResultDelivery.ASYNC_DEFERRED;
+            }
+        }
+        return AqlTranslator.ResultDelivery.SYNC;
+    }
+
+    protected abstract String getQueryParameter(HttpServletRequest request);
+
+    protected abstract List<Kind> getAllowedStatements();
+
+    protected abstract String getErrorMessage();
+}
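
initResponse gives the explicit output parameter precedence over the Accept header, defaults to JSON, and only honours header=present for CSV. The sketch below exercises those branches; the /query mapping and port are assumptions, while the expected content types come directly from the switch in initResponse.

    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;

    public class OutputFormatSketch {
        public static void main(String[] args) throws Exception {
            String base = "http://localhost:19002/query";   // assumed mapping of QueryAPIServlet
            String q = URLEncoder.encode("for $u in dataset FacebookUsers return $u", "UTF-8");

            // 1. No hints at all: the servlet falls back to JSON ("application/json").
            System.out.println(contentType(base + "?query=" + q, null));

            // 2. Accept header only: ADM is selected ("application/x-adm").
            System.out.println(contentType(base + "?query=" + q, "application/x-adm"));

            // 3. The output parameter wins over Accept: CSV despite the ADM Accept header.
            System.out.println(contentType(base + "?query=" + q + "&output=CSV", "application/x-adm"));

            // 4. CSV with a header row: "text/csv; header=present".
            System.out.println(contentType(base + "?query=" + q + "&output=CSV&header=present", null));
        }

        private static String contentType(String url, String accept) throws Exception {
            HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
            if (accept != null) {
                conn.setRequestProperty("Accept", accept);
            }
            conn.getInputStream().close();   // force the request; the body is ignored here
            return conn.getContentType();
        }
    }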

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ShutdownAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ShutdownAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ShutdownAPIServlet.java
new file mode 100644
index 0000000..4e16254
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ShutdownAPIServlet.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
+import java.util.logging.Level;
+
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.io.IOUtils;
+
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.result.ResultUtils;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+
+public class ShutdownAPIServlet extends HttpServlet {
+    private static final long serialVersionUID = 1L;
+
+    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
+
+    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
+
+    @Override
+    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException,
+            IOException {
+
+        response.setContentType("application/json");
+        response.setCharacterEncoding("utf-8");
+
+        PrintWriter out = response.getWriter();
+        OutputFormat format = OutputFormat.JSON;
+        String accept = request.getHeader("Accept");
+        if ((accept == null) || (accept.contains("application/x-adm"))) {
+            format = OutputFormat.ADM;
+        } else if (accept.contains("application/json")) {
+            format = OutputFormat.JSON;
+        }
+        StringWriter sw = new StringWriter();
+        IOUtils.copy(request.getInputStream(), sw, StandardCharsets.UTF_8.name());
+
+        ServletContext context = getServletContext();
+        IHyracksClientConnection hcc;
+        try {
+            synchronized (context) {
+                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
+                response.setStatus(HttpServletResponse.SC_ACCEPTED);
+                hcc.stopCluster();
+            }
+        } catch (Exception e) {
+            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
+            ResultUtils.apiErrorHandler(out, e);
+            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+        }
+    }
+
+    @Override
+    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+    }
+}
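
The shutdown endpoint is POST-only (doGet is intentionally a no-op) and answers 202 Accepted before asking the cluster controller to stop. A minimal invocation sketch; the /admin/shutdown path and the port are assumptions, only the POST-only contract and the 202 status come from the servlet above.

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ShutdownSketch {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:19002/admin/shutdown");   // assumed mapping
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setDoOutput(true);
            conn.getOutputStream().close();               // empty body; the servlet reads but ignores it
            System.out.println(conn.getResponseCode());   // 202 (SC_ACCEPTED) on success
        }
    }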

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java
new file mode 100644
index 0000000..4bbd712
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/UpdateAPIServlet.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.http.servlet;
+
+import java.util.Arrays;
+import java.util.List;
+
+import javax.servlet.http.HttpServletRequest;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+
+public class UpdateAPIServlet extends RESTAPIServlet {
+    private static final long serialVersionUID = 1L;
+
+    protected String getQueryParameter(HttpServletRequest request) {
+        return request.getParameter("statements");
+    }
+
+    protected List<Statement.Kind> getAllowedStatements() {
+        Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DELETE, Kind.INSERT, Kind.UPDATE, Kind.DML_CMD_LIST,
+                Kind.LOAD, Kind.CONNECT_FEED, Kind.DISCONNECT_FEED, Kind.SET, Kind.COMPACT, Kind.EXTERNAL_DATASET_REFRESH };
+        return Arrays.asList(statementsArray);
+    }
+
+    protected String getErrorMessage() {
+        return "Invalid statement: Non-Update statement %s to the Update API.";
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java b/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
new file mode 100644
index 0000000..54804e7
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.java;
+
+import java.io.PrintWriter;
+import java.io.Reader;
+import java.util.List;
+
+import edu.uci.ics.asterix.api.common.APIFramework;
+import edu.uci.ics.asterix.api.common.Job;
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.parser.AQLParser;
+import edu.uci.ics.asterix.aql.parser.ParseException;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class AsterixJavaClient {
+    private IHyracksClientConnection hcc;
+    private Reader queryText;
+    private PrintWriter writer;
+
+    private Job[] dmlJobs;
+    private JobSpecification queryJobSpec;
+
+    public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText, PrintWriter writer) {
+        this.hcc = hcc;
+        this.queryText = queryText;
+        this.writer = writer;
+    }
+
+    public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText) {
+        this(hcc, queryText, new PrintWriter(System.out, true));
+    }
+
+    public void compile() throws Exception {
+        compile(true, false, false, false, false, false, false);
+    }
+
+    public void compile(boolean optimize, boolean printRewrittenExpressions, boolean printLogicalPlan,
+            boolean printOptimizedPlan, boolean printPhysicalOpsOnly, boolean generateBinaryRuntime, boolean printJob)
+            throws Exception {
+        queryJobSpec = null;
+        dmlJobs = null;
+
+        if (queryText == null) {
+            return;
+        }
+        int ch;
+        StringBuilder builder = new StringBuilder();
+        while ((ch = queryText.read()) != -1) {
+            builder.append((char) ch);
+        }
+        AQLParser parser = new AQLParser(builder.toString());
+        List<Statement> aqlStatements;
+        try {
+            aqlStatements = parser.parse();
+        } catch (ParseException pe) {
+            throw new AsterixException(pe);
+        }
+        MetadataManager.INSTANCE.init();
+
+        SessionConfig conf = new SessionConfig(writer, OutputFormat.ADM, optimize, true, generateBinaryRuntime);
+        conf.setOOBData(false, printRewrittenExpressions, printLogicalPlan,
+                        printOptimizedPlan, printJob);
+        if (printPhysicalOpsOnly) {
+            conf.set(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS, true);
+        }
+
+        AqlTranslator aqlTranslator = new AqlTranslator(aqlStatements, conf);
+        aqlTranslator.compileAndExecute(hcc, null, AqlTranslator.ResultDelivery.SYNC);
+        writer.flush();
+    }
+
+    public void execute() throws Exception {
+        if (dmlJobs != null) {
+            APIFramework.executeJobArray(hcc, dmlJobs, writer);
+        }
+        if (queryJobSpec != null) {
+            APIFramework.executeJobArray(hcc, new JobSpecification[] { queryJobSpec }, writer);
+        }
+    }
+
+}
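
AsterixJavaClient wraps the parse/compile/execute pipeline behind a Reader and a PrintWriter. A usage sketch; the cluster-controller address matches the HyracksProperties defaults and is an assumption about the deployment, and the AQL text is just an example.

    import java.io.PrintWriter;
    import java.io.StringReader;

    import edu.uci.ics.asterix.api.java.AsterixJavaClient;
    import edu.uci.ics.hyracks.api.client.HyracksConnection;
    import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

    public class AsterixJavaClientExample {
        public static void main(String[] args) throws Exception {
            // Assumed CC location; matches the defaults used by HyracksProperties.
            IHyracksClientConnection hcc = new HyracksConnection("127.0.0.1", 1098);
            String aql = "drop dataverse Demo if exists; create dataverse Demo;";
            AsterixJavaClient client = new AsterixJavaClient(hcc, new StringReader(aql),
                    new PrintWriter(System.out, true));
            // optimize and generate binary runtime; skip the various debug printouts.
            client.compile(true, false, false, false, false, true, false);
            client.execute();   // would run any standalone jobs captured during compile()
        }
    }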


[36/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
deleted file mode 100644
index bfa0331..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
+++ /dev/null
@@ -1,625 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.base.Statement.Kind;
-import edu.uci.ics.asterix.aql.expression.CallExpr;
-import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
-import edu.uci.ics.asterix.aql.expression.FieldAccessor;
-import edu.uci.ics.asterix.aql.expression.FieldBinding;
-import edu.uci.ics.asterix.aql.expression.ForClause;
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.Query;
-import edu.uci.ics.asterix.aql.expression.RecordConstructor;
-import edu.uci.ics.asterix.aql.expression.VariableExpr;
-import edu.uci.ics.asterix.aql.expression.WhereClause;
-import edu.uci.ics.asterix.aql.literal.StringLiteral;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-
-/**
- * An AQL statement instance is translated into an instance of type CompileX
- * that has additional fields for use by the AqlTranslator.
- */
-public class CompiledStatements {
-
-    public static interface ICompiledStatement {
-
-        public Kind getKind();
-    }
-
-    public static class CompiledDatasetDropStatement implements ICompiledStatement {
-        private final String dataverseName;
-        private final String datasetName;
-
-        public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.DATASET_DROP;
-        }
-    }
-
-    // added by yasser
-    public static class CompiledCreateDataverseStatement implements ICompiledStatement {
-        private String dataverseName;
-        private String format;
-
-        public CompiledCreateDataverseStatement(String dataverseName, String format) {
-            this.dataverseName = dataverseName;
-            this.format = format;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getFormat() {
-            return format;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.CREATE_DATAVERSE;
-        }
-    }
-
-    public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
-        private String nodeGroupName;
-
-        public CompiledNodeGroupDropStatement(String nodeGroupName) {
-            this.nodeGroupName = nodeGroupName;
-        }
-
-        public String getNodeGroupName() {
-            return nodeGroupName;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.NODEGROUP_DROP;
-        }
-    }
-
-    public static class CompiledIndexDropStatement implements ICompiledStatement {
-        private String dataverseName;
-        private String datasetName;
-        private String indexName;
-
-        public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.indexName = indexName;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        public String getIndexName() {
-            return indexName;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.INDEX_DROP;
-        }
-    }
-
-    public static class CompiledDataverseDropStatement implements ICompiledStatement {
-        private String dataverseName;
-        private boolean ifExists;
-
-        public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
-            this.dataverseName = dataverseName;
-            this.ifExists = ifExists;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public boolean getIfExists() {
-            return ifExists;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.DATAVERSE_DROP;
-        }
-    }
-
-    public static class CompiledTypeDropStatement implements ICompiledStatement {
-        private String typeName;
-
-        public CompiledTypeDropStatement(String nodeGroupName) {
-            this.typeName = nodeGroupName;
-        }
-
-        public String getTypeName() {
-            return typeName;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.TYPE_DROP;
-        }
-    }
-
-    public static interface ICompiledDmlStatement extends ICompiledStatement {
-
-        public String getDataverseName();
-
-        public String getDatasetName();
-    }
-
-    public static class CompiledCreateIndexStatement implements ICompiledDmlStatement {
-        private final String indexName;
-        private final String dataverseName;
-        private final String datasetName;
-        private final List<List<String>> keyFields;
-        private final List<IAType> keyTypes;
-        private final boolean isEnforced;
-        private final IndexType indexType;
-
-        // Specific to NGram index.
-        private final int gramLength;
-
-        public CompiledCreateIndexStatement(String indexName, String dataverseName, String datasetName,
-                List<List<String>> keyFields, List<IAType> keyTypes, boolean isEnforced, int gramLength, IndexType indexType) {
-            this.indexName = indexName;
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.keyFields = keyFields;
-            this.keyTypes = keyTypes;
-            this.gramLength = gramLength;
-            this.isEnforced = isEnforced;
-            this.indexType = indexType;
-        }
-
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getIndexName() {
-            return indexName;
-        }
-
-        public List<List<String>> getKeyFields() {
-            return keyFields;
-        }
-
-        public List<IAType> getKeyFieldTypes() {
-            return keyTypes;
-        }
-
-        public IndexType getIndexType() {
-            return indexType;
-        }
-
-        public int getGramLength() {
-            return gramLength;
-        }
-
-        public boolean isEnforced() {
-            return isEnforced;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.CREATE_INDEX;
-        }
-    }
-
-    public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private boolean alreadySorted;
-        private String adapter;
-        private Map<String, String> properties;
-
-        public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
-                Map<String, String> properties, boolean alreadySorted) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.alreadySorted = alreadySorted;
-            this.adapter = adapter;
-            this.properties = properties;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        public boolean alreadySorted() {
-            return alreadySorted;
-        }
-
-        public String getAdapter() {
-            return adapter;
-        }
-
-        public Map<String, String> getProperties() {
-            return properties;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.LOAD;
-        }
-    }
-
-    public static class CompiledInsertStatement implements ICompiledDmlStatement {
-        private final String dataverseName;
-        private final String datasetName;
-        private final Query query;
-        private final int varCounter;
-
-        public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.query = query;
-            this.varCounter = varCounter;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        public int getVarCounter() {
-            return varCounter;
-        }
-
-        public Query getQuery() {
-            return query;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.INSERT;
-        }
-    }
-
-    public static class CompiledConnectFeedStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String feedName;
-        private String datasetName;
-        private String policyName;
-        private Query query;
-        private int varCounter;
-
-        public CompiledConnectFeedStatement(String dataverseName, String feedName, String datasetName,
-                String policyName, Query query, int varCounter) {
-            this.dataverseName = dataverseName;
-            this.feedName = feedName;
-            this.datasetName = datasetName;
-            this.policyName = policyName;
-            this.query = query;
-            this.varCounter = varCounter;
-        }
-
-        @Override
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getFeedName() {
-            return feedName;
-        }
-
-        @Override
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        public int getVarCounter() {
-            return varCounter;
-        }
-
-        public Query getQuery() {
-            return query;
-        }
-
-        public void setQuery(Query query) {
-            this.query = query;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.CONNECT_FEED;
-        }
-
-        public String getPolicyName() {
-            return policyName;
-        }
-    }
-    
-    public static class CompiledSubscribeFeedStatement implements ICompiledDmlStatement {
-
-        private final FeedConnectionRequest request;
-        private Query query;
-        private final int varCounter;
-
-        public CompiledSubscribeFeedStatement(FeedConnectionRequest request, Query query, int varCounter) {
-            this.request = request;
-            this.query = query;
-            this.varCounter = varCounter;
-        }
-
-        @Override
-        public String getDataverseName() {
-            return request.getReceivingFeedId().getDataverse();
-        }
-
-        @Override
-        public String getDatasetName() {
-            return request.getTargetDataset();
-        }
-
-        public int getVarCounter() {
-            return varCounter;
-        }
-
-        public Query getQuery() {
-            return query;
-        }
-
-        public void setQuery(Query query) {
-            this.query = query;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.SUBSCRIBE_FEED;
-        }
-
-    }
-
-
-    public static class CompiledDisconnectFeedStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private String feedName;
-        private Query query;
-        private int varCounter;
-
-        public CompiledDisconnectFeedStatement(String dataverseName, String feedName, String datasetName) {
-            this.dataverseName = dataverseName;
-            this.feedName = feedName;
-            this.datasetName = datasetName;
-        }
-
-        @Override
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        @Override
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        public String getFeedName() {
-            return feedName;
-        }
-
-        public int getVarCounter() {
-            return varCounter;
-        }
-
-        public Query getQuery() {
-            return query;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.DISCONNECT_FEED;
-        }
-
-    }
-
-    public static class CompiledDeleteStatement implements ICompiledDmlStatement {
-        private VariableExpr var;
-        private String dataverseName;
-        private String datasetName;
-        private Expression condition;
-        private int varCounter;
-        private AqlMetadataProvider metadataProvider;
-
-        public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName,
-                Expression condition, int varCounter, AqlMetadataProvider metadataProvider) {
-            this.var = var;
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-            this.condition = condition;
-            this.varCounter = varCounter;
-            this.metadataProvider = metadataProvider;
-        }
-
-        @Override
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        @Override
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public int getVarCounter() {
-            return varCounter;
-        }
-
-        public Expression getCondition() {
-            return condition;
-        }
-
-        public Query getQuery() throws AlgebricksException {
-
-            List<Expression> arguments = new ArrayList<Expression>();
-            String arg = dataverseName == null ? datasetName : dataverseName + "." + datasetName;
-            LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
-            arguments.add(argumentLiteral);
-
-            CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
-                    arguments);
-            List<Clause> clauseList = new ArrayList<Clause>();
-            Clause forClause = new ForClause(var, callExpression);
-            clauseList.add(forClause);
-            Clause whereClause = null;
-            if (condition != null) {
-                whereClause = new WhereClause(condition);
-                clauseList.add(whereClause);
-            }
-
-            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-            if (dataset == null) {
-                throw new AlgebricksException("Unknown dataset " + datasetName);
-            }
-            String itemTypeName = dataset.getItemTypeName();
-            IAType itemType = metadataProvider.findType(dataset.getDataverseName(), itemTypeName);
-            ARecordType recType = (ARecordType) itemType;
-            String[] fieldNames = recType.getFieldNames();
-            List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
-            for (int i = 0; i < fieldNames.length; i++) {
-                FieldAccessor fa = new FieldAccessor(var, new Identifier(fieldNames[i]));
-                FieldBinding fb = new FieldBinding(new LiteralExpr(new StringLiteral(fieldNames[i])), fa);
-                fieldBindings.add(fb);
-            }
-            RecordConstructor rc = new RecordConstructor(fieldBindings);
-
-            FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
-            Query query = new Query();
-            query.setBody(flowgr);
-            return query;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.DELETE;
-        }
-
-    }
-
-    public static class CompiledCompactStatement implements ICompiledStatement {
-        private final String dataverseName;
-        private final String datasetName;
-
-        public CompiledCompactStatement(String dataverseName, String datasetName) {
-            this.dataverseName = dataverseName;
-            this.datasetName = datasetName;
-        }
-
-        public String getDataverseName() {
-            return dataverseName;
-        }
-
-        public String getDatasetName() {
-            return datasetName;
-        }
-
-        @Override
-        public Kind getKind() {
-            return Kind.COMPACT;
-        }
-    }
-
-    public static class CompiledIndexCompactStatement extends CompiledCompactStatement {
-        private final String indexName;
-        private final List<List<String>> keyFields;
-        private final List<IAType> keyTypes;
-        private final IndexType indexType;
-        private final boolean isEnforced;
-
-        // Specific to NGram index.
-        private final int gramLength;
-
-        public CompiledIndexCompactStatement(String dataverseName, String datasetName, String indexName,
-                List<List<String>> keyFields, List<IAType> keyTypes, boolean isEnforced, int gramLength, IndexType indexType) {
-            super(dataverseName, datasetName);
-            this.indexName = indexName;
-            this.keyFields = keyFields;
-            this.keyTypes = keyTypes;
-            this.gramLength = gramLength;
-            this.indexType = indexType;
-            this.isEnforced = isEnforced;
-        }
-
-        public String getIndexName() {
-            return indexName;
-        }
-
-        public List<List<String>> getKeyFields() {
-            return keyFields;
-        }
-
-        public List<IAType> getKeyTypes() {
-            return keyTypes;
-        }
-
-        public IndexType getIndexType() {
-            return indexType;
-        }
-
-        public int getGramLength() {
-            return gramLength;
-        }
-
-        public boolean isEnforced() {
-            return isEnforced;
-        }
-    }
-}

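For orientation, here is a minimal sketch of how one of the compiled DML descriptors removed above is instantiated by a caller. The enclosing translator class and its imports fall outside this hunk, so the CompiledStatements outer-class name and the Query import path below are assumptions; only the constructor and getters visible in the diff are taken as given.

    import edu.uci.ics.asterix.aql.expression.Query;                                  // assumed package for Query
    import edu.uci.ics.asterix.translator.CompiledStatements.CompiledInsertStatement; // assumed enclosing class

    public class CompiledInsertSketch {
        public static void main(String[] args) {
            // The translator normally sets the query body from the parsed INSERT statement;
            // an empty Query is enough to exercise the accessors shown in the diff.
            Query query = new Query();
            CompiledInsertStatement insert =
                    new CompiledInsertStatement("Default", "Customers", query, 0);
            System.out.println(insert.getKind());          // INSERT
            System.out.println(insert.getDataverseName()); // Default
            System.out.println(insert.getDatasetName());   // Customers
        }
    }
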
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java
deleted file mode 100644
index dc75889..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/ConstantHelper.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import edu.uci.ics.asterix.aql.base.Literal;
-import edu.uci.ics.asterix.aql.literal.DoubleLiteral;
-import edu.uci.ics.asterix.aql.literal.FloatLiteral;
-import edu.uci.ics.asterix.aql.literal.IntegerLiteral;
-import edu.uci.ics.asterix.aql.literal.LongIntegerLiteral;
-import edu.uci.ics.asterix.aql.literal.StringLiteral;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.IAObject;
-
-public final class ConstantHelper {
-
-    public static IAObject objectFromLiteral(Literal valLiteral) {
-        switch (valLiteral.getLiteralType()) {
-            case DOUBLE: {
-                DoubleLiteral d = (DoubleLiteral) valLiteral;
-                return new ADouble(d.getValue());
-            }
-            case FALSE: {
-                return ABoolean.FALSE;
-            }
-            case FLOAT: {
-                FloatLiteral fl = (FloatLiteral) valLiteral;
-                return new AFloat(fl.getValue());
-            }
-            case INTEGER: {
-                IntegerLiteral il = (IntegerLiteral) valLiteral;
-                return new AInt32(il.getValue());
-            }
-            case LONG: {
-                LongIntegerLiteral il = (LongIntegerLiteral) valLiteral;
-                return new AInt64(il.getValue());
-            }
-            case NULL: {
-                return ANull.NULL;
-            }
-            case STRING: {
-                StringLiteral sl = (StringLiteral) valLiteral;
-                return new AString(sl.getValue());
-            }
-            case TRUE: {
-                return ABoolean.TRUE;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-    }
-
-}

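A minimal sketch of the literal-to-ADM mapping implemented by ConstantHelper above, using only types the deleted file itself imports; the wrapper class name is illustrative:

    import edu.uci.ics.asterix.aql.literal.StringLiteral;
    import edu.uci.ics.asterix.om.base.AString;
    import edu.uci.ics.asterix.om.base.IAObject;
    import edu.uci.ics.asterix.translator.ConstantHelper;

    public class ConstantHelperSketch {
        public static void main(String[] args) {
            // An AQL string literal is converted to its ADM counterpart, AString.
            IAObject value = ConstantHelper.objectFromLiteral(new StringLiteral("TinySocial"));
            System.out.println(value instanceof AString); // true
        }
    }
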
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationContext.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationContext.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationContext.java
deleted file mode 100644
index 986b5ca..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationContext.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import java.util.HashMap;
-
-import edu.uci.ics.asterix.aql.expression.VariableExpr;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-
-public final class TranslationContext {
-
-    private Counter varCounter;
-    private HashMap<Integer, LogicalVariable> varEnv = new HashMap<Integer, LogicalVariable>();
-    private boolean topFlwor = true;
-
-    public TranslationContext(Counter varCounter) {
-        this.varCounter = varCounter;
-    }
-
-    public int getVarCounter() {
-        return varCounter.get();
-    }
-
-    public boolean isTopFlwor() {
-        return topFlwor;
-    }
-
-    public void setTopFlwor(boolean b) {
-        topFlwor = b;
-    }
-
-    public LogicalVariable getVar(Integer varId) {
-        return varEnv.get(varId);
-    }
-
-    public LogicalVariable getVar(VariableExpr v) {
-        return varEnv.get(v.getVar().getId());
-    }
-
-    public LogicalVariable newVar(VariableExpr v) {
-        Integer i = v.getVar().getId();
-        if (i > varCounter.get()) {
-            varCounter.set(i);
-        }
-        LogicalVariable var = new LogicalVariable(i);
-        varEnv.put(i, var);
-        return var;
-    }
-
-    public void setVar(VariableExpr v, LogicalVariable var) {
-        varEnv.put(v.getVar().getId(), var);
-    }
-
-    public LogicalVariable newVar() {
-        varCounter.inc();
-        LogicalVariable var = new LogicalVariable(varCounter.get());
-        varEnv.put(varCounter.get(), var);
-        return var;
-    }
-}

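A minimal usage sketch of TranslationContext, assuming Counter exposes an int-seeded constructor (only get(), set(), and inc() appear in this diff); everything else follows from the methods shown above:

    import edu.uci.ics.asterix.translator.TranslationContext;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

    public class TranslationContextSketch {
        public static void main(String[] args) {
            // Assumption: new Counter(0) seeds the variable counter at zero.
            TranslationContext ctx = new TranslationContext(new Counter(0));
            LogicalVariable v1 = ctx.newVar();       // allocates id 1 and registers it
            LogicalVariable v2 = ctx.newVar();       // allocates id 2
            System.out.println(ctx.getVarCounter()); // 2
            System.out.println(ctx.getVar(2) == v2); // true: lookup by variable id
        }
    }
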
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationException.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationException.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationException.java
deleted file mode 100644
index 2819961..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TranslationException.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class TranslationException extends AsterixException {
-    /**
-     * 
-     */
-    private static final long serialVersionUID = 685960054131778068L;
-
-    public TranslationException() {
-        super();
-    }
-
-    public TranslationException(String msg) {
-        super(msg);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
deleted file mode 100644
index e6f3d17..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/TypeTranslator.java
+++ /dev/null
@@ -1,392 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.translator;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.aql.expression.OrderedListTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition.RecordKind;
-import edu.uci.ics.asterix.aql.expression.TypeExpression;
-import edu.uci.ics.asterix.aql.expression.TypeReferenceExpression;
-import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
-import edu.uci.ics.asterix.common.annotations.IRecordFieldDataGen;
-import edu.uci.ics.asterix.common.annotations.RecordDataGenAnnotation;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
-import edu.uci.ics.asterix.metadata.entities.Datatype;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.asterix.om.types.AbstractComplexType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.types.TypeSignature;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class TypeTranslator {
-
-    public static Map<TypeSignature, IAType> computeTypes(MetadataTransactionContext mdTxnCtx, TypeExpression typeExpr,
-            String typeName, String typeDataverse) throws AlgebricksException, MetadataException {
-        Map<TypeSignature, IAType> typeMap = new HashMap<TypeSignature, IAType>();
-        return computeTypes(mdTxnCtx, typeExpr, typeName, typeDataverse, typeMap);
-    }
-
-    public static Map<TypeSignature, IAType> computeTypes(MetadataTransactionContext mdTxnCtx, TypeExpression typeExpr,
-            String typeName, String typeDataverse, Map<TypeSignature, IAType> typeMap) throws AlgebricksException,
-            MetadataException {
-        Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes = new HashMap<String, Map<ARecordType, List<Integer>>>();
-        Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes = new HashMap<TypeSignature, List<AbstractCollectionType>>();
-        Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences = new HashMap<TypeSignature, List<TypeSignature>>();
-        firstPass(typeExpr, typeName, typeMap, incompleteFieldTypes, incompleteItemTypes,
-                incompleteTopLevelTypeReferences, typeDataverse);
-        secondPass(mdTxnCtx, typeMap, incompleteFieldTypes, incompleteItemTypes, incompleteTopLevelTypeReferences,
-                typeDataverse);
-
-        for (IAType type : typeMap.values())
-            if (type.getTypeTag().isDerivedType())
-                ((AbstractComplexType) type).generateNestedDerivedTypeNames();
-        return typeMap;
-    }
-
-    private static Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
-
-    private static void firstPass(TypeExpression typeExpr, String typeName, Map<TypeSignature, IAType> typeMap,
-            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
-            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String typeDataverse)
-            throws AlgebricksException {
-
-        if (builtinTypeMap.get(typeName) != null) {
-            throw new AlgebricksException("Cannot redefine builtin type " + typeName + " .");
-        }
-        TypeSignature typeSignature = new TypeSignature(typeDataverse, typeName);
-        try {
-            switch (typeExpr.getTypeKind()) {
-                case TYPEREFERENCE: {
-                    TypeReferenceExpression tre = (TypeReferenceExpression) typeExpr;
-                    IAType t = solveTypeReference(new TypeSignature(typeDataverse, tre.getIdent().getValue()), typeMap);
-                    if (t != null) {
-                        typeMap.put(typeSignature, t);
-                    } else {
-                        addIncompleteTopLevelTypeReference(typeName, tre, incompleteTopLevelTypeReferences,
-                                typeDataverse);
-                    }
-                    break;
-                }
-                case RECORD: {
-                    RecordTypeDefinition rtd = (RecordTypeDefinition) typeExpr;
-                    ARecordType recType = computeRecordType(typeSignature, rtd, typeMap, incompleteFieldTypes,
-                            incompleteItemTypes, typeDataverse);
-                    typeMap.put(typeSignature, recType);
-                    break;
-                }
-                case ORDEREDLIST: {
-                    OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) typeExpr;
-                    AOrderedListType olType = computeOrderedListType(typeSignature, oltd, typeMap, incompleteItemTypes,
-                            incompleteFieldTypes, typeDataverse);
-                    typeMap.put(typeSignature, olType);
-                    break;
-                }
-                case UNORDEREDLIST: {
-                    UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) typeExpr;
-                    AUnorderedListType ulType = computeUnorderedListType(typeSignature, ultd, typeMap,
-                            incompleteItemTypes, incompleteFieldTypes, typeDataverse);
-                    typeMap.put(typeSignature, ulType);
-                    break;
-                }
-                default: {
-                    throw new IllegalStateException();
-                }
-            }
-        } catch (AsterixException e) {
-            throw new AlgebricksException(e);
-        }
-    }
-
-    private static void secondPass(MetadataTransactionContext mdTxnCtx, Map<TypeSignature, IAType> typeMap,
-            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
-            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String typeDataverse)
-            throws AlgebricksException, MetadataException {
-        // solve remaining top level references
-
-        for (TypeSignature typeSignature : incompleteTopLevelTypeReferences.keySet()) {
-            IAType t;// = typeMap.get(trefName);
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getNamespace(),
-                    typeSignature.getName());
-            if (dt == null) {
-                throw new AlgebricksException("Could not resolve type " + typeSignature);
-            } else
-                t = dt.getDatatype();
-            for (TypeSignature sign : incompleteTopLevelTypeReferences.get(typeSignature)) {
-                typeMap.put(sign, t);
-            }
-        }
-        // solve remaining field type references
-        for (String trefName : incompleteFieldTypes.keySet()) {
-            IAType t;// = typeMap.get(trefName);
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeDataverse, trefName);
-            if (dt == null) {
-                throw new AlgebricksException("Could not resolve type " + trefName);
-            } else
-                t = dt.getDatatype();
-            Map<ARecordType, List<Integer>> fieldsToFix = incompleteFieldTypes.get(trefName);
-            for (ARecordType recType : fieldsToFix.keySet()) {
-                List<Integer> positions = fieldsToFix.get(recType);
-                IAType[] fldTypes = recType.getFieldTypes();
-                for (Integer pos : positions) {
-                    if (fldTypes[pos] == null) {
-                        fldTypes[pos] = t;
-                    } else { // nullable
-                        AUnionType nullableUnion = (AUnionType) fldTypes[pos];
-                        nullableUnion.setTypeAtIndex(t, 1);
-                    }
-                }
-            }
-        }
-
-        // solve remaining item type references
-        for (TypeSignature typeSignature : incompleteItemTypes.keySet()) {
-            IAType t;// = typeMap.get(trefName);
-            Datatype dt = null;
-            if (MetadataManager.INSTANCE != null) {
-                dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getNamespace(),
-                        typeSignature.getName());
-                if (dt == null) {
-                    throw new AlgebricksException("Could not resolve type " + typeSignature);
-                }
-                t = dt.getDatatype();
-            } else {
-                t = typeMap.get(typeSignature);
-            }
-            for (AbstractCollectionType act : incompleteItemTypes.get(typeSignature)) {
-                act.setItemType(t);
-            }
-        }
-    }
-
-    private static AOrderedListType computeOrderedListType(TypeSignature typeSignature, OrderedListTypeDefinition oltd,
-            Map<TypeSignature, IAType> typeMap, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse)
-            throws AsterixException {
-        TypeExpression tExpr = oltd.getItemTypeExpression();
-        String typeName = typeSignature != null ? typeSignature.getName() : null;
-        AOrderedListType aolt = new AOrderedListType(null, typeName);
-        setCollectionItemType(tExpr, typeMap, incompleteItemTypes, incompleteFieldTypes, aolt, defaultDataverse);
-        return aolt;
-    }
-
-    private static AUnorderedListType computeUnorderedListType(TypeSignature typeSignature,
-            UnorderedListTypeDefinition ultd, Map<TypeSignature, IAType> typeMap,
-            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse)
-            throws AsterixException {
-        TypeExpression tExpr = ultd.getItemTypeExpression();
-        String typeName = typeSignature != null ? typeSignature.getName() : null;
-        AUnorderedListType ault = new AUnorderedListType(null, typeName);
-        setCollectionItemType(tExpr, typeMap, incompleteItemTypes, incompleteFieldTypes, ault, defaultDataverse);
-        return ault;
-    }
-
-    private static void setCollectionItemType(TypeExpression tExpr, Map<TypeSignature, IAType> typeMap,
-            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, AbstractCollectionType act,
-            String defaultDataverse) throws AsterixException {
-        switch (tExpr.getTypeKind()) {
-            case ORDEREDLIST: {
-                OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) tExpr;
-                IAType t = computeOrderedListType(null, oltd, typeMap, incompleteItemTypes, incompleteFieldTypes,
-                        defaultDataverse);
-                act.setItemType(t);
-                break;
-            }
-            case UNORDEREDLIST: {
-                UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) tExpr;
-                IAType t = computeUnorderedListType(null, ultd, typeMap, incompleteItemTypes, incompleteFieldTypes,
-                        defaultDataverse);
-                act.setItemType(t);
-                break;
-            }
-            case RECORD: {
-                RecordTypeDefinition rtd = (RecordTypeDefinition) tExpr;
-                IAType t = computeRecordType(null, rtd, typeMap, incompleteFieldTypes, incompleteItemTypes,
-                        defaultDataverse);
-                act.setItemType(t);
-                break;
-            }
-            case TYPEREFERENCE: {
-                TypeReferenceExpression tre = (TypeReferenceExpression) tExpr;
-                TypeSignature signature = new TypeSignature(defaultDataverse, tre.getIdent().getValue());
-                IAType tref = solveTypeReference(signature, typeMap);
-                if (tref != null) {
-                    act.setItemType(tref);
-                } else {
-                    addIncompleteCollectionTypeReference(act, tre, incompleteItemTypes, defaultDataverse);
-                }
-                break;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-    }
-
-    private static void addIncompleteCollectionTypeReference(AbstractCollectionType collType,
-            TypeReferenceExpression tre, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
-            String defaultDataverse) {
-        String typeName = tre.getIdent().getValue();
-        TypeSignature typeSignature = new TypeSignature(defaultDataverse, typeName);
-        List<AbstractCollectionType> typeList = incompleteItemTypes.get(typeSignature);
-        if (typeList == null) {
-            typeList = new LinkedList<AbstractCollectionType>();
-            incompleteItemTypes.put(typeSignature, typeList);
-        }
-        typeList.add(collType);
-    }
-
-    private static void addIncompleteFieldTypeReference(ARecordType recType, int fldPosition,
-            TypeReferenceExpression tre, Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes) {
-        String typeName = tre.getIdent().getValue();
-        Map<ARecordType, List<Integer>> refMap = incompleteFieldTypes.get(typeName);
-        if (refMap == null) {
-            refMap = new HashMap<ARecordType, List<Integer>>();
-            incompleteFieldTypes.put(typeName, refMap);
-        }
-        List<Integer> typeList = refMap.get(recType);
-        if (typeList == null) {
-            typeList = new ArrayList<Integer>();
-            refMap.put(recType, typeList);
-        }
-        typeList.add(fldPosition);
-    }
-
-    private static void addIncompleteTopLevelTypeReference(String tdeclName, TypeReferenceExpression tre,
-            Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String defaultDataverse) {
-        String name = tre.getIdent().getValue();
-        TypeSignature typeSignature = new TypeSignature(defaultDataverse, name);
-        List<TypeSignature> refList = incompleteTopLevelTypeReferences.get(name);
-        if (refList == null) {
-            refList = new LinkedList<TypeSignature>();
-            incompleteTopLevelTypeReferences.put(new TypeSignature(defaultDataverse, tre.getIdent().getValue()),
-                    refList);
-        }
-        refList.add(typeSignature);
-    }
-
-    private static IAType solveTypeReference(TypeSignature typeSignature, Map<TypeSignature, IAType> typeMap) {
-        IAType builtin = builtinTypeMap.get(typeSignature.getName());
-        if (builtin != null) {
-            return builtin;
-        } else {
-            return typeMap.get(typeSignature);
-        }
-    }
-
-    private static ARecordType computeRecordType(TypeSignature typeSignature, RecordTypeDefinition rtd,
-            Map<TypeSignature, IAType> typeMap, Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
-            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, String defaultDataverse)
-            throws AsterixException {
-        List<String> names = rtd.getFieldNames();
-        int n = names.size();
-        String[] fldNames = new String[n];
-        IAType[] fldTypes = new IAType[n];
-        int i = 0;
-        for (String s : names) {
-            fldNames[i++] = s;
-        }
-        boolean isOpen = rtd.getRecordKind() == RecordKind.OPEN;
-        ARecordType recType;
-        try {
-            recType = new ARecordType(typeSignature == null ? null : typeSignature.getName(), fldNames, fldTypes,
-                    isOpen);
-        } catch (HyracksDataException e) {
-            throw new AsterixException(e);
-        }
-
-        List<IRecordFieldDataGen> fieldDataGen = rtd.getFieldDataGen();
-        if (fieldDataGen.size() == n) {
-            IRecordFieldDataGen[] rfdg = new IRecordFieldDataGen[n];
-            rfdg = fieldDataGen.toArray(rfdg);
-            recType.getAnnotations().add(new RecordDataGenAnnotation(rfdg, rtd.getUndeclaredFieldsDataGen()));
-        }
-
-        for (int j = 0; j < n; j++) {
-            TypeExpression texpr = rtd.getFieldTypes().get(j);
-            switch (texpr.getTypeKind()) {
-                case TYPEREFERENCE: {
-                    TypeReferenceExpression tre = (TypeReferenceExpression) texpr;
-                    TypeSignature signature = new TypeSignature(defaultDataverse, tre.getIdent().getValue());
-                    IAType tref = solveTypeReference(signature, typeMap);
-                    if (tref != null) {
-                        if (!rtd.getNullableFields().get(j)) { // not nullable
-                            fldTypes[j] = tref;
-                        } else { // nullable
-                            fldTypes[j] = AUnionType.createNullableType(tref);
-                        }
-                    } else {
-                        addIncompleteFieldTypeReference(recType, j, tre, incompleteFieldTypes);
-                        if (rtd.getNullableFields().get(j)) {
-                            fldTypes[j] = AUnionType.createNullableType(null);
-                        }
-                    }
-                    break;
-                }
-                case RECORD: {
-                    RecordTypeDefinition recTypeDef2 = (RecordTypeDefinition) texpr;
-                    IAType t2 = computeRecordType(null, recTypeDef2, typeMap, incompleteFieldTypes,
-                            incompleteItemTypes, defaultDataverse);
-                    if (!rtd.getNullableFields().get(j)) { // not nullable
-                        fldTypes[j] = t2;
-                    } else { // nullable
-                        fldTypes[j] = AUnionType.createNullableType(t2);
-                    }
-                    break;
-                }
-                case ORDEREDLIST: {
-                    OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) texpr;
-                    IAType t2 = computeOrderedListType(null, oltd, typeMap, incompleteItemTypes, incompleteFieldTypes,
-                            defaultDataverse);
-                    fldTypes[j] = (rtd.getNullableFields().get(j)) ? AUnionType.createNullableType(t2) : t2;
-                    break;
-                }
-                case UNORDEREDLIST: {
-                    UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) texpr;
-                    IAType t2 = computeUnorderedListType(null, ultd, typeMap, incompleteItemTypes,
-                            incompleteFieldTypes, defaultDataverse);
-                    fldTypes[j] = (rtd.getNullableFields().get(j)) ? AUnionType.createNullableType(t2) : t2;
-                    break;
-                }
-                default: {
-                    throw new IllegalStateException();
-                }
-            }
-
-        }
-
-        return recType;
-    }
-}

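The recurring pattern in computeRecordType above — a field declared nullable becomes a union with NULL — can be seen in isolation with a small sketch; BuiltinType.ASTRING is assumed to be one of the built-in type constants, which are not shown in this hunk:

    import edu.uci.ics.asterix.om.types.AUnionType;
    import edu.uci.ics.asterix.om.types.BuiltinType;
    import edu.uci.ics.asterix.om.types.IAType;

    public class NullableFieldSketch {
        public static void main(String[] args) {
            // Mirrors fldTypes[j] = AUnionType.createNullableType(tref) for a nullable string field.
            IAType nullableString = AUnionType.createNullableType(BuiltinType.ASTRING);
            System.out.println(nullableString.getTypeTag()); // UNION
        }
    }
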
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/AsterixOperatorAnnotations.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/AsterixOperatorAnnotations.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/AsterixOperatorAnnotations.java
new file mode 100644
index 0000000..1ce5b5f
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/AsterixOperatorAnnotations.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.base;
+
+public interface AsterixOperatorAnnotations {
+    public static final String FIELD_ACCESS = "FIELD_ACCESS";
+    public static final String PUSHED_FIELD_ACCESS = "PUSHED_FIELD_ACCESS";
+    public static final String PUSHED_RUNNABLE_FIELD_ACCESS = "PUSHED_RUNNABLE_FIELD_ACCESS";
+    public static final String FIELD_TYPE = "FIELD_TYPE";
+}
\ No newline at end of file

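These constants are simply well-known keys for the operator annotation map; a minimal sketch follows, assuming the map accepts arbitrary String keys and Object values (the value stored below is a placeholder, not the expression an optimizer rule would actually record):

    import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;

    public class AnnotationSketch {
        public static void main(String[] args) {
            // Assumption: EmptyTupleSourceOperator has a no-argument constructor.
            ILogicalOperator op = new EmptyTupleSourceOperator();
            op.getAnnotations().put(AsterixOperatorAnnotations.FIELD_ACCESS, "placeholder value");
            System.out.println(op.getAnnotations().containsKey(AsterixOperatorAnnotations.FIELD_ACCESS)); // true
        }
    }
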
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
new file mode 100644
index 0000000..a6ae0b1
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.base;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
+
+public class LogicalExpressionDeepCopyVisitor implements ILogicalExpressionVisitor<ILogicalExpression, Void> {
+    private final Counter counter;
+    private final Map<LogicalVariable, LogicalVariable> inVarMapping;
+    private final Map<LogicalVariable, LogicalVariable> outVarMapping;
+
+    public LogicalExpressionDeepCopyVisitor(Counter counter, Map<LogicalVariable, LogicalVariable> inVarMapping,
+            Map<LogicalVariable, LogicalVariable> variableMapping) {
+        this.counter = counter;
+        this.inVarMapping = inVarMapping;
+        this.outVarMapping = variableMapping;
+    }
+
+    public ILogicalExpression deepCopy(ILogicalExpression expr) throws AlgebricksException {
+        return expr.accept(this, null);
+    }
+
+    private void deepCopyAnnotations(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
+        Map<Object, IExpressionAnnotation> srcAnnotations = src.getAnnotations();
+        Map<Object, IExpressionAnnotation> destAnnotations = dest.getAnnotations();
+        for (Object k : srcAnnotations.keySet()) {
+            IExpressionAnnotation annotation = srcAnnotations.get(k).copy();
+            destAnnotations.put(k, annotation);
+        }
+    }
+
+    private void deepCopyOpaqueParameters(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
+        Object[] srcOpaqueParameters = src.getOpaqueParameters();
+        Object[] newOpaqueParameters = null;
+        if (srcOpaqueParameters != null) {
+            newOpaqueParameters = new Object[srcOpaqueParameters.length];
+            for (int i = 0; i < srcOpaqueParameters.length; i++) {
+                newOpaqueParameters[i] = srcOpaqueParameters[i];
+            }
+        }
+        dest.setOpaqueParameters(newOpaqueParameters);
+    }
+
+    public MutableObject<ILogicalExpression> deepCopyExpressionReference(Mutable<ILogicalExpression> exprRef)
+            throws AlgebricksException {
+        return new MutableObject<ILogicalExpression>(deepCopy(exprRef.getValue()));
+    }
+
+    // TODO return List<...>
+    public ArrayList<Mutable<ILogicalExpression>> deepCopyExpressionReferenceList(List<Mutable<ILogicalExpression>> list)
+            throws AlgebricksException {
+        ArrayList<Mutable<ILogicalExpression>> listCopy = new ArrayList<Mutable<ILogicalExpression>>(list.size());
+        for (Mutable<ILogicalExpression> exprRef : list) {
+            listCopy.add(deepCopyExpressionReference(exprRef));
+        }
+        return listCopy;
+    }
+
+    @Override
+    public ILogicalExpression visitAggregateFunctionCallExpression(AggregateFunctionCallExpression expr, Void arg)
+            throws AlgebricksException {
+        AggregateFunctionCallExpression exprCopy = new AggregateFunctionCallExpression(expr.getFunctionInfo(),
+                expr.isTwoStep(), deepCopyExpressionReferenceList(expr.getArguments()));
+        deepCopyAnnotations(expr, exprCopy);
+        deepCopyOpaqueParameters(expr, exprCopy);
+        return exprCopy;
+    }
+
+    @Override
+    public ILogicalExpression visitConstantExpression(ConstantExpression expr, Void arg) throws AlgebricksException {
+        return new ConstantExpression(expr.getValue());
+    }
+
+    @Override
+    public ILogicalExpression visitScalarFunctionCallExpression(ScalarFunctionCallExpression expr, Void arg)
+            throws AlgebricksException {
+        ScalarFunctionCallExpression exprCopy = new ScalarFunctionCallExpression(expr.getFunctionInfo(),
+                deepCopyExpressionReferenceList(expr.getArguments()));
+        deepCopyAnnotations(expr, exprCopy);
+        deepCopyOpaqueParameters(expr, exprCopy);
+        return exprCopy;
+
+    }
+
+    @Override
+    public ILogicalExpression visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, Void arg)
+            throws AlgebricksException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalExpression visitUnnestingFunctionCallExpression(UnnestingFunctionCallExpression expr, Void arg)
+            throws AlgebricksException {
+        UnnestingFunctionCallExpression exprCopy = new UnnestingFunctionCallExpression(expr.getFunctionInfo(),
+                deepCopyExpressionReferenceList(expr.getArguments()));
+        deepCopyAnnotations(expr, exprCopy);
+        deepCopyOpaqueParameters(expr, exprCopy);
+        return exprCopy;
+    }
+
+    @Override
+    public ILogicalExpression visitVariableReferenceExpression(VariableReferenceExpression expr, Void arg)
+            throws AlgebricksException {
+        LogicalVariable var = expr.getVariableReference();
+        LogicalVariable givenVarReplacement = inVarMapping.get(var);
+        if (givenVarReplacement != null) {
+            outVarMapping.put(var, givenVarReplacement);
+            return new VariableReferenceExpression(givenVarReplacement);
+        }
+        LogicalVariable varCopy = outVarMapping.get(var);
+        if (varCopy == null) {
+            counter.inc();
+            varCopy = new LogicalVariable(counter.get());
+            outVarMapping.put(var, varCopy);
+        }
+        return new VariableReferenceExpression(varCopy);
+    }
+}

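A minimal sketch of the expression deep copy above, again assuming an int-seeded Counter constructor; the variable ids are illustrative:

    import java.util.HashMap;
    import java.util.Map;

    import edu.uci.ics.asterix.algebra.base.LogicalExpressionDeepCopyVisitor;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;

    public class DeepCopySketch {
        public static void main(String[] args) throws AlgebricksException {
            Map<LogicalVariable, LogicalVariable> in = new HashMap<LogicalVariable, LogicalVariable>();
            Map<LogicalVariable, LogicalVariable> out = new HashMap<LogicalVariable, LogicalVariable>();
            // Assumption: new Counter(10) seeds the counter past the highest variable id already in use.
            LogicalExpressionDeepCopyVisitor visitor =
                    new LogicalExpressionDeepCopyVisitor(new Counter(10), in, out);

            LogicalVariable original = new LogicalVariable(3);
            ILogicalExpression copy = visitor.deepCopy(new VariableReferenceExpression(original));
            // $$3 has no entry in 'in', so a fresh variable (id 11) is allocated and recorded in 'out'.
            System.out.println(copy);
            System.out.println(out.get(original)); // the replacement variable
        }
    }
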
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
new file mode 100644
index 0000000..cedc962
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
@@ -0,0 +1,469 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.algebra.base;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+
+public class LogicalOperatorDeepCopyVisitor implements ILogicalOperatorVisitor<ILogicalOperator, ILogicalOperator> {
+    private final Counter counter;
+    private final LogicalExpressionDeepCopyVisitor exprDeepCopyVisitor;
+
+    // Key: Variable in the original plan. Value: New variable replacing the original one in the copied plan.
+    private final Map<LogicalVariable, LogicalVariable> outVarMapping = new HashMap<LogicalVariable, LogicalVariable>();
+
+    // Key: Variable in the original plan. Value: Variable with which to replace original variable in the plan copy.
+    private final Map<LogicalVariable, LogicalVariable> inVarMapping;
+
+    public LogicalOperatorDeepCopyVisitor(Counter counter) {
+        this.counter = counter;
+        this.inVarMapping = Collections.emptyMap();
+        exprDeepCopyVisitor = new LogicalExpressionDeepCopyVisitor(counter, inVarMapping, outVarMapping);
+    }
+
+    /**
+     * @param counter
+     *            Starting variable counter.
+     * @param inVarMapping
+     *            Variable mapping keyed by variables in the original plan.
+     *            Those variables are replaced by their corresponding value in the map in the copied plan.
+     */
+    public LogicalOperatorDeepCopyVisitor(Counter counter, Map<LogicalVariable, LogicalVariable> inVarMapping) {
+        this.counter = counter;
+        this.inVarMapping = inVarMapping;
+        exprDeepCopyVisitor = new LogicalExpressionDeepCopyVisitor(counter, inVarMapping, outVarMapping);
+    }
+
+    private void copyAnnotations(ILogicalOperator src, ILogicalOperator dest) {
+        dest.getAnnotations().putAll(src.getAnnotations());
+    }
+
+    public ILogicalOperator deepCopy(ILogicalOperator op, ILogicalOperator arg) throws AlgebricksException {
+        return op.accept(this, arg);
+    }
+
+    private void deepCopyInputs(ILogicalOperator src, ILogicalOperator dest, ILogicalOperator arg)
+            throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> inputs = src.getInputs();
+        List<Mutable<ILogicalOperator>> inputsCopy = dest.getInputs();
+        for (Mutable<ILogicalOperator> input : inputs) {
+            inputsCopy.add(deepCopyOperatorReference(input, arg));
+        }
+    }
+
+    private Mutable<ILogicalOperator> deepCopyOperatorReference(Mutable<ILogicalOperator> opRef, ILogicalOperator arg)
+            throws AlgebricksException {
+        return new MutableObject<ILogicalOperator>(deepCopy(opRef.getValue(), arg));
+    }
+
+    private List<Mutable<ILogicalOperator>> deepCopyOperatorReferenceList(List<Mutable<ILogicalOperator>> list,
+            ILogicalOperator arg) throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> listCopy = new ArrayList<Mutable<ILogicalOperator>>(list.size());
+        for (Mutable<ILogicalOperator> opRef : list) {
+            listCopy.add(deepCopyOperatorReference(opRef, arg));
+        }
+        return listCopy;
+    }
+
+    private IOrder deepCopyOrder(IOrder order) {
+        switch (order.getKind()) {
+            case ASC:
+            case DESC:
+                return order;
+            case FUNCTIONCALL:
+            default:
+                throw new UnsupportedOperationException();
+        }
+    }
+
+    private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderExpressionReferencePairList(
+            List<Pair<IOrder, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
+        ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>(
+                list.size());
+        for (Pair<IOrder, Mutable<ILogicalExpression>> pair : list) {
+            listCopy.add(new Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>(deepCopyOrder(pair.first),
+                    exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
+        }
+        return listCopy;
+    }
+
+    private ILogicalPlan deepCopyPlan(ILogicalPlan plan, ILogicalOperator arg) throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> rootsCopy = deepCopyOperatorReferenceList(plan.getRoots(), arg);
+        ILogicalPlan planCopy = new ALogicalPlanImpl(rootsCopy);
+        return planCopy;
+    }
+
+    private List<ILogicalPlan> deepCopyPlanList(List<ILogicalPlan> list, List<ILogicalPlan> listCopy,
+            ILogicalOperator arg) throws AlgebricksException {
+        for (ILogicalPlan plan : list) {
+            listCopy.add(deepCopyPlan(plan, arg));
+        }
+        return listCopy;
+    }
+
+    private LogicalVariable deepCopyVariable(LogicalVariable var) {
+        if (var == null) {
+            return null;
+        }
+        LogicalVariable givenVarReplacement = inVarMapping.get(var);
+        if (givenVarReplacement != null) {
+            outVarMapping.put(var, givenVarReplacement);
+            return givenVarReplacement;
+        }
+        LogicalVariable varCopy = outVarMapping.get(var);
+        if (varCopy == null) {
+            counter.inc();
+            varCopy = new LogicalVariable(counter.get());
+            outVarMapping.put(var, varCopy);
+        }
+        return varCopy;
+    }
+
+    private List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> deepCopyVariableExpressionReferencePairList(
+            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
+        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(
+                list.size());
+        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : list) {
+            listCopy.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(deepCopyVariable(pair.first),
+                    exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
+        }
+        return listCopy;
+    }
+
+    // TODO return List<...>
+    private ArrayList<LogicalVariable> deepCopyVariableList(List<LogicalVariable> list) {
+        ArrayList<LogicalVariable> listCopy = new ArrayList<LogicalVariable>(list.size());
+        for (LogicalVariable var : list) {
+            listCopy.add(deepCopyVariable(var));
+        }
+        return listCopy;
+    }
+
+    public void reset() {
+        outVarMapping.clear();
+    }
+
+    public void updatePrimaryKeys(IOptimizationContext context) {
+        for (Map.Entry<LogicalVariable, LogicalVariable> entry : outVarMapping.entrySet()) {
+            List<LogicalVariable> primaryKey = context.findPrimaryKey(entry.getKey());
+            if (primaryKey != null) {
+                List<LogicalVariable> head = new ArrayList<LogicalVariable>();
+                for (LogicalVariable variable : primaryKey) {
+                    head.add(outVarMapping.get(variable));
+                }
+                List<LogicalVariable> tail = new ArrayList<LogicalVariable>(1);
+                tail.add(entry.getValue());
+                context.addPrimaryKey(new FunctionalDependency(head, tail));
+            }
+        }
+    }
+
+    public LogicalVariable varCopy(LogicalVariable var) throws AlgebricksException {
+        return outVarMapping.get(var);
+    }
+
+    @Override
+    public ILogicalOperator visitAggregateOperator(AggregateOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        AggregateOperator opCopy = new AggregateOperator(deepCopyVariableList(op.getVariables()),
+                exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitAssignOperator(AssignOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AssignOperator opCopy = new AssignOperator(deepCopyVariableList(op.getVariables()),
+                exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitDataScanOperator(DataSourceScanOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        DataSourceScanOperator opCopy = new DataSourceScanOperator(deepCopyVariableList(op.getVariables()),
+                op.getDataSource());
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitDistinctOperator(DistinctOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, ILogicalOperator arg) {
+        EmptyTupleSourceOperator opCopy = new EmptyTupleSourceOperator();
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitExchangeOperator(ExchangeOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitGroupByOperator(GroupByOperator op, ILogicalOperator arg) throws AlgebricksException {
+        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByListCopy = deepCopyVariableExpressionReferencePairList(op
+                .getGroupByList());
+        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorListCopy = deepCopyVariableExpressionReferencePairList(op
+                .getDecorList());
+        List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
+
+        GroupByOperator opCopy = new GroupByOperator(groupByListCopy, decorListCopy, nestedPlansCopy);
+        deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        InnerJoinOperator opCopy = new InnerJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op
+                .getCondition()), deepCopyOperatorReference(op.getInputs().get(0), null), deepCopyOperatorReference(op
+                .getInputs().get(1), null));
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitLeftOuterJoinOperator(LeftOuterJoinOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitLimitOperator(LimitOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(arg));
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitOrderOperator(OrderOperator op, ILogicalOperator arg) throws AlgebricksException {
+        OrderOperator opCopy = new OrderOperator(deepCopyOrderExpressionReferencePairList(op.getOrderExpressions()));
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitProjectOperator(ProjectOperator op, ILogicalOperator arg) throws AlgebricksException {
+        ProjectOperator opCopy = new ProjectOperator(deepCopyVariableList(op.getVariables()));
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitMaterializeOperator(MaterializeOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitRunningAggregateOperator(RunningAggregateOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitScriptOperator(ScriptOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitSelectOperator(SelectOperator op, ILogicalOperator arg) throws AlgebricksException {
+        SelectOperator opCopy = new SelectOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
+                op.getRetainNull(), deepCopyVariable(op.getNullPlaceholderVariable()));
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitSubplanOperator(SubplanOperator op, ILogicalOperator arg) throws AlgebricksException {
+        List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
+
+        SubplanOperator opCopy = new SubplanOperator(nestedPlansCopy);
+        deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitUnionOperator(UnionAllOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitUnnestMapOperator(UnnestMapOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        UnnestMapOperator opCopy = new UnnestMapOperator(deepCopyVariableList(op.getVariables()),
+                exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()), op.getVariableTypes(),
+                op.propagatesInput());
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitUnnestOperator(UnnestOperator op, ILogicalOperator arg) throws AlgebricksException {
+        UnnestOperator opCopy = new UnnestOperator(deepCopyVariable(op.getVariable()),
+                exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()),
+                deepCopyVariable(op.getPositionalVariable()), op.getPositionalVariableType(), op.getPositionWriter());
+        deepCopyInputs(op, opCopy, arg);
+        copyAnnotations(op, opCopy);
+        opCopy.setExecutionMode(op.getExecutionMode());
+        return opCopy;
+    }
+
+    @Override
+    public ILogicalOperator visitWriteOperator(WriteOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitDistributeResultOperator(DistributeResultOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitWriteResultOperator(WriteResultOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitInsertDeleteOperator(InsertDeleteOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitTokenizeOperator(TokenizeOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitSinkOperator(SinkOperator op, ILogicalOperator arg) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public ILogicalOperator visitExtensionOperator(ExtensionOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        throw new UnsupportedOperationException();
+    }
+
+    public Map<LogicalVariable, LogicalVariable> getVariableMapping() {
+        return outVarMapping;
+    }
+
+    @Override
+    public ILogicalOperator visitExternalDataLookupOperator(ExternalDataLookupOperator op, ILogicalOperator arg)
+            throws AlgebricksException {
+        throw new UnsupportedOperationException();
+    }
+}

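A minimal usage sketch of the deep-copy visitor above, for orientation (editorial, not part of this commit). It keeps the pre-repackage edu.uci.ics package names used in these hunks; the Counter(int) constructor, the getVarCounter()/setVarCounter(int) accessors on IOptimizationContext, and the import location of LogicalOperatorDeepCopyVisitor are assumptions, since they do not appear in this hunk.

import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
// import for LogicalOperatorDeepCopyVisitor omitted: its package is not visible in this hunk

public class DeepCopyUsageSketch {

    // Copies the subplan rooted at 'root'. The variable counter is threaded through the
    // optimization context so that variables minted for the copy do not collide with existing ones.
    public static ILogicalOperator copySubplan(ILogicalOperator root, IOptimizationContext context)
            throws AlgebricksException {
        Counter counter = new Counter(context.getVarCounter());      // assumed constructor and accessor
        LogicalOperatorDeepCopyVisitor copier = new LogicalOperatorDeepCopyVisitor(counter);
        ILogicalOperator copy = copier.deepCopy(root, null);         // 'arg' anchors copied NestedTupleSources
        context.setVarCounter(counter.get());                        // publish variables created for the copy
        copier.updatePrimaryKeys(context);                           // re-register primary-key dependencies
        return copy;                                                 // copier.getVariableMapping() maps old vars to new ones
    }
}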
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java
new file mode 100644
index 0000000..d65c67d
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/CommitOperator.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.algebra.operators;
+
+import java.util.Collection;
+import java.util.List;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractExtensibleLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorExtension;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+
+public class CommitOperator extends AbstractExtensibleLogicalOperator {
+
+    private final List<LogicalVariable> primaryKeyLogicalVars;
+
+    public CommitOperator(List<LogicalVariable> primaryKeyLogicalVars) {
+        this.primaryKeyLogicalVars = primaryKeyLogicalVars;
+    }
+
+    @Override
+    public boolean isMap() {
+        // Commit is not treated as a map-style operator.
+        return false;
+    }
+
+    @Override
+    public IOperatorExtension newInstance() {
+        return new CommitOperator(primaryKeyLogicalVars);
+    }
+
+    @Override
+    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
+        // No expressions to transform.
+        return false;
+    }
+
+    @Override
+    public String toString() {
+        return "commit";
+    }
+
+    @Override
+    public void getUsedVariables(Collection<LogicalVariable> usedVars) {
+        usedVars.addAll(primaryKeyLogicalVars);
+    }
+
+    @Override
+    public void getProducedVariables(Collection<LogicalVariable> producedVars) {
+        // No produced variables.
+    }
+}


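An editorial sketch (not part of this commit) of how an IOperatorExtension such as the CommitOperator above is typically plugged into a plan by wrapping it in an ExtensionOperator. The ExtensionOperator(IOperatorExtension) constructor is an assumption here; it is not shown in these hunks.

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.mutable.MutableObject;

import edu.uci.ics.asterix.algebra.operators.CommitOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;

public class CommitOperatorSketch {

    // Wraps a CommitOperator in an ExtensionOperator and places it on top of 'input',
    // so the given primary-key variables are the ones the commit reports as used.
    public static ExtensionOperator attachCommit(ILogicalOperator input, List<LogicalVariable> primaryKeyVars) {
        CommitOperator commit = new CommitOperator(new ArrayList<LogicalVariable>(primaryKeyVars));
        ExtensionOperator extension = new ExtensionOperator(commit);  // assumed constructor
        extension.getInputs().add(new MutableObject<ILogicalOperator>(input));
        return extension;
    }
}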
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixClientConfig.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixClientConfig.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixClientConfig.java
deleted file mode 100644
index 9c3223e..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixClientConfig.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.kohsuke.args4j.Argument;
-import org.kohsuke.args4j.Option;
-
-public class AsterixClientConfig {
-    @Option(name = "-optimize", usage = "Turns compiler optimizations on (if set to true) or off (if set to false). It is true by default.")
-    public String optimize = "true";
-
-    @Option(name = "-only-physical", usage = "Prints only the physical annotations, not the entire operators. It is false by default.")
-    public String onlyPhysical = "false";
-
-    @Option(name = "-execute", usage = "Executes the job produced by the compiler. It is false by default.")
-    public String execute = "false";
-
-    @Option(name = "-hyracks-job", usage = "Generates and prints the Hyracks job. It is false by default.")
-    public String hyracksJob = "false";
-
-    @Option(name = "-hyracks-port", usage = "The port used to connect to the Hyracks server.")
-    public int hyracksPort = AsterixHyracksIntegrationUtil.DEFAULT_HYRACKS_CC_CLIENT_PORT;
-
-    @Argument
-    private List<String> arguments = new ArrayList<String>();
-
-    public List<String> getArguments() {
-        return arguments;
-    }
-}

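An editorial sketch (not part of this commit): a bean annotated with args4j's @Option/@Argument, like AsterixClientConfig above, is normally bound with a CmdLineParser. The driver class that actually does this is not in this hunk.

import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;

import edu.uci.ics.asterix.api.common.AsterixClientConfig;

public class ClientConfigSketch {

    public static void main(String[] args) throws CmdLineException {
        AsterixClientConfig config = new AsterixClientConfig();
        new CmdLineParser(config).parseArgument(args);   // binds -optimize, -execute, ... plus positional file names
        System.out.println("optimize=" + config.optimize + " execute=" + config.execute
                + " files=" + config.getArguments());
    }
}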
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
deleted file mode 100644
index 91aa897..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.io.File;
-import java.util.EnumSet;
-
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.hyracks.bootstrap.CCApplicationEntryPoint;
-import edu.uci.ics.asterix.hyracks.bootstrap.NCApplicationEntryPoint;
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.job.JobFlag;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
-import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
-import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
-import edu.uci.ics.hyracks.control.nc.NodeControllerService;
-
-public class AsterixHyracksIntegrationUtil {
-
-    public static final int NODES = 2;
-    public static final int PARTITONS = 2;
-
-    public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
-
-    public static final int DEFAULT_HYRACKS_CC_CLUSTER_PORT = 1099;
-
-    private static ClusterControllerService cc;
-    private static NodeControllerService[] ncs = new NodeControllerService[NODES];
-    private static IHyracksClientConnection hcc;
-
-    public static void init() throws Exception {
-        CCConfig ccConfig = new CCConfig();
-        ccConfig.clusterNetIpAddress = "127.0.0.1";
-        ccConfig.clientNetIpAddress = "127.0.0.1";
-        ccConfig.clientNetPort = DEFAULT_HYRACKS_CC_CLIENT_PORT;
-        ccConfig.clusterNetPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
-        ccConfig.defaultMaxJobAttempts = 0;
-        ccConfig.resultTTL = 30000;
-        ccConfig.resultSweepThreshold = 1000;
-        ccConfig.appCCMainClass = CCApplicationEntryPoint.class.getName();
-        // ccConfig.useJOL = true;
-        cc = new ClusterControllerService(ccConfig);
-        cc.start();
-
-        int n = 0;
-        for (String ncName : getNcNames()) {
-            NCConfig ncConfig1 = new NCConfig();
-            ncConfig1.ccHost = "localhost";
-            ncConfig1.ccPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
-            ncConfig1.clusterNetIPAddress = "127.0.0.1";
-            ncConfig1.dataIPAddress = "127.0.0.1";
-            ncConfig1.resultIPAddress = "127.0.0.1";
-            ncConfig1.nodeId = ncName;
-            ncConfig1.resultTTL = 30000;
-            ncConfig1.resultSweepThreshold = 1000;
-            for (int p = 0; p < PARTITONS; ++p) {
-                if (p == 0) {
-                    ncConfig1.ioDevices = System.getProperty("java.io.tmpdir") + File.separator + ncConfig1.nodeId
-                            + "/iodevice" + p;
-                } else {
-                    ncConfig1.ioDevices += "," + System.getProperty("java.io.tmpdir") + File.separator
-                            + ncConfig1.nodeId + "/iodevice" + p;
-                }
-            }
-            ncConfig1.appNCMainClass = NCApplicationEntryPoint.class.getName();
-            ncs[n] = new NodeControllerService(ncConfig1);
-            ncs[n].start();
-            ++n;
-        }
-
-        hcc = new HyracksConnection(cc.getConfig().clientNetIpAddress, cc.getConfig().clientNetPort);
-    }
-
-    public static String[] getNcNames() {
-        String[] names = new String[NODES];
-        for (int n = 0; n < NODES; ++n) {
-            names[n] = "nc" + (n + 1);
-        }
-        return names;
-    }
-
-    public static String[] getDataDirs() {
-        String[] names = new String[NODES];
-        for (int n = 0; n < NODES; ++n) {
-            names[n] = "nc" + (n + 1) + "data";
-        }
-        return names;
-    }
-
-    public static IHyracksClientConnection getHyracksClientConnection() {
-        return hcc;
-    }
-
-    public static void deinit() throws Exception {
-        for (int n = 0; n < ncs.length; ++n) {
-            if (ncs[n] != null)
-                ncs[n].stop();
-
-        }
-        if (cc != null)
-            cc.stop();
-    }
-
-    public static void runJob(JobSpecification spec) throws Exception {
-        GlobalConfig.ASTERIX_LOGGER.info(spec.toJSON().toString());
-        JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
-        GlobalConfig.ASTERIX_LOGGER.info(jobId.toString());
-        hcc.waitForCompletion(jobId);
-    }
-
-    /**
-     * main method to run a simple 2 node cluster in-process
-     * suggested VM arguments: <code>-enableassertions -Xmx2048m -Dfile.encoding=UTF-8</code>
-     *
-     * @param args
-     *            unused
-     */
-    public static void main(String[] args) {
-        Runtime.getRuntime().addShutdownHook(new Thread() {
-            public void run() {
-                try {
-                    deinit();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        });
-        try {
-            System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, "asterix-build-configuration.xml");
-
-            init();
-            while (true) {
-                Thread.sleep(10000);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-
-}

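An editorial sketch (not part of this commit) of the lifecycle this utility supports in tests: point the runtime at a configuration file, bring the in-process cluster up, use the client connection, and tear everything down. Only members shown in the hunk above are used.

import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

public class IntegrationUtilSketch {

    public static void main(String[] args) throws Exception {
        // Same configuration property that main() above sets before starting.
        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, "asterix-build-configuration.xml");
        AsterixHyracksIntegrationUtil.init();               // one CC plus NODES node controllers, in-process
        try {
            IHyracksClientConnection hcc = AsterixHyracksIntegrationUtil.getHyracksClientConnection();
            // Build a JobSpecification against 'hcc' here, then submit it:
            // AsterixHyracksIntegrationUtil.runJob(spec);
        } finally {
            AsterixHyracksIntegrationUtil.deinit();         // stop the node controllers, then the cluster controller
        }
    }
}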
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/FeedWorkCollection.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/FeedWorkCollection.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/FeedWorkCollection.java
deleted file mode 100644
index 736a270..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/FeedWorkCollection.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.DataverseDecl;
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aql.expression.SubscribeFeedStatement;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest.ConnectionStatus;
-import edu.uci.ics.asterix.common.feeds.api.IFeedWork;
-import edu.uci.ics.asterix.common.feeds.api.IFeedWorkEventListener;
-import edu.uci.ics.asterix.feeds.FeedCollectInfo;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.api.job.JobId;
-
-/**
- * A collection of feed management related tasks, each represented as an implementation of {@code IFeedWork}.
- */
-public class FeedWorkCollection {
-
-    private static Logger LOGGER = Logger.getLogger(FeedWorkCollection.class.getName());
-
-    /**
-     * The task of subscribing to a feed to obtain data.
-     */
-    public static class SubscribeFeedWork implements IFeedWork {
-
-        private final Runnable runnable;
-
-        private final FeedConnectionRequest request;
-
-        @Override
-        public Runnable getRunnable() {
-            return runnable;
-        }
-
-        public SubscribeFeedWork(String[] locations, FeedConnectionRequest request) {
-            this.runnable = new SubscribeFeedWorkRunnable(locations, request);
-            this.request = request;
-        }
-
-        private static class SubscribeFeedWorkRunnable implements Runnable {
-
-            private final FeedConnectionRequest request;
-            private final String[] locations;
-
-            public SubscribeFeedWorkRunnable(String[] locations, FeedConnectionRequest request) {
-                this.request = request;
-                this.locations = locations;
-            }
-
-            @Override
-            public void run() {
-                try {
-                    PrintWriter writer = new PrintWriter(System.out, true);
-                    SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-                    DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(request.getReceivingFeedId()
-                            .getDataverse()));
-                    SubscribeFeedStatement subscribeStmt = new SubscribeFeedStatement(locations, request);
-                    List<Statement> statements = new ArrayList<Statement>();
-                    statements.add(dataverseDecl);
-                    statements.add(subscribeStmt);
-                    AqlTranslator translator = new AqlTranslator(statements, pc);
-                    translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, AqlTranslator.ResultDelivery.SYNC);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Submitted connection requests for execution: " + request);
-                    }
-                } catch (Exception e) {
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe("Exception in executing " + request);
-                    }
-                    throw new RuntimeException(e);
-                }
-            }
-        }
-
-        public static class FeedSubscribeWorkEventListener implements IFeedWorkEventListener {
-
-            @Override
-            public void workFailed(IFeedWork work, Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request
-                            + " failed with exception " + e);
-                }
-            }
-
-            @Override
-            public void workCompleted(IFeedWork work) {
-                ((SubscribeFeedWork) work).request.setSubscriptionStatus(ConnectionStatus.ACTIVE);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request + " completed ");
-                }
-            }
-
-        }
-
-        public FeedConnectionRequest getRequest() {
-            return request;
-        }
-
-        @Override
-        public String toString() {
-            return "SubscribeFeedWork for [" + request + "]";
-        }
-
-    }
-
-    /**
-     * The task of activating a set of feeds.
-     */
-    public static class ActivateFeedWork implements IFeedWork {
-
-        private final Runnable runnable;
-
-        @Override
-        public Runnable getRunnable() {
-            return runnable;
-        }
-
-        public ActivateFeedWork(List<FeedCollectInfo> feedsToRevive) {
-            this.runnable = new FeedsActivateRunnable(feedsToRevive);
-        }
-
-        public ActivateFeedWork() {
-            this.runnable = new FeedsActivateRunnable();
-        }
-
-        private static class FeedsActivateRunnable implements Runnable {
-
-            private List<FeedCollectInfo> feedsToRevive;
-            private Mode mode;
-
-            public enum Mode {
-                REVIVAL_POST_NODE_REJOIN
-            }
-
-            public FeedsActivateRunnable(List<FeedCollectInfo> feedsToRevive) {
-                this.feedsToRevive = feedsToRevive;
-            }
-
-            public FeedsActivateRunnable() {
-            }
-
-            @Override
-            public void run() {
-                switch (mode) {
-                    case REVIVAL_POST_NODE_REJOIN:
-                        try {
-                            Thread.sleep(10000);
-                        } catch (InterruptedException e1) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Attempt to resume feed interrupted");
-                            }
-                            throw new IllegalStateException(e1.getMessage());
-                        }
-                        for (FeedCollectInfo finfo : feedsToRevive) {
-                            try {
-                                JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
-                                if (LOGGER.isLoggable(Level.INFO)) {
-                                    LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
-                                    LOGGER.info("Job:" + finfo.jobSpec);
-                                }
-                            } catch (Exception e) {
-                                if (LOGGER.isLoggable(Level.WARNING)) {
-                                    LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " "
-                                            + e.getMessage());
-                                }
-                            }
-                        }
-                }
-            }
-
-        }
-
-    }
-}

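An editorial sketch (not part of this commit) of how a SubscribeFeedWork can be driven. A plain ExecutorService stands in for the feed work manager here; the manager wiring and the construction of the FeedConnectionRequest are outside this hunk.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import edu.uci.ics.asterix.api.common.FeedWorkCollection.SubscribeFeedWork;
import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;

public class FeedWorkSketch {

    // Runs a subscription task; its Runnable executes the SubscribeFeedStatement via AqlTranslator.
    public static void schedule(String[] storageLocations, FeedConnectionRequest request) {
        SubscribeFeedWork work = new SubscribeFeedWork(storageLocations, request);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit(work.getRunnable());
        executor.shutdown();
    }
}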
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/Job.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/Job.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/Job.java
deleted file mode 100644
index f45a6c0..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/Job.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class Job {
-
-    public enum SubmissionMode {
-        SYNCHRONOUS,
-        ASYNCHRONOUS
-    }
-
-    private final JobSpecification jobSpec;
-    private final SubmissionMode submissionMode;
-
-    public Job(JobSpecification jobSpecification, SubmissionMode submissionMode) {
-        this.jobSpec = jobSpecification;
-        this.submissionMode = submissionMode;
-    }
-
-    public Job(JobSpecification jobSpec) {
-        this.jobSpec = jobSpec;
-        this.submissionMode = SubmissionMode.SYNCHRONOUS;
-    }
-
-    public JobSpecification getJobSpec() {
-        return jobSpec;
-    }
-
-    public SubmissionMode getSubmissionMode() {
-        return submissionMode;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/SessionConfig.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/SessionConfig.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/SessionConfig.java
deleted file mode 100644
index 27d981e..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/SessionConfig.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.io.PrintWriter;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * SessionConfig captures several different parameters for controlling
- * the execution of an APIFramework call.
- * <li> It specifies how the execution will proceed (for instance,
- * whether to optimize, or whether to execute at all).
- * <li> It allows you to specify where the primary execution output will
- * be sent.
- * <li> It also allows you to request additional output for optional
- * out-of-band data about the execution (query plan, etc).
- * <li> It allows you to specify the output format for the primary
- * execution output - JSON, CSV, etc.
- * <li> It allows you to specify output format-specific parameters.
- */
-
-public class SessionConfig {
-    /**
-     * Used to specify the output format for the primary execution.
-     */
-    public enum OutputFormat {
-        ADM,
-        JSON,
-        CSV
-    };
-
-    /**
-     * Produce out-of-band output for Hyracks Job.
-     */
-    public static final String OOB_HYRACKS_JOB = "oob-hyracks-job";
-
-    /**
-     * Produce out-of-band output for Expression Tree.
-     */
-    public static final String OOB_EXPR_TREE = "oob-expr-tree";
-
-    /**
-     * Produce out-of-band output for Rewritten Expression Tree.
-     */
-    public static final String OOB_REWRITTEN_EXPR_TREE = "oob-rewritten-expr-tree";
-
-    /**
-     * Produce out-of-band output for Logical Plan.
-     */
-    public static final String OOB_LOGICAL_PLAN = "oob-logical-plan";
-
-    /**
-     * Produce out-of-band output for Optimized Logical Plan.
-     */
-    public static final String OOB_OPTIMIZED_LOGICAL_PLAN = "oob-optimized-logical-plan";
-
-    /**
-     * Format flag: print only physical ops (for optimizer tests).
-     */
-    public static final String FORMAT_ONLY_PHYSICAL_OPS = "format-only-physical-ops";
-
-    /**
-     * Format flag: wrap out-of-band data in HTML.
-     */
-    public static final String FORMAT_HTML = "format-html";
-
-    /**
-     * Format flag: print CSV header line.
-     */
-    public static final String FORMAT_CSV_HEADER = "format-csv-header";
-
-    // Standard execution flags.
-    private final boolean executeQuery;
-    private final boolean generateJobSpec;
-    private final boolean optimize;
-
-    // Output path for primary execution.
-    private final PrintWriter out;
-
-    // Output format.
-    private final OutputFormat fmt;
-
-    // Flags.
-    private final Map<String,Boolean> flags;
-
-    /**
-     * Create a SessionConfig object with all default values:
-     *
-     * - All format flags set to "false".
-     * - All out-of-band outputs set to "null".
-     * - "Optimize" set to "true".
-     * - "Execute Query" set to "true".
-     * - "Generate Job Spec" set to "true".
-     * @param out PrintWriter for execution output.
-     * @param fmt Output format for execution output.
-     */
-    public SessionConfig(PrintWriter out, OutputFormat fmt) {
-        this(out, fmt, true, true, true);
-    }
-
-    /**
-     * Create a SessionConfig object with all optional values set to defaults:
-     *
-     * - All format flags set to "false".
-     * - All out-of-band outputs set to "false".
-     * @param out PrintWriter for execution output.
-     * @param fmt Output format for execution output.
-     * @param optimize Whether to optimize the execution.
-     * @param executeQuery Whether to execute the query or not.
-     * @param generateJobSpec Whether to generate the Hyracks job specification (if
-     *    false, job cannot be executed).
-     */
-    public SessionConfig(PrintWriter out, OutputFormat fmt, boolean optimize, boolean executeQuery, boolean generateJobSpec) {
-        this.out = out;
-        this.fmt = fmt;
-        this.optimize = optimize;
-        this.executeQuery = executeQuery;
-        this.generateJobSpec = generateJobSpec;
-        this.flags = new HashMap<String,Boolean>();
-    }
-
-    /**
-     * Retrieve the PrintWriter to produce output to.
-     */
-    public PrintWriter out() {
-        return this.out;
-    }
-
-    /**
-     * Retrieve the OutputFormat for this execution.
-     */
-    public OutputFormat fmt() {
-        return this.fmt;
-    }
-
-    /**
-     * Retrieve the value of the "execute query" flag.
-     */
-    public boolean isExecuteQuery() {
-        return executeQuery;
-    }
-
-    /**
-     * Retrieve the value of the "optimize" flag.
-     */
-    public boolean isOptimize() {
-        return optimize;
-    }
-
-    /**
-     * Retrieve the value of the "generate job spec" flag.
-     */
-    public boolean isGenerateJobSpec() {
-        return generateJobSpec;
-    }
-
-    /**
-     * Specify all out-of-band settings at once. For convenience of older code.
-     */
-    public void setOOBData(boolean expr_tree, boolean rewritten_expr_tree,
-                           boolean logical_plan, boolean optimized_logical_plan,
-                           boolean hyracks_job) {
-        this.set(OOB_EXPR_TREE, expr_tree);
-        this.set(OOB_REWRITTEN_EXPR_TREE, rewritten_expr_tree);
-        this.set(OOB_LOGICAL_PLAN, logical_plan);
-        this.set(OOB_OPTIMIZED_LOGICAL_PLAN, optimized_logical_plan);
-        this.set(OOB_HYRACKS_JOB, hyracks_job);
-    }
-
-    /**
-     * Specify a flag.
-     * @param flag One of the OOB_ or FORMAT_ constants from this class.
-     * @param value Value for the flag (all flags default to "false").
-     */
-    public void set(String flag, boolean value) {
-        flags.put(flag, Boolean.valueOf(value));
-    }
-
-    /**
-     * Retrieve the setting of a format-specific flag.
-     * @param flag One of the FORMAT_ constants from this class.
-     * @return true or false (all flags default to "false").
-     */
-    public boolean is(String flag) {
-        Boolean value = flags.get(flag);
-        return value == null ? false : value.booleanValue();
-    }
-}

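A short editorial sketch (not part of this commit) showing how the SessionConfig API above fits together: the two-argument constructor keeps the optimize/execute/generate defaults, and flags select out-of-band output.

import java.io.PrintWriter;

import edu.uci.ics.asterix.api.common.SessionConfig;
import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;

public class SessionConfigSketch {

    // Builds a config that optimizes, compiles and executes, prints JSON results to 'out',
    // and additionally emits the optimized logical plan out of band.
    public static SessionConfig jsonWithOptimizedPlan(PrintWriter out) {
        SessionConfig conf = new SessionConfig(out, OutputFormat.JSON);
        conf.set(SessionConfig.OOB_OPTIMIZED_LOGICAL_PLAN, true);
        return conf;
    }
}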
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/APIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/APIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/APIServlet.java
deleted file mode 100644
index 4984741..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/APIServlet.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.awt.image.BufferedImage;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.PrintWriter;
-import java.util.List;
-import java.util.logging.Level;
-
-import javax.imageio.ImageIO;
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.aql.parser.TokenMgrError;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.result.ResultReader;
-import edu.uci.ics.asterix.result.ResultUtils;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
-import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
-
-public class APIServlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
-
-    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
-
-    @Override
-    public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
-        OutputFormat format;
-        boolean csv_and_header = false;
-        String output = request.getParameter("output-format");
-        if (output.equals("ADM")) {
-            format = OutputFormat.ADM;
-        }
-        else if (output.equals("CSV")) {
-            format = OutputFormat.CSV;
-        }
-        else if (output.equals("CSV-Header")) {
-            format = OutputFormat.CSV;
-            csv_and_header = true;
-        }
-        else {
-            // Default output format
-            format = OutputFormat.JSON;
-        }
-
-        String query = request.getParameter("query");
-        String printExprParam = request.getParameter("print-expr-tree");
-        String printRewrittenExprParam = request.getParameter("print-rewritten-expr-tree");
-        String printLogicalPlanParam = request.getParameter("print-logical-plan");
-        String printOptimizedLogicalPlanParam = request.getParameter("print-optimized-logical-plan");
-        String printJob = request.getParameter("print-job");
-        String executeQuery = request.getParameter("execute-query");
-        response.setCharacterEncoding("utf-8");
-        response.setContentType("text/html");
-        PrintWriter out = response.getWriter();
-        ServletContext context = getServletContext();
-        IHyracksClientConnection hcc;
-        IHyracksDataset hds;
-
-        try {
-            synchronized (context) {
-                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
-
-                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
-                if (hds == null) {
-                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
-                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
-                }
-            }
-            AQLParser parser = new AQLParser(query);
-            List<Statement> aqlStatements = parser.parse();
-            SessionConfig sessionConfig = new SessionConfig(out, format, true, isSet(executeQuery), true);
-            sessionConfig.set(SessionConfig.FORMAT_HTML, true);
-            sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csv_and_header);
-            sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam),
-                                     isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam),
-                                     isSet(printJob));
-            MetadataManager.INSTANCE.init();
-            AqlTranslator aqlTranslator = new AqlTranslator(aqlStatements, sessionConfig);
-            double duration = 0;
-            long startTime = System.currentTimeMillis();
-            aqlTranslator.compileAndExecute(hcc, hds, AqlTranslator.ResultDelivery.SYNC);
-            long endTime = System.currentTimeMillis();
-            duration = (endTime - startTime) / 1000.00;
-            out.println("<PRE>Duration of all jobs: " + duration + " sec</PRE>");
-        } catch (ParseException | TokenMgrError | edu.uci.ics.asterix.aqlplus.parser.TokenMgrError pe) {
-            GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.toString(), pe);
-            ResultUtils.webUIParseExceptionHandler(out, pe, query);
-        } catch (Exception e) {
-            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
-            ResultUtils.webUIErrorHandler(out, e);
-        }
-    }
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-        String resourcePath = null;
-        String requestURI = request.getRequestURI();
-
-        if (requestURI.equals("/")) {
-            response.setContentType("text/html");
-            resourcePath = "/webui/querytemplate.html";
-        } else {
-            resourcePath = requestURI;
-        }
-
-        InputStream is = APIServlet.class.getResourceAsStream(resourcePath);
-        if (is == null) {
-            response.sendError(HttpServletResponse.SC_NOT_FOUND);
-            return;
-        }
-
-        // Special handler for font files and .png resources
-        if (resourcePath.endsWith(".png")) {
-
-            BufferedImage img = ImageIO.read(is);
-            OutputStream outputStream = response.getOutputStream();
-            String formatName = "png";
-            response.setContentType("image/png");
-            ImageIO.write(img, formatName, outputStream);
-            outputStream.close();
-            return;
-
-        }
-
-        response.setCharacterEncoding("utf-8");
-        InputStreamReader isr = new InputStreamReader(is);
-        StringBuilder sb = new StringBuilder();
-        BufferedReader br = new BufferedReader(isr);
-        String line = br.readLine();
-
-        while (line != null) {
-            sb.append(line);
-            line = br.readLine();
-        }
-
-        PrintWriter out = response.getWriter();
-        out.println(sb.toString());
-    }
-
-    private static boolean isSet(String requestParameter) {
-        return (requestParameter != null && requestParameter.equals("true"));
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/AQLAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/AQLAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/AQLAPIServlet.java
deleted file mode 100644
index 60e1261..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/AQLAPIServlet.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.servlet.http.HttpServletRequest;
-
-import edu.uci.ics.asterix.aql.base.Statement.Kind;
-
-public class AQLAPIServlet extends RESTAPIServlet {
-
-    private static final long serialVersionUID = 1L;
-
-    private static final String AQL_STMT_PARAM_NAME = "aql";
-
-    private static final List<Kind> allowedStatements = new ArrayList<>();
-
-    static {
-        for (Kind k : Kind.values()) {
-            allowedStatements.add(k);
-        }
-    }
-
-    @Override
-    protected String getQueryParameter(HttpServletRequest request) {
-        return request.getParameter(AQL_STMT_PARAM_NAME);
-    }
-
-    @Override
-    protected List<Kind> getAllowedStatements() {
-        return allowedStatements;
-    }
-
-    @Override
-    protected String getErrorMessage() {
-        throw new IllegalStateException();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
deleted file mode 100644
index 267aac5..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServlet.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.util.List;
-import java.util.Map;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.feeds.CentralFeedManager;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-
-/***
- * The REST API that takes a dataverse name and a dataset name as the input
- * and returns an array of file splits (IP, file-path) of the dataset in JSON.
- * It is mostly used by external runtime, e.g., Pregelix or IMRU to pull data
- * in parallel from existing AsterixDB datasets.
- *
- * @author yingyi
- */
-public class ConnectorAPIServlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-        response.setContentType("text/html");
-        response.setCharacterEncoding("utf-8");
-        PrintWriter out = response.getWriter();
-        try {
-            JSONObject jsonResponse = new JSONObject();
-            String dataverseName = request.getParameter("dataverseName");
-            String datasetName = request.getParameter("datasetName");
-            if (dataverseName == null || datasetName == null) {
-                jsonResponse.put("error", "Parameter dataverseName or datasetName is null,");
-                out.write(jsonResponse.toString());
-                out.flush();
-                return;
-            }
-            ServletContext context = getServletContext();
-
-            IHyracksClientConnection hcc = null;
-            synchronized (context) {
-                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
-            }
-
-            // Metadata transaction begins.
-            MetadataManager.INSTANCE.init();
-            MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-
-            // Retrieves file splits of the dataset.
-            AqlMetadataProvider metadataProvider = new AqlMetadataProvider(null, CentralFeedManager.getInstance());
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-            if (dataset == null) {
-                jsonResponse.put("error", "Dataset " + datasetName + " does not exist in " + "dataverse "
-                        + dataverseName);
-                out.write(jsonResponse.toString());
-                out.flush();
-                return;
-            }
-            boolean temp = dataset.getDatasetDetails().isTemp();
-            FileSplit[] fileSplits = metadataProvider.splitsForDataset(mdTxnCtx, dataverseName, datasetName,
-                    datasetName, temp);
-            ARecordType recordType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
-            List<List<String>> primaryKeys = DatasetUtils.getPartitioningKeys(dataset);
-            StringBuilder pkStrBuf = new StringBuilder();
-            for (List<String> keys : primaryKeys) {
-                for (String key : keys) {
-                    pkStrBuf.append(key).append(",");
-                }
-            }
-            pkStrBuf.delete(pkStrBuf.length() - 1, pkStrBuf.length());
-
-            // Constructs the returned json object.
-            formResponseObject(jsonResponse, fileSplits, recordType, pkStrBuf.toString(), hcc.getNodeControllerInfos());
-            // Metadata transaction commits.
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            // Writes file splits.
-            out.write(jsonResponse.toString());
-            out.flush();
-        } catch (Exception e) {
-            e.printStackTrace();
-            out.println(e.getMessage());
-            out.flush();
-            e.printStackTrace(out);
-        }
-    }
-
-    private void formResponseObject(JSONObject jsonResponse, FileSplit[] fileSplits, ARecordType recordType,
-            String primaryKeys, Map<String, NodeControllerInfo> nodeMap) throws Exception {
-        JSONArray partititons = new JSONArray();
-        // Adds a primary key.
-        jsonResponse.put("keys", primaryKeys);
-        // Adds record type.
-        jsonResponse.put("type", recordType.toJSON());
-        // Generates file partitions.
-        for (FileSplit split : fileSplits) {
-            String ipAddress = nodeMap.get(split.getNodeName()).getNetworkAddress().getAddress().toString();
-            String path = split.getLocalFile().getFile().getAbsolutePath();
-            FilePartition partition = new FilePartition(ipAddress, path);
-            partititons.put(partition.toJSONObject());
-        }
-        // Generates the response object which contains the splits.
-        jsonResponse.put("splits", partititons);
-    }
-}
-
-class FilePartition {
-    private final String ipAddress;
-    private final String path;
-
-    public FilePartition(String ipAddress, String path) {
-        this.ipAddress = ipAddress;
-        this.path = path;
-    }
-
-    public String getIPAddress() {
-        return ipAddress;
-    }
-
-    public String getPath() {
-        return path;
-    }
-
-    @Override
-    public String toString() {
-        return ipAddress + ":" + path;
-    }
-
-    public JSONObject toJSONObject() throws JSONException {
-        JSONObject partition = new JSONObject();
-        partition.put("ip", ipAddress);
-        partition.put("path", path);
-        return partition;
-    }
-}
\ No newline at end of file
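
The response document assembled above carries three fields: "keys" (comma-separated primary key fields), "type" (the dataset's record type), and "splits" (an array of {"ip", "path"} objects). A hypothetical client sketch, not part of the commit; the host, port, and "/connector" path are assumptions, while the parameter names and response fields mirror the servlet code:

    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.Scanner;

    import org.json.JSONArray;
    import org.json.JSONObject;

    public final class ConnectorApiClientSketch {

        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:19002/connector?dataverseName=Metadata&datasetName=Dataset");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();

            String body;
            try (InputStream in = conn.getInputStream();
                    Scanner scanner = new Scanner(in, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
                body = scanner.hasNext() ? scanner.next() : "";
            }

            JSONObject response = new JSONObject(body);
            if (response.has("error")) {
                System.err.println(response.getString("error"));
                return;
            }
            System.out.println("Primary keys: " + response.getString("keys"));
            JSONArray splits = response.getJSONArray("splits");
            for (int i = 0; i < splits.length(); i++) {
                JSONObject split = splits.getJSONObject(i);
                System.out.println(split.getString("ip") + ":" + split.getString("path"));
            }
        }
    }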

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java
deleted file mode 100644
index e43a15a..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/DDLAPIServlet.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.util.Arrays;
-import java.util.List;
-
-import javax.servlet.http.HttpServletRequest;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.base.Statement.Kind;
-
-public class DDLAPIServlet extends RESTAPIServlet {
-    private static final long serialVersionUID = 1L;
-
-    protected String getQueryParameter(HttpServletRequest request) {
-        return request.getParameter("ddl");
-    }
-
-    protected List<Statement.Kind> getAllowedStatements() {
-        Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DATAVERSE_DROP, Kind.DATASET_DECL, Kind.NODEGROUP_DECL,
-                Kind.NODEGROUP_DROP, Kind.TYPE_DECL, Kind.TYPE_DROP, Kind.CREATE_INDEX, Kind.INDEX_DECL,
-                Kind.CREATE_DATAVERSE, Kind.DATASET_DROP, Kind.INDEX_DROP, Kind.CREATE_FUNCTION, Kind.FUNCTION_DROP,
-                Kind.CREATE_PRIMARY_FEED, Kind.CREATE_SECONDARY_FEED, Kind.DROP_FEED, Kind.CREATE_FEED_POLICY, Kind.DROP_FEED_POLICY };
-        return Arrays.asList(statementsArray);
-    }
-
-    protected String getErrorMessage() {
-        return "Invalid statement: Non-DDL statement %s to the DDL API.";
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServlet.java
deleted file mode 100644
index 3fe1ff7..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServlet.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.awt.image.BufferedImage;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.PrintWriter;
-import java.util.Collection;
-
-import javax.imageio.ImageIO;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import edu.uci.ics.asterix.common.feeds.FeedActivity;
-import edu.uci.ics.asterix.common.feeds.FeedActivity.FeedActivityDetails;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedId;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import edu.uci.ics.asterix.feeds.CentralFeedManager;
-
-public class FeedServlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-        String resourcePath = null;
-        String requestURI = request.getRequestURI();
-
-        if (requestURI.equals("/")) {
-            response.setContentType("text/html");
-            resourcePath = "/feed/home.html";
-        } else {
-            resourcePath = requestURI;
-        }
-
-        InputStream is = FeedServlet.class.getResourceAsStream(resourcePath);
-        if (is == null) {
-            response.sendError(HttpServletResponse.SC_NOT_FOUND);
-            return;
-        }
-
-        // Special handler for font files and .png resources
-        if (resourcePath.endsWith(".png")) {
-
-            BufferedImage img = ImageIO.read(is);
-            OutputStream outputStream = response.getOutputStream();
-            String formatName = "png";
-            response.setContentType("image/png");
-            ImageIO.write(img, formatName, outputStream);
-            outputStream.close();
-            return;
-
-        }
-
-        response.setCharacterEncoding("utf-8");
-        InputStreamReader isr = new InputStreamReader(is);
-        StringBuilder sb = new StringBuilder();
-        BufferedReader br = new BufferedReader(isr);
-        String line = br.readLine();
-
-        while (line != null) {
-            sb.append(line + "\n");
-            line = br.readLine();
-        }
-
-        String outStr = null;
-        if (requestURI.startsWith("/webui/static")) {
-            outStr = sb.toString();
-        } else {
-            Collection<FeedActivity> lfa = CentralFeedManager.getInstance().getFeedLoadManager().getFeedActivities();
-            StringBuilder ldStr = new StringBuilder();
-            ldStr.append("<br />");
-            ldStr.append("<br />");
-            if (lfa == null || lfa.isEmpty()) {
-                ldStr.append("Currently there are no active feeds in AsterixDB");
-            } else {
-                ldStr.append("Active Feeds");
-            }
-            insertTable(ldStr, lfa);
-            outStr = String.format(sb.toString(), ldStr.toString());
-
-        }
-
-        PrintWriter out = response.getWriter();
-        out.println(outStr);
-    }
-
-    private void insertTable(StringBuilder html, Collection<FeedActivity> list) {
-        html.append("<table style=\"width:100%\">");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_FEED_NAME + "</th>");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_DATASET_NAME + "</th>");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_ACTIVE_SINCE + "</th>");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_INTAKE_STAGE + "</th>");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_COMPUTE_STAGE + "</th>");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_STORE_STAGE + "</th>");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_INTAKE_RATE + "</th>");
-        html.append("<th>" + FeedServletUtil.Constants.TABLE_HEADER_STORE_RATE + "</th>");
-        for (FeedActivity activity : list) {
-            insertRow(html, activity);
-        }
-    }
-
-    private void insertRow(StringBuilder html, FeedActivity activity) {
-        String intake = activity.getFeedActivityDetails().get(FeedActivityDetails.INTAKE_LOCATIONS);
-        String compute = activity.getFeedActivityDetails().get(FeedActivityDetails.COMPUTE_LOCATIONS);
-        String store = activity.getFeedActivityDetails().get(FeedActivityDetails.STORAGE_LOCATIONS);
-
-        IFeedLoadManager loadManager = CentralFeedManager.getInstance().getFeedLoadManager();
-        FeedConnectionId connectionId = new FeedConnectionId(new FeedId(activity.getDataverseName(),
-                activity.getFeedName()), activity.getDatasetName());
-        int intakeRate = loadManager.getOutflowRate(connectionId, FeedRuntimeType.COLLECT) * intake.split(",").length;
-        int storeRate = loadManager.getOutflowRate(connectionId, FeedRuntimeType.STORE) * store.split(",").length;
-
-        html.append("<tr>");
-        html.append("<td>" + activity.getFeedName() + "</td>");
-        html.append("<td>" + activity.getDatasetName() + "</td>");
-        html.append("<td>" + activity.getConnectTimestamp() + "</td>");
-        //html.append("<td>" + insertLink(html, FeedDashboardServlet.getParameterizedURL(activity), "Details") + "</td>");
-        html.append("<td>" + intake + "</td>");
-        html.append("<td>" + compute + "</td>");
-        html.append("<td>" + store + "</td>");
-        String color = "black";
-        if (intakeRate > storeRate) {
-            color = "red";
-        }
-        if (intakeRate < 0) {
-            html.append("<td>" + "UNKNOWN" + "</td>");
-        } else {
-            html.append("<td>" + insertColoredText("" + intakeRate, color) + " rec/sec" + "</td>");
-        }
-        if (storeRate < 0) {
-            html.append("<td>" + "UNKNOWN" + "</td>");
-        } else {
-            html.append("<td>" + insertColoredText("" + storeRate, color) + " rec/sec" + "</td>");
-        }
-        html.append("</tr>");
-    }
-
-    private String insertLink(StringBuilder html, String url, String displayText) {
-        return ("<a href=\"" + url + "\">" + displayText + "</a>");
-    }
-
-    private String insertColoredText(String s, String color) {
-        return "<font color=\"" + color + "\">" + s + "</font>";
-    }
-}
-
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServletUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServletUtil.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServletUtil.java
deleted file mode 100644
index 76e2614..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/FeedServletUtil.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.Socket;
-import java.nio.CharBuffer;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
-import edu.uci.ics.asterix.metadata.feeds.RemoteSocketMessageListener;
-
-public class FeedServletUtil {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedServletUtil.class.getName());
-    private static final char EOL = (char) "\n".getBytes()[0];
-
-    public static final class Constants {
-        public static final String TABLE_HEADER_FEED_NAME = "Feed";
-        public static final String TABLE_HEADER_DATASET_NAME = "Dataset";
-        public static final String TABLE_HEADER_ACTIVE_SINCE = "Timestamp";
-        public static final String TABLE_HEADER_INTAKE_STAGE = "Intake Stage";
-        public static final String TABLE_HEADER_COMPUTE_STAGE = "Compute Stage";
-        public static final String TABLE_HEADER_STORE_STAGE = "Store Stage";
-        public static final String TABLE_HEADER_INTAKE_RATE = "Intake";
-        public static final String TABLE_HEADER_STORE_RATE = "Store";
-    }
-
-    public static void initiateSubscription(FeedConnectionId feedId, String host, int port) throws IOException {
-        LinkedBlockingQueue<String> outbox = new LinkedBlockingQueue<String>();
-        int subscriptionPort = port + 1;
-        Socket sc = new Socket(host, subscriptionPort);
-        InputStream in = sc.getInputStream();
-
-        CharBuffer buffer = CharBuffer.allocate(50);
-        char ch = 0;
-        while (ch != EOL) {
-            buffer.put(ch);
-            ch = (char) in.read();
-        }
-        buffer.flip();
-        String s = new String(buffer.array());
-        int feedSubscriptionPort = Integer.parseInt(s.trim());
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Response from Super Feed Manager Report Service " + port + " will connect at " + host + " "
-                    + port);
-        }
-
-        // register the feed subscription queue with FeedLifecycleListener
-        FeedLifecycleListener.INSTANCE.registerFeedReportQueue(feedId, outbox);
-        RemoteSocketMessageListener listener = new RemoteSocketMessageListener(host, feedSubscriptionPort, outbox);
-        listener.start();
-    }
-}
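
initiateSubscription() performs a small text handshake: it reads characters from the socket until a newline, parses the digits as the feed subscription port, and hands that port to a RemoteSocketMessageListener after registering the report queue. A minimal sketch of the read-until-newline step in isolation (a hypothetical helper, not part of the commit):

    import java.io.IOException;
    import java.io.InputStream;

    public final class LineReadSketch {

        // Accumulates characters until a '\n' (or end of stream) is seen,
        // mirroring the handshake loop in FeedServletUtil above.
        public static String readLine(InputStream in) throws IOException {
            StringBuilder sb = new StringBuilder();
            int ch;
            while ((ch = in.read()) != -1 && ch != '\n') {
                sb.append((char) ch);
            }
            return sb.toString();
        }
    }

With such a helper, the port would be obtained as Integer.parseInt(readLine(sc.getInputStream()).trim()).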

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/HyracksProperties.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/HyracksProperties.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/HyracksProperties.java
deleted file mode 100644
index 2805d7f..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/HyracksProperties.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-public class HyracksProperties {
-    private final InputStream is;
-
-    private final Properties properties;
-
-    private static String HYRACKS_IP = "127.0.0.1";
-
-    private static int HYRACKS_PORT = 1098;
-
-    public HyracksProperties() throws IOException {
-        is = HyracksProperties.class.getClassLoader().getResourceAsStream("hyracks-deployment.properties");
-        properties = new Properties();
-        properties.load(is);
-    }
-
-    public String getHyracksIPAddress() {
-        String strIP = properties.getProperty("cc.ip");
-        if (strIP == null) {
-            strIP = HYRACKS_IP;
-        }
-        return strIP;
-    }
-
-    public int getHyracksPort() {
-        String strPort = properties.getProperty("cc.port");
-        int port = HYRACKS_PORT;
-        if (strPort != null) {
-            port = Integer.parseInt(strPort);
-        }
-        return port;
-    }
-}
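
HyracksProperties resolves the cluster controller address from the cc.ip and cc.port keys of hyracks-deployment.properties on the classpath, falling back to 127.0.0.1:1098 when a key is missing. A small, hypothetical usage sketch written against the class as it appears above (not part of the commit):

    import java.io.IOException;

    import edu.uci.ics.asterix.api.http.servlet.HyracksProperties;

    public final class HyracksPropertiesUsageSketch {

        public static void main(String[] args) throws IOException {
            // Assumes hyracks-deployment.properties is present on the classpath.
            HyracksProperties hp = new HyracksProperties();
            // Falls back to 127.0.0.1 and 1098 when cc.ip / cc.port are absent.
            System.out.println("Cluster controller at "
                    + hp.getHyracksIPAddress() + ":" + hp.getHyracksPort());
        }
    }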

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryAPIServlet.java
deleted file mode 100644
index c3dc1f9..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryAPIServlet.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.util.Arrays;
-import java.util.List;
-
-import javax.servlet.http.HttpServletRequest;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.base.Statement.Kind;
-
-public class QueryAPIServlet extends RESTAPIServlet {
-    private static final long serialVersionUID = 1L;
-
-    protected String getQueryParameter(HttpServletRequest request) {
-        return request.getParameter("query");
-    }
-
-    protected List<Statement.Kind> getAllowedStatements() {
-        Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.FUNCTION_DECL, Kind.QUERY, Kind.SET, Kind.WRITE, Kind.RUN };
-        return Arrays.asList(statementsArray);
-    }
-
-    protected String getErrorMessage() {
-        return "Invalid statement: Non-query statement %s to the query API.";
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java
deleted file mode 100644
index 3caed6b..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryResultAPIServlet.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.result.ResultReader;
-import edu.uci.ics.asterix.result.ResultUtils;
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
-
-public class QueryResultAPIServlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
-
-    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-        response.setContentType("text/html");
-        response.setCharacterEncoding("utf-8");
-        String strHandle = request.getParameter("handle");
-        PrintWriter out = response.getWriter();
-        ServletContext context = getServletContext();
-        IHyracksClientConnection hcc;
-        IHyracksDataset hds;
-
-        try {
-            HyracksProperties hp = new HyracksProperties();
-            String strIP = hp.getHyracksIPAddress();
-            int port = hp.getHyracksPort();
-
-            synchronized (context) {
-                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
-                if (hcc == null) {
-                    hcc = new HyracksConnection(strIP, port);
-                    context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
-                }
-
-                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
-                if (hds == null) {
-                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
-                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
-                }
-            }
-            JSONObject handleObj = new JSONObject(strHandle);
-            JSONArray handle = handleObj.getJSONArray("handle");
-            JobId jobId = new JobId(handle.getLong(0));
-            ResultSetId rsId = new ResultSetId(handle.getLong(1));
-
-            ResultReader resultReader = new ResultReader(hcc, hds);
-            resultReader.open(jobId, rsId);
-
-            // QQQ The output format is determined by the initial
-            // query and cannot be modified here, so calling back to
-            // initResponse() is really an error. We need to find a
-            // way to send the same OutputFormat value here as was
-            // originally determined there. Need to save this value on
-            // some object that we can obtain here.
-            SessionConfig sessionConfig = RESTAPIServlet.initResponse(request, response);
-            ResultUtils.displayResults(resultReader, sessionConfig);
-
-        } catch (Exception e) {
-            out.println(e.getMessage());
-            e.printStackTrace(out);
-        }
-    }
-}
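
The servlet expects its "handle" parameter to be a JSON document of the form {"handle": [<jobId>, <resultSetId>]}, which it unpacks into a Hyracks JobId and ResultSetId before opening the result reader. A hypothetical helper that builds such a handle string (not part of the commit):

    import org.json.JSONArray;
    import org.json.JSONException;
    import org.json.JSONObject;

    public final class ResultHandleSketch {

        // Builds the {"handle": [<jobId>, <resultSetId>]} object the servlet
        // above expects in its "handle" request parameter.
        public static String buildHandle(long jobId, long resultSetId) throws JSONException {
            JSONArray handle = new JSONArray();
            handle.put(jobId);
            handle.put(resultSetId);
            JSONObject handleObj = new JSONObject();
            handleObj.put("handle", handle);
            return handleObj.toString();
        }
    }

The resulting string would then be URL-encoded and passed as the handle query parameter to the result and status endpoints.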

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryStatusAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryStatusAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryStatusAPIServlet.java
deleted file mode 100644
index d793bfb..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/QueryStatusAPIServlet.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.result.ResultReader;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
-
-public class QueryStatusAPIServlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
-
-    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-        response.setContentType("text/html");
-        response.setCharacterEncoding("utf-8");
-        String strHandle = request.getParameter("handle");
-        PrintWriter out = response.getWriter();
-        ServletContext context = getServletContext();
-        IHyracksClientConnection hcc;
-        IHyracksDataset hds;
-
-        try {
-            synchronized (context) {
-                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
-
-                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
-                if (hds == null) {
-                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
-                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
-                }
-            }
-            JSONObject handleObj = new JSONObject(strHandle);
-            JSONArray handle = handleObj.getJSONArray("handle");
-            JobId jobId = new JobId(handle.getLong(0));
-            ResultSetId rsId = new ResultSetId(handle.getLong(1));
-
-            /* TODO(madhusudancs): We need to find a way to JSON serialize default format obtained from
-             * metadataProvider in the AQLTranslator and store it as part of the result handle.
-             */
-            ResultReader resultReader = new ResultReader(hcc, hds);
-            resultReader.open(jobId, rsId);
-
-            JSONObject jsonResponse = new JSONObject();
-            String status;
-            switch (resultReader.getStatus()) {
-                case RUNNING:
-                    status = "RUNNING";
-                    break;
-                case FAILED:
-                    status = "ERROR";
-                    break;
-                case SUCCESS:
-                    status = "SUCCESS";
-                    break;
-                default:
-                    status = "ERROR";
-                    break;
-            }
-            jsonResponse.put("status", status);
-            out.write(jsonResponse.toString());
-
-        } catch (Exception e) {
-            out.println(e.getMessage());
-            e.printStackTrace(out);
-        }
-    }
-}
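
The status servlet reduces the Hyracks result state to one of RUNNING, SUCCESS, or ERROR in a small JSON document. A hypothetical polling sketch built on that contract; the host, port, and "/query/status" path are assumptions, and only the {"status": ...} shape comes from the servlet above:

    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import java.util.Scanner;

    import org.json.JSONObject;

    public final class QueryStatusPollerSketch {

        // Polls the status endpoint until the job leaves the RUNNING state.
        public static String awaitCompletion(String handleJson) throws Exception {
            String encoded = URLEncoder.encode(handleJson, StandardCharsets.UTF_8.name());
            URL url = new URL("http://localhost:19002/query/status?handle=" + encoded);
            while (true) {
                HttpURLConnection conn = (HttpURLConnection) url.openConnection();
                String body;
                try (InputStream in = conn.getInputStream();
                        Scanner s = new Scanner(in, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
                    body = s.hasNext() ? s.next() : "";
                }
                String status = new JSONObject(body).getString("status");
                if (!"RUNNING".equals(status)) {
                    return status; // SUCCESS or ERROR, per the switch above
                }
                Thread.sleep(1000);
            }
        }
    }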

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
deleted file mode 100644
index dd19c97..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/RESTAPIServlet.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.io.IOException;
-import java.io.StringWriter;
-import java.nio.charset.StandardCharsets;
-import java.util.List;
-import java.util.logging.Level;
-
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.io.IOUtils;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.base.Statement.Kind;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.aql.parser.TokenMgrError;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.result.ResultReader;
-import edu.uci.ics.asterix.result.ResultUtils;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
-import edu.uci.ics.hyracks.client.dataset.HyracksDataset;
-
-abstract class RESTAPIServlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
-
-    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
-
-    /**
-     * Initialize the Content-Type of the response, and construct a
-     * SessionConfig with the appropriate output writer and output-format
-     * based on the Accept: header and other servlet parameters.
-     */
-    static SessionConfig initResponse(HttpServletRequest request, HttpServletResponse response)
-        throws IOException {
-        response.setCharacterEncoding("utf-8");
-
-        // JSON output is the default; most generally useful for a
-        // programmatic HTTP API
-        OutputFormat format = OutputFormat.JSON;
-
-        // First check the "output" servlet parameter.
-        String output = request.getParameter("output");
-        String accept = request.getHeader("Accept");
-        if (output != null) {
-            if (output.equals("CSV")) {
-                format = OutputFormat.CSV;
-            }
-            else if (output.equals("ADM")) {
-                format = OutputFormat.ADM;
-            }
-        }
-        else {
-            // Second check the Accept: HTTP header.
-            if (accept != null) {
-                if (accept.contains("application/x-adm")) {
-                    format = OutputFormat.ADM;
-                } else if (accept.contains("text/csv")) {
-                    format = OutputFormat.CSV;
-                }
-            }
-        }
-
-        SessionConfig sessionConfig = new SessionConfig(response.getWriter(), format);
-
-        // Now that format is set, output the content-type
-        switch (format) {
-            case ADM:
-                response.setContentType("application/x-adm");
-                break;
-            case JSON:
-                response.setContentType("application/json");
-                break;
-            case CSV: {
-                // Check for header parameter or in Accept:.
-                if ("present".equals(request.getParameter("header")) ||
-                    (accept != null && accept.contains("header=present"))) {
-                    response.setContentType("text/csv; header=present");
-                    sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, true);
-                }
-                else {
-                    response.setContentType("text/csv; header=absent");
-                }
-            }
-        };
-
-        return sessionConfig;
-    }
-
-    @Override
-    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException,
-            IOException {
-        StringWriter sw = new StringWriter();
-        IOUtils.copy(request.getInputStream(), sw, StandardCharsets.UTF_8.name());
-        String query = sw.toString();
-        handleRequest(request, response, query);
-    }
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-        String query = getQueryParameter(request);
-        handleRequest(request, response, query);
-    }
-
-    public void handleRequest(HttpServletRequest request, HttpServletResponse response, String query)
-            throws IOException {
-        SessionConfig sessionConfig = initResponse(request, response);
-        AqlTranslator.ResultDelivery resultDelivery = whichResultDelivery(request);
-
-        ServletContext context = getServletContext();
-        IHyracksClientConnection hcc;
-        IHyracksDataset hds;
-
-        try {
-            synchronized (context) {
-                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
-                hds = (IHyracksDataset) context.getAttribute(HYRACKS_DATASET_ATTR);
-                if (hds == null) {
-                    hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
-                    context.setAttribute(HYRACKS_DATASET_ATTR, hds);
-                }
-            }
-
-            AQLParser parser = new AQLParser(query);
-            List<Statement> aqlStatements = parser.parse();
-            if (!containsForbiddenStatements(aqlStatements)) {
-                MetadataManager.INSTANCE.init();
-                AqlTranslator aqlTranslator = new AqlTranslator(aqlStatements, sessionConfig);
-                aqlTranslator.compileAndExecute(hcc, hds, resultDelivery);
-            }
-        } catch (ParseException | TokenMgrError | edu.uci.ics.asterix.aqlplus.parser.TokenMgrError pe) {
-            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, pe.getMessage(), pe);
-            String errorMessage = ResultUtils.buildParseExceptionMessage(pe, query);
-            JSONObject errorResp = ResultUtils.getErrorResponse(2, errorMessage, "", "");
-            sessionConfig.out().write(errorResp.toString());
-            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
-        } catch (Exception e) {
-            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
-            ResultUtils.apiErrorHandler(sessionConfig.out(), e);
-            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
-        }
-    }
-
-    private boolean containsForbiddenStatements(List<Statement> aqlStatements) throws AsterixException {
-        for (Statement st : aqlStatements) {
-            if (!getAllowedStatements().contains(st.getKind())) {
-                throw new AsterixException(String.format(getErrorMessage(), st.getKind()));
-            }
-        }
-        return false;
-    }
-
-    protected AqlTranslator.ResultDelivery whichResultDelivery(HttpServletRequest request) {
-        String mode = request.getParameter("mode");
-        if (mode != null) {
-            if (mode.equals("asynchronous")) {
-                return AqlTranslator.ResultDelivery.ASYNC;
-            } else if (mode.equals("asynchronous-deferred")) {
-                return AqlTranslator.ResultDelivery.ASYNC_DEFERRED;
-            }
-        }
-        return AqlTranslator.ResultDelivery.SYNC;
-    }
-
-    protected abstract String getQueryParameter(HttpServletRequest request);
-
-    protected abstract List<Kind> getAllowedStatements();
-
-    protected abstract String getErrorMessage();
-}
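
initResponse() picks the output format from the "output" servlet parameter first and the Accept header second, defaulting to JSON, and for CSV it additionally honors header=present. A hypothetical client sketch exercising that negotiation; host, port, and the "/query" path are assumptions, while the "query" parameter name comes from QueryAPIServlet above:

    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import java.util.Scanner;

    public final class OutputFormatNegotiationSketch {

        public static void main(String[] args) throws Exception {
            String aql = "for $x in dataset TweetMessages return $x;";
            String urlStr = "http://localhost:19002/query?query="
                    + URLEncoder.encode(aql, StandardCharsets.UTF_8.name());
            HttpURLConnection conn = (HttpURLConnection) new URL(urlStr).openConnection();
            // Request CSV with a header row; initResponse() maps this Accept value to
            // OutputFormat.CSV and answers with "text/csv; header=present".
            conn.setRequestProperty("Accept", "text/csv; header=present");

            try (InputStream in = conn.getInputStream();
                    Scanner s = new Scanner(in, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
                System.out.println(s.hasNext() ? s.next() : "");
            }
        }
    }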

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ShutdownAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ShutdownAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ShutdownAPIServlet.java
deleted file mode 100644
index 4e16254..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/ShutdownAPIServlet.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.nio.charset.StandardCharsets;
-import java.util.logging.Level;
-
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.io.IOUtils;
-
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.result.ResultUtils;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-
-public class ShutdownAPIServlet extends HttpServlet {
-    private static final long serialVersionUID = 1L;
-
-    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
-
-    private static final String HYRACKS_DATASET_ATTR = "edu.uci.ics.asterix.HYRACKS_DATASET";
-
-    @Override
-    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException,
-            IOException {
-
-        response.setContentType("application/json");
-        response.setCharacterEncoding("utf-8");
-
-        PrintWriter out = response.getWriter();
-        OutputFormat format = OutputFormat.JSON;
-        String accept = request.getHeader("Accept");
-        if ((accept == null) || (accept.contains("application/x-adm"))) {
-            format = OutputFormat.ADM;
-        } else if (accept.contains("application/json")) {
-            format = OutputFormat.JSON;
-        }
-        StringWriter sw = new StringWriter();
-        IOUtils.copy(request.getInputStream(), sw, StandardCharsets.UTF_8.name());
-
-        ServletContext context = getServletContext();
-        IHyracksClientConnection hcc;
-        try {
-            synchronized (context) {
-                hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
-                response.setStatus(HttpServletResponse.SC_ACCEPTED);
-                hcc.stopCluster();
-            }
-        } catch (Exception e) {
-            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
-            ResultUtils.apiErrorHandler(out, e);
-            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
-        }
-    }
-
-    @Override
-    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java
deleted file mode 100644
index 4bbd712..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/http/servlet/UpdateAPIServlet.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.http.servlet;
-
-import java.util.Arrays;
-import java.util.List;
-
-import javax.servlet.http.HttpServletRequest;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.base.Statement.Kind;
-
-public class UpdateAPIServlet extends RESTAPIServlet {
-    private static final long serialVersionUID = 1L;
-
-    protected String getQueryParameter(HttpServletRequest request) {
-        return request.getParameter("statements");
-    }
-
-    protected List<Statement.Kind> getAllowedStatements() {
-        Kind[] statementsArray = { Kind.DATAVERSE_DECL, Kind.DELETE, Kind.INSERT, Kind.UPDATE, Kind.DML_CMD_LIST,
-                Kind.LOAD, Kind.CONNECT_FEED, Kind.DISCONNECT_FEED, Kind.SET, Kind.COMPACT, Kind.EXTERNAL_DATASET_REFRESH };
-        return Arrays.asList(statementsArray);
-    }
-
-    protected String getErrorMessage() {
-        return "Invalid statement: Non-Update statement %s to the Update API.";
-    }
-}


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
new file mode 100644
index 0000000..956b447
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.io.File;
+import java.rmi.server.UnicastRemoteObject;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.kohsuke.args4j.CmdLineException;
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.asterix.api.common.AsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.api.AsterixThreadFactory;
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.transactions.IRecoveryManager;
+import edu.uci.ics.asterix.common.transactions.IRecoveryManager.SystemState;
+import edu.uci.ics.asterix.event.schema.cluster.Cluster;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataNode;
+import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
+import edu.uci.ics.asterix.metadata.api.IMetadataNode;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataBootstrap;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.application.INCApplicationEntryPoint;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
+import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
+
+public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
+    private static final Logger LOGGER = Logger.getLogger(NCApplicationEntryPoint.class.getName());
+
+    @Option(name = "-metadata-port", usage = "IP port to bind metadata listener (default: random port)", required = false)
+    public int metadataRmiPort = 0;
+
+    @Option(name = "-initial-run", usage = "A flag indicating if it's the first time the NC is started (default: false)", required = false)
+    public boolean initialRun = false;
+
+    private INCApplicationContext ncApplicationContext = null;
+    private IAsterixAppRuntimeContext runtimeContext;
+    private String nodeId;
+    private boolean isMetadataNode = false;
+    private boolean stopInitiated = false;
+    private SystemState systemState = SystemState.NEW_UNIVERSE;
+    private final long NON_SHARP_CHECKPOINT_TARGET_LSN = -1;
+
+    @Override
+    public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
+        CmdLineParser parser = new CmdLineParser(this);
+
+        try {
+            parser.parseArgument(args);
+        } catch (CmdLineException e) {
+            System.err.println(e.getMessage());
+            System.err.println("Usage:");
+            parser.printUsage(System.err);
+            throw e;
+        }
+
+        ncAppCtx.setThreadFactory(new AsterixThreadFactory(ncAppCtx.getLifeCycleComponentManager()));
+        ncApplicationContext = ncAppCtx;
+        nodeId = ncApplicationContext.getNodeId();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting Asterix node controller: " + nodeId);
+        }
+
+        runtimeContext = new AsterixAppRuntimeContext(ncApplicationContext);
+        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
+                .getMetadataProperties();
+        if (!metadataProperties.getNodeNames().contains(ncApplicationContext.getNodeId())) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Substitute node joining : " + ncApplicationContext.getNodeId());
+            }
+            updateOnNodeJoin();
+        }
+        runtimeContext.initialize();
+        ncApplicationContext.setApplicationObject(runtimeContext);
+
+        if (initialRun) {
+            LOGGER.info("System is being initialized. (first run)");
+            systemState = SystemState.NEW_UNIVERSE;
+        } else {
+            // #. recover if the system is corrupted by checking system state.
+            IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
+            systemState = recoveryMgr.getSystemState();
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("System is in a state: " + systemState);
+            }
+
+            if (systemState != SystemState.NEW_UNIVERSE) {
+                PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
+                        .getLocalResourceRepository();
+                localResourceRepository.initialize(nodeId, null, false, runtimeContext.getResourceIdFactory());
+            }
+            
+            if (systemState == SystemState.CORRUPTED) {
+                recoveryMgr.startRecovery(true);
+            }
+        }
+
+    }
+
+    @Override
+    public void stop() throws Exception {
+        if (!stopInitiated) {
+            runtimeContext.setShuttingdown(true);
+            stopInitiated = true;
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Stopping Asterix node controller: " + nodeId);
+            }
+
+            IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
+            recoveryMgr.checkpoint(true, NON_SHARP_CHECKPOINT_TARGET_LSN);
+
+            if (isMetadataNode) {
+                MetadataBootstrap.stopUniverse();
+            }
+
+            ncApplicationContext.getLifeCycleComponentManager().stopAll(false);
+            runtimeContext.deinitialize();
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Duplicate attempt to stop ignored: " + nodeId);
+            }
+        }
+    }
+
+    @Override
+    public void notifyStartupComplete() throws Exception {
+        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
+                .getMetadataProperties();
+
+        if (systemState == SystemState.NEW_UNIVERSE) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("System state: " + SystemState.NEW_UNIVERSE);
+                LOGGER.info("Node ID: " + nodeId);
+                LOGGER.info("Stores: " + metadataProperties.getStores());
+                LOGGER.info("Root Metadata Store: " + metadataProperties.getStores().get(nodeId)[0]);
+            }
+
+            PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
+                    .getLocalResourceRepository();
+            localResourceRepository.initialize(nodeId, metadataProperties.getStores().get(nodeId)[0], true, null);
+        }
+
+        IAsterixStateProxy proxy = null;
+        isMetadataNode = nodeId.equals(metadataProperties.getMetadataNodeName());
+        if (isMetadataNode) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Bootstrapping metadata");
+            }
+            MetadataNode.INSTANCE.initialize(runtimeContext);
+
+            proxy = (IAsterixStateProxy) ncApplicationContext.getDistributedState();
+            if (proxy == null) {
+                throw new IllegalStateException("Metadata node cannot access distributed state");
+            }
+
+            // This is a special case: we hand the MetadataNode instance to the
+            // MetadataManager directly, which lets us delay registering the
+            // MetadataNode until it is completely initialized.
+            MetadataManager.INSTANCE = new MetadataManager(proxy, MetadataNode.INSTANCE);
+            MetadataBootstrap.startUniverse(((IAsterixPropertiesProvider) runtimeContext), ncApplicationContext,
+                    systemState == SystemState.NEW_UNIVERSE);
+            MetadataBootstrap.startDDLRecovery();
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Metadata node bound");
+            }
+        }
+
+        ExternalLibraryBootstrap.setUpExternaLibraries(isMetadataNode);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting lifecycle components");
+        }
+
+        Map<String, String> lifecycleMgmtConfiguration = new HashMap<String, String>();
+        String dumpPathKey = LifeCycleComponentManager.Config.DUMP_PATH_KEY;
+        String dumpPath = metadataProperties.getCoredumpPath(nodeId);
+        lifecycleMgmtConfiguration.put(dumpPathKey, dumpPath);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Coredump directory for NC is: " + dumpPath);
+        }
+        ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
+        lccm.configure(lifecycleMgmtConfiguration);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Configured:" + lccm);
+        }
+        ncApplicationContext.setStateDumpHandler(new AsterixStateDumpHandler(ncApplicationContext.getNodeId(), lccm
+                .getDumpPath(), lccm));
+
+        lccm.startAll();
+
+        IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
+        recoveryMgr.checkpoint(true, NON_SHARP_CHECKPOINT_TARGET_LSN);
+
+        if (isMetadataNode) {
+            IMetadataNode stub = null;
+            stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE, metadataRmiPort);
+            proxy.setMetadataNode(stub);
+        }
+
+        // Reclaim storage for temporary datasets.
+        String[] ioDevices = AsterixClusterProperties.INSTANCE.getIODevices(nodeId);
+        String[] nodeStores = metadataProperties.getStores().get(nodeId);
+        int numIoDevices = AsterixClusterProperties.INSTANCE.getNumberOfIODevices(nodeId);
+        for (int j = 0; j < nodeStores.length; j++) {
+            for (int k = 0; k < numIoDevices; k++) {
+                File f = new File(ioDevices[k] + File.separator + nodeStores[j] + File.separator + "temp");
+                f.delete();
+            }
+        }
+
+        // TODO
+        // reclaim storage for orphaned index artifacts in NCs.
+
+    }
+
+    private void updateOnNodeJoin() {
+        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
+                .getMetadataProperties();
+        if (!metadataProperties.getNodeNames().contains(nodeId)) {
+            metadataProperties.getNodeNames().add(nodeId);
+            Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
+            String asterixInstanceName = cluster.getInstanceName();
+            AsterixTransactionProperties txnProperties = ((IAsterixPropertiesProvider) runtimeContext)
+                    .getTransactionProperties();
+            Node self = null;
+            for (Node node : cluster.getSubstituteNodes().getNode()) {
+                String ncId = asterixInstanceName + "_" + node.getId();
+                if (ncId.equalsIgnoreCase(nodeId)) {
+                    String storeDir = node.getStore() == null ? cluster.getStore() : node.getStore();
+                    metadataProperties.getStores().put(nodeId, storeDir.split(","));
+
+                    String coredumpPath = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
+                    metadataProperties.getCoredumpPaths().put(nodeId, coredumpPath);
+
+                    String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
+                    txnProperties.getLogDirectories().put(nodeId, txnLogDir);
+
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Store set to : " + storeDir);
+                        LOGGER.info("Coredump dir set to : " + coredumpPath);
+                        LOGGER.info("Transaction log dir set to :" + txnLogDir);
+                    }
+                    self = node;
+                    break;
+                }
+            }
+            if (self != null) {
+                cluster.getSubstituteNodes().getNode().remove(self);
+                cluster.getNode().add(self);
+            } else {
+                throw new IllegalStateException("Unknown node joining the cluster");
+            }
+        }
+    }
+}
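
For illustration only (not part of this change), a minimal sketch of how the command-line options declared above (-metadata-port, -initial-run) behave when parsed on their own, using the same CmdLineParser/@Option pattern as NCApplicationEntryPoint.start(); the NCOptionsDemo class and its main method are hypothetical:

    import org.kohsuke.args4j.CmdLineException;
    import org.kohsuke.args4j.CmdLineParser;
    import org.kohsuke.args4j.Option;

    public class NCOptionsDemo {
        // Mirrors the option declarations in NCApplicationEntryPoint above.
        @Option(name = "-metadata-port", usage = "IP port to bind metadata listener (default: random port)", required = false)
        public int metadataRmiPort = 0;

        @Option(name = "-initial-run", usage = "A flag indicating if it's the first time the NC is started (default: false)", required = false)
        public boolean initialRun = false;

        public static void main(String[] args) {
            NCOptionsDemo options = new NCOptionsDemo();
            CmdLineParser parser = new CmdLineParser(options);
            try {
                // Same parse-or-print-usage pattern as NCApplicationEntryPoint.start().
                parser.parseArgument(args);
            } catch (CmdLineException e) {
                System.err.println(e.getMessage());
                System.err.println("Usage:");
                parser.printUsage(System.err);
                return;
            }
            System.out.println("metadataRmiPort=" + options.metadataRmiPort + ", initialRun=" + options.initialRun);
        }
    }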

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/result/ResultReader.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/result/ResultReader.java b/asterix-app/src/main/java/org/apache/asterix/result/ResultReader.java
new file mode 100644
index 0000000..c27f859
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/result/ResultReader.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.result;
+
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.comm.IFrame;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
+
+public class ResultReader {
+    private final IHyracksDataset hyracksDataset;
+
+    private IHyracksDatasetReader reader;
+
+    private IFrameTupleAccessor frameTupleAccessor;
+
+    // Number of parallel result reader buffers
+    public static final int NUM_READERS = 1;
+
+    public static final int FRAME_SIZE = AsterixAppContextInfo.getInstance().getCompilerProperties().getFrameSize();
+
+    public ResultReader(IHyracksClientConnection hcc, IHyracksDataset hdc) throws Exception {
+        hyracksDataset = hdc;
+    }
+
+    public void open(JobId jobId, ResultSetId resultSetId) throws HyracksDataException {
+        reader = hyracksDataset.createReader(jobId, resultSetId);
+        frameTupleAccessor = new ResultFrameTupleAccessor();
+    }
+
+    public Status getStatus() {
+        return reader.getResultStatus();
+    }
+
+    public int read(IFrame frame) throws HyracksDataException {
+        return reader.read(frame);
+    }
+
+    public IFrameTupleAccessor getFrameTupleAccessor() {
+        return frameTupleAccessor;
+    }
+}
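
For illustration only (not part of this change), a hypothetical sketch of how the ResultReader above might be driven by a caller; the ResultReaderDemo class is invented, and the client connection, dataset handle, job id, and result-set id are assumed to be supplied by the surrounding API layer:

    import edu.uci.ics.asterix.result.ResultReader;
    import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
    import edu.uci.ics.hyracks.api.comm.IFrame;
    import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
    import edu.uci.ics.hyracks.api.comm.VSizeFrame;
    import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
    import edu.uci.ics.hyracks.api.dataset.ResultSetId;
    import edu.uci.ics.hyracks.api.job.JobId;
    import edu.uci.ics.hyracks.control.nc.resources.memory.FrameManager;

    public class ResultReaderDemo {
        // Counts the tuples of a job's result set, frame by frame.
        public static int countResultTuples(IHyracksClientConnection hcc, IHyracksDataset hdc, JobId jobId,
                ResultSetId rsId) throws Exception {
            ResultReader reader = new ResultReader(hcc, hdc);
            reader.open(jobId, rsId);
            IFrame frame = new VSizeFrame(new FrameManager(ResultReader.FRAME_SIZE));
            IFrameTupleAccessor fta = reader.getFrameTupleAccessor();
            int tuples = 0;
            while (reader.read(frame) > 0) {
                fta.reset(frame.getBuffer());
                tuples += fta.getTupleCount();
                frame.getBuffer().clear();
            }
            return tuples;
        }
    }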

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/result/ResultUtils.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/result/ResultUtils.java b/asterix-app/src/main/java/org/apache/asterix/result/ResultUtils.java
new file mode 100644
index 0000000..3a4fd5f
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/result/ResultUtils.java
@@ -0,0 +1,343 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.result;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.nio.charset.Charset;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.http.ParseException;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.api.http.servlet.APIServlet;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.comm.IFrame;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.control.nc.resources.memory.FrameManager;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+
+public class ResultUtils {
+    private static final Charset UTF_8 = Charset.forName("UTF-8");
+
+    static Map<Character, String> HTML_ENTITIES = new HashMap<Character, String>();
+
+    static {
+        HTML_ENTITIES.put('"', "&quot;");
+        HTML_ENTITIES.put('&', "&amp;");
+        HTML_ENTITIES.put('<', "&lt;");
+        HTML_ENTITIES.put('>', "&gt;");
+    }
+
+    public static String escapeHTML(String s) {
+        for (Character c : HTML_ENTITIES.keySet()) {
+            if (s.indexOf(c) >= 0) {
+                s = s.replace(c.toString(), HTML_ENTITIES.get(c));
+            }
+        }
+        return s;
+    }
+
+    public static void displayCSVHeader(ARecordType recordType, SessionConfig conf) {
+        // If HTML-ifying, we have to output this here before the header -
+        // pretty ugly
+        if (conf.is(SessionConfig.FORMAT_HTML)) {
+            conf.out().println("<h4>Results:</h4>");
+            conf.out().println("<pre>");
+        }
+
+        String[] fieldNames = recordType.getFieldNames();
+        boolean notfirst = false;
+        for (String name : fieldNames) {
+            if (notfirst) {
+                conf.out().print(',');
+            }
+            notfirst = true;
+            conf.out().print('"');
+            conf.out().print(name.replace("\"", "\"\""));
+            conf.out().print('"');
+        }
+        conf.out().print("\r\n");
+    }
+
+    public static FrameManager resultDisplayFrameMgr = new FrameManager(ResultReader.FRAME_SIZE);
+
+    public static void displayResults(ResultReader resultReader, SessionConfig conf)
+            throws HyracksDataException {
+        IFrameTupleAccessor fta = resultReader.getFrameTupleAccessor();
+
+        IFrame frame = new VSizeFrame(resultDisplayFrameMgr);
+        int bytesRead = resultReader.read(frame);
+        ByteBufferInputStream bbis = new ByteBufferInputStream();
+
+        // Whether we need to separate top-level ADM instances with commas
+        boolean need_commas = true;
+        // Whether at least one instance has already been output
+        boolean notfirst = false;
+
+        // If we're outputting CSV with a header, the HTML header was already
+        // output by displayCSVHeader(), so skip it here
+        if (conf.is(SessionConfig.FORMAT_HTML) &&
+            ! (conf.fmt() == OutputFormat.CSV && conf.is(SessionConfig.FORMAT_CSV_HEADER))) {
+            conf.out().println("<h4>Results:</h4>");
+            conf.out().println("<pre>");
+        }
+
+        switch (conf.fmt()) {
+            case CSV:
+                need_commas = false;
+                break;
+            case JSON:
+            case ADM:
+                // Conveniently, JSON and ADM have the same syntax for an
+                // "ordered list", and our representation of the result of a
+                // statement is an ordered list of instances.
+                conf.out().print("[ ");
+                break;
+        }
+
+        if (bytesRead > 0) {
+            do {
+                try {
+                    fta.reset(frame.getBuffer());
+                    int last = fta.getTupleCount();
+                    String result;
+                    for (int tIndex = 0; tIndex < last; tIndex++) {
+                        int start = fta.getTupleStartOffset(tIndex);
+                        int length = fta.getTupleEndOffset(tIndex) - start;
+                        bbis.setByteBuffer(frame.getBuffer(), start);
+                        byte[] recordBytes = new byte[length];
+                        int numread = bbis.read(recordBytes, 0, length);
+                        if (conf.fmt() == OutputFormat.CSV) {
+                            if ( (numread > 0) && (recordBytes[numread-1] == '\n') ) {
+                                numread--;
+                            }
+                        }
+                        result = new String(recordBytes, 0, numread, UTF_8);
+                        if (need_commas && notfirst) {
+                            conf.out().print(", ");
+                        }
+                        notfirst = true;
+                        conf.out().print(result);
+                        if (conf.fmt() == OutputFormat.CSV) {
+                            conf.out().print("\r\n");
+                        }
+                    }
+                    frame.getBuffer().clear();
+                } finally {
+                    try {
+                        bbis.close();
+                    } catch (IOException e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+            } while (resultReader.read(frame) > 0);
+        }
+
+        conf.out().flush();
+
+        switch (conf.fmt()) {
+            case JSON:
+            case ADM:
+                conf.out().println(" ]");
+                break;
+            case CSV:
+                // Nothing to do
+                break;
+        }
+
+        if (conf.is(SessionConfig.FORMAT_HTML)) {
+            conf.out().println("</pre>");
+        }
+    }
+
+    public static JSONObject getErrorResponse(int errorCode, String errorMessage, String errorSummary,
+            String errorStackTrace) {
+        JSONObject errorResp = new JSONObject();
+        JSONArray errorArray = new JSONArray();
+        errorArray.put(errorCode);
+        errorArray.put(errorMessage);
+        try {
+            errorResp.put("error-code", errorArray);
+            if (!errorSummary.equals(""))
+                errorResp.put("summary", errorSummary);
+            if (!errorStackTrace.equals(""))
+                errorResp.put("stacktrace", errorStackTrace);
+        } catch (JSONException e) {
+            // TODO(madhusudancs): Figure out what to do when JSONException occurs while building the results.
+        }
+        return errorResp;
+    }
+
+    public static void webUIErrorHandler(PrintWriter out, Exception e) {
+        String errorTemplate = readTemplateFile("/webui/errortemplate.html", "%s\n%s\n%s");
+
+        String errorOutput = String.format(errorTemplate, escapeHTML(extractErrorMessage(e)),
+                escapeHTML(extractErrorSummary(e)), escapeHTML(extractFullStackTrace(e)));
+        out.println(errorOutput);
+    }
+
+    public static void webUIParseExceptionHandler(PrintWriter out, Throwable e, String query) {
+        String errorTemplate = readTemplateFile("/webui/errortemplate_message.html", "<pre class=\"error\">%s\n</pre>");
+
+        String errorOutput = String.format(errorTemplate, buildParseExceptionMessage(e, query));
+        out.println(errorOutput);
+    }
+
+    public static void apiErrorHandler(PrintWriter out, Exception e) {
+        int errorCode = 99;
+        if (e instanceof ParseException) {
+            errorCode = 2;
+        } else if (e instanceof AlgebricksException) {
+            errorCode = 3;
+        } else if (e instanceof HyracksDataException) {
+            errorCode = 4;
+        }
+
+        JSONObject errorResp = ResultUtils.getErrorResponse(errorCode, extractErrorMessage(e), extractErrorSummary(e),
+                extractFullStackTrace(e));
+        out.write(errorResp.toString());
+    }
+
+    public static String buildParseExceptionMessage(Throwable e, String query) {
+        StringBuilder errorMessage = new StringBuilder();
+        String message = e.getMessage();
+        message = message.replace("<", "&lt;");
+        message = message.replace(">", "&gt;");
+        errorMessage.append("SyntaxError: " + message + "\n");
+        int pos = message.indexOf("line");
+        if (pos > 0) {
+            Pattern p = Pattern.compile("\\d+");
+            Matcher m = p.matcher(message);
+            if (m.find(pos)) {
+                int lineNo = Integer.parseInt(message.substring(m.start(), m.end()));
+                String[] lines = query.split("\n");
+                if (lineNo > lines.length) {
+                    errorMessage.append("===> &lt;BLANK LINE&gt; \n");
+                } else {
+                    String line = lines[lineNo - 1];
+                    errorMessage.append("==> " + line);
+                }
+            }
+        }
+        return errorMessage.toString();
+    }
+
+    private static Throwable getRootCause(Throwable cause) {
+        Throwable nextCause = cause.getCause();
+        while (nextCause != null) {
+            cause = nextCause;
+            nextCause = cause.getCause();
+        }
+        return cause;
+    }
+
+    /**
+     * Extract the message in the root cause of the stack trace:
+     *
+     * @param e
+     * @return error message string.
+     */
+    private static String extractErrorMessage(Throwable e) {
+        Throwable cause = getRootCause(e);
+        String fullyQualifiedExceptionClassName = cause.getClass().getName();
+        String[] hierarchySplits = fullyQualifiedExceptionClassName.split("\\.");
+        //try returning the class without package qualification
+        String exceptionClassName = hierarchySplits[hierarchySplits.length - 1];
+        String localizedMessage = cause.getLocalizedMessage();
+        if (localizedMessage == null) {
+            localizedMessage = "Internal error. Please check instance logs for further details.";
+        }
+        return localizedMessage + " [" + exceptionClassName + "]";
+    }
+
+    /**
+     * Extract the meaningful part of a stack trace:
+     * a. the causes in the stack trace hierarchy
+     * b. the top exception for each cause
+     *
+     * @param e
+     * @return the concatenated message containing a and b.
+     */
+    private static String extractErrorSummary(Throwable e) {
+        StringBuilder errorMessageBuilder = new StringBuilder();
+        Throwable cause = e;
+        errorMessageBuilder.append(cause.getLocalizedMessage());
+        while (cause != null) {
+            StackTraceElement[] stackTraceElements = cause.getStackTrace();
+            errorMessageBuilder.append(stackTraceElements.length > 0 ? "\n caused by: " + stackTraceElements[0] : "");
+            cause = cause.getCause();
+        }
+        return errorMessageBuilder.toString();
+    }
+
+    /**
+     * Extract the full stack trace:
+     *
+     * @param e
+     * @return the string containing the full stack trace of the error.
+     */
+    private static String extractFullStackTrace(Throwable e) {
+        StringWriter stringWriter = new StringWriter();
+        PrintWriter printWriter = new PrintWriter(stringWriter);
+        e.printStackTrace(printWriter);
+        return stringWriter.toString();
+    }
+
+    /**
+     * Read the template file which is stored as a resource and return its content. If the file does not exist or is
+     * not readable return the default template string.
+     *
+     * @param path
+     *            The path to the resource template file
+     * @param defaultTemplate
+     *            The default template string if the template file does not exist or is not readable
+     * @return The template string to be used to render the output.
+     */
+    private static String readTemplateFile(String path, String defaultTemplate) {
+        String errorTemplate = defaultTemplate;
+        try {
+            InputStream is = APIServlet.class.getResourceAsStream(path);
+            if (is == null) {
+                return defaultTemplate;
+            }
+            BufferedReader br = new BufferedReader(new InputStreamReader(is, UTF_8));
+            try {
+                StringBuilder sb = new StringBuilder();
+                String line = br.readLine();
+                while (line != null) {
+                    sb.append(line);
+                    line = br.readLine();
+                }
+                errorTemplate = sb.toString();
+            } finally {
+                br.close();
+            }
+        } catch (IOException ioe) {
+            // If there is an IOException reading the error template html file, the default template is used.
+        }
+        return errorTemplate;
+    }
+}
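
For illustration only (not part of this change), a small hypothetical sketch of the two public helpers above that the HTTP layer relies on; the ErrorResponseDemo class name is invented, and the error code 2 follows the ParseException mapping in apiErrorHandler():

    import org.json.JSONObject;

    import edu.uci.ics.asterix.result.ResultUtils;

    public class ErrorResponseDemo {
        public static void main(String[] args) throws Exception {
            // JSON error payload of the form {"error-code": [2, "..."], "summary": "..."}.
            JSONObject err = ResultUtils.getErrorResponse(2, "Syntax error in query", "near line 3", "");
            System.out.println(err.toString());

            // HTML-escape a message before it is embedded in a web UI error template.
            System.out.println(ResultUtils.escapeHTML("select <x> & \"y\""));
        }
    }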

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java b/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
new file mode 100644
index 0000000..a788c37
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.std.misc;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+public class ConstantTupleSourceOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private int[] fieldSlots;
+    private byte[] tupleData;
+    private int tupleSize;
+
+    public ConstantTupleSourceOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc, int[] fieldSlots,
+            byte[] tupleData, int tupleSize) {
+        super(spec, 0, 1);
+        this.tupleData = tupleData;
+        this.fieldSlots = fieldSlots;
+        this.tupleSize = tupleSize;
+        recordDescriptors[0] = recDesc;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new ConstantTupleSourceOperatorNodePushable(ctx, fieldSlots, tupleData, tupleSize);
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java b/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
new file mode 100644
index 0000000..ba75fb0
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.std.misc;
+
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+public class ConstantTupleSourceOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+    private IHyracksTaskContext ctx;
+
+    private int[] fieldSlots;
+    private byte[] tupleData;
+    private int tupleSize;
+
+    public ConstantTupleSourceOperatorNodePushable(IHyracksTaskContext ctx, int[] fieldSlots, byte[] tupleData,
+            int tupleSize) {
+        super();
+        this.fieldSlots = fieldSlots;
+        this.tupleData = tupleData;
+        this.tupleSize = tupleSize;
+        this.ctx = ctx;
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
+        if (fieldSlots != null && tupleData != null && tupleSize > 0) {
+            appender.append(fieldSlots, tupleData, 0, tupleSize);
+        }
+        writer.open();
+        try {
+            appender.flush(writer, true);
+        } finally {
+            writer.close();
+        }
+    }
+}
\ No newline at end of file
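
For illustration only (not part of this change), a hypothetical sketch of how such a constant tuple source is typically wired up: one pre-serialized tuple is built with Hyracks' ArrayTupleBuilder and handed to the descriptor, which replays it at runtime through the node pushable above. The ConstantTupleSourceDemo class and the single integer payload are invented:

    import java.io.DataOutput;

    import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
    import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
    import edu.uci.ics.hyracks.api.job.JobSpecification;
    import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
    import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
    import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;

    public class ConstantTupleSourceDemo {
        // Builds a source operator that emits exactly one single-field integer tuple.
        public static ConstantTupleSourceOperatorDescriptor singleIntTupleSource(JobSpecification spec, int value)
                throws Exception {
            ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
            DataOutput dos = tb.getDataOutput();
            tb.reset();
            IntegerSerializerDeserializer.INSTANCE.serialize(value, dos);
            tb.addFieldEndOffset();

            RecordDescriptor recDesc = new RecordDescriptor(
                    new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
            return new ConstantTupleSourceOperatorDescriptor(spec, recDesc, tb.getFieldEndOffsets(),
                    tb.getByteArray(), tb.getSize());
        }
    }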

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
deleted file mode 100644
index 7f5f480..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/api/http/servlet/ConnectorAPIServletTest.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.api.http.servlet;
-
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.when;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import junit.extensions.PA;
-import junit.framework.Assert;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.junit.Test;
-
-import edu.uci.ics.asterix.feeds.CentralFeedManager;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.JSONDeserializerForTypes;
-import edu.uci.ics.asterix.test.runtime.ExecutionTest;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
-import edu.uci.ics.hyracks.api.comm.NetworkAddress;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-
-@SuppressWarnings("deprecation")
-public class ConnectorAPIServletTest {
-
-    @Test
-    public void testGet() throws Exception {
-        // Starts test asterixdb cluster.
-        ExecutionTest.setUp();
-
-        // Configures a test connector api servlet.
-        ConnectorAPIServlet servlet = spy(new ConnectorAPIServlet());
-        ServletConfig mockServletConfig = mock(ServletConfig.class);
-        servlet.init(mockServletConfig);
-        Map<String, NodeControllerInfo> nodeMap = new HashMap<String, NodeControllerInfo>();
-        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
-        PrintWriter outputWriter = new PrintWriter(outputStream);
-
-        // Creates mocks.
-        ServletContext mockContext = mock(ServletContext.class);
-        IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
-        NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
-        NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
-        HttpServletRequest mockRequest = mock(HttpServletRequest.class);
-        HttpServletResponse mockResponse = mock(HttpServletResponse.class);
-
-        // Sets up mock returns.
-        when(servlet.getServletContext()).thenReturn(mockContext);
-        when(mockRequest.getParameter("dataverseName")).thenReturn("Metadata");
-        when(mockRequest.getParameter("datasetName")).thenReturn("Dataset");
-        when(mockResponse.getWriter()).thenReturn(outputWriter);
-        when(mockContext.getAttribute(anyString())).thenReturn(mockHcc);
-        when(mockHcc.getNodeControllerInfos()).thenReturn(nodeMap);
-        when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
-        when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
-
-        // Calls ConnectorAPIServlet.formResponseObject.
-        nodeMap.put("nc1", mockInfo1);
-        nodeMap.put("nc2", mockInfo2);
-        servlet.doGet(mockRequest, mockResponse);
-
-        // Constructs the actual response.
-        JSONTokener tokener = new JSONTokener(new InputStreamReader(
-                new ByteArrayInputStream(outputStream.toByteArray())));
-        JSONObject actualResponse = new JSONObject(tokener);
-
-        // Checks the data type of the dataset.
-        String primaryKey = actualResponse.getString("keys");
-        Assert.assertEquals("DataverseName,DatasetName", primaryKey);
-        ARecordType recordType = (ARecordType) JSONDeserializerForTypes.convertFromJSON((JSONObject) actualResponse
-                .get("type"));
-        Assert.assertEquals(getMetadataRecordType("Metadata", "Dataset"), recordType);
-
-        // Checks the correctness of results.
-        JSONArray splits = actualResponse.getJSONArray("splits");
-        String path = ((JSONObject) splits.get(0)).getString("path");
-        Assert.assertTrue(path.endsWith("Metadata/Dataset_idx_Dataset"));
-
-        // Tears down the asterixdb cluster.
-        ExecutionTest.tearDown();
-    }
-
-    @Test
-    public void testFormResponseObject() throws Exception {
-        ConnectorAPIServlet servlet = new ConnectorAPIServlet();
-        JSONObject actualResponse = new JSONObject();
-        FileSplit[] splits = new FileSplit[2];
-        splits[0] = new FileSplit("nc1", "foo1");
-        splits[1] = new FileSplit("nc2", "foo2");
-        Map<String, NodeControllerInfo> nodeMap = new HashMap<String, NodeControllerInfo>();
-        NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
-        NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
-
-        // Sets up mock returns.
-        when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
-        when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
-
-        String[] fieldNames = new String[] { "a1", "a2" };
-        IAType[] fieldTypes = new IAType[] { BuiltinType.ABOOLEAN, BuiltinType.ADAYTIMEDURATION };
-        ARecordType recordType = new ARecordType("record", fieldNames, fieldTypes, true);
-        String primaryKey = "a1";
-
-        // Calls ConnectorAPIServlet.formResponseObject.
-        nodeMap.put("nc1", mockInfo1);
-        nodeMap.put("nc2", mockInfo2);
-        PA.invokeMethod(servlet,
-                "formResponseObject(org.json.JSONObject, edu.uci.ics.hyracks.dataflow.std.file.FileSplit[], "
-                        + "edu.uci.ics.asterix.om.types.ARecordType, java.lang.String, java.util.Map)", actualResponse,
-                splits, recordType, primaryKey, nodeMap);
-
-        // Constructs expected response.
-        JSONObject expectedResponse = new JSONObject();
-        expectedResponse.put("keys", primaryKey);
-        expectedResponse.put("type", recordType.toJSON());
-        JSONArray splitsArray = new JSONArray();
-        JSONObject element1 = new JSONObject();
-        element1.put("ip", "127.0.0.1");
-        element1.put("path", splits[0].getLocalFile().getFile().getAbsolutePath());
-        JSONObject element2 = new JSONObject();
-        element2.put("ip", "127.0.0.2");
-        element2.put("path", splits[1].getLocalFile().getFile().getAbsolutePath());
-        splitsArray.put(element1);
-        splitsArray.put(element2);
-        expectedResponse.put("splits", splitsArray);
-
-        // Checks results.
-        Assert.assertEquals(actualResponse.toString(), expectedResponse.toString());
-    }
-
-    private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        // Retrieves file splits of the dataset.
-        AqlMetadataProvider metadataProvider = new AqlMetadataProvider(null, CentralFeedManager.getInstance());
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-        ARecordType recordType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
-        // Metadata transaction commits.
-        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        return recordType;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestCase.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestCase.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestCase.java
deleted file mode 100644
index da5068a..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestCase.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.aql;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.Reader;
-import java.io.StringWriter;
-import java.io.UnsupportedEncodingException;
-import java.util.List;
-
-import junit.framework.TestCase;
-
-import org.junit.Test;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-
-public class AQLTestCase extends TestCase {
-
-    private final File queryFile;
-
-    AQLTestCase(File queryFile) {
-        super("testAQL");
-        this.queryFile = queryFile;
-    }
-
-    @Test
-    public void testAQL() throws UnsupportedEncodingException, FileNotFoundException, ParseException, AsterixException,
-            AlgebricksException {
-        Reader fis = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
-        AQLParser parser = new AQLParser(fis);
-        List<Statement> statements;
-        GlobalConfig.ASTERIX_LOGGER.info(queryFile.toString());
-        try {
-            statements = parser.parse();
-        } catch (ParseException e) {
-            GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + fis);
-            StringWriter sw = new StringWriter();
-            PrintWriter writer = new PrintWriter(sw);
-            e.printStackTrace(writer);
-            GlobalConfig.ASTERIX_LOGGER.warning(sw.toString());
-            throw new ParseException("Parsing " + queryFile.toString());
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java
deleted file mode 100644
index c536113..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/aql/AQLTestSuite.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.aql;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.UnsupportedEncodingException;
-
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
-import org.apache.commons.lang3.StringUtils;
-
-import edu.uci.ics.asterix.aql.parser.ParseException;
-
-public class AQLTestSuite extends TestSuite {
-    private static String AQLTS_PATH = StringUtils.join(new String[] { "src", "test", "resources", "AQLTS",
-            "queries" + File.separator }, File.separator);
-    private static String AQLTS_SQL_LIKE_PATH = StringUtils.join(new String[] { "src", "test", "resources", "AQLTS",
-            "queries-sql-like" + File.separator }, File.separator);
-
-    public static Test suite() throws ParseException, UnsupportedEncodingException, FileNotFoundException {
-        File testData = new File(AQLTS_PATH);
-        File[] queries = testData.listFiles();
-        TestSuite testSuite = new TestSuite();
-        for (File file : queries) {
-            if (file.isFile()) {
-                testSuite.addTest(new AQLTestCase(file));
-            }
-        }
-        testData = new File(AQLTS_SQL_LIKE_PATH);
-        queries = testData.listFiles();
-        for (File file : queries) {
-            if (file.isFile()) {
-                testSuite.addTest(new AQLTestCase(file));
-            }
-        }
-
-        return testSuite;
-
-    }
-
-    public static void main(String args[]) throws Throwable {
-        junit.textui.TestRunner.run(AQLTestSuite.suite());
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/common/TestHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/common/TestHelper.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/common/TestHelper.java
deleted file mode 100644
index 827b40b..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/common/TestHelper.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.common;
-
-import java.util.List;
-
-public final class TestHelper {
-
-    public static boolean isInPrefixList(List<String> prefixList, String s) {
-        for (String s2 : prefixList) {
-            if (s.startsWith(s2)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
deleted file mode 100644
index 9222081..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/dml/DmlTest.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.dml;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.Reader;
-
-import org.junit.Test;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.api.java.AsterixJavaClient;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-import edu.uci.ics.asterix.test.base.AsterixTestHelper;
-
-public class DmlTest {
-
-    private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
-    private static final String PATH_ACTUAL = "dmltest" + File.separator;
-    private static final String SEPARATOR = File.separator;
-    private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "dmlts"
-            + SEPARATOR;
-    private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
-    private static final String PATH_SCRIPTS = PATH_BASE + "scripts" + SEPARATOR;
-    private static final String LOAD_FOR_ENLIST_FILE = PATH_SCRIPTS + "load-cust.aql";
-    private static final String ENLIST_FILE = PATH_SCRIPTS + "enlist-scan-cust.aql";
-
-    private static final PrintWriter ERR = new PrintWriter(System.err);
-
-    @Test
-    public void enlistTest() throws Exception {
-        File outdir = new File(PATH_ACTUAL);
-        if (outdir.exists()) {
-            AsterixTestHelper.deleteRec(outdir);
-        }
-        outdir.mkdirs();
-
-        AsterixHyracksIntegrationUtil.init();
-        Reader loadReader = new BufferedReader(
-                new InputStreamReader(new FileInputStream(LOAD_FOR_ENLIST_FILE), "UTF-8"));
-        AsterixJavaClient asterixLoad = new AsterixJavaClient(
-                AsterixHyracksIntegrationUtil.getHyracksClientConnection(), loadReader, ERR);
-        try {
-            asterixLoad.compile(true, false, false, false, false, true, false);
-        } catch (AsterixException e) {
-            throw new Exception("Compile ERROR for " + LOAD_FOR_ENLIST_FILE + ": " + e.getMessage(), e);
-        } finally {
-            loadReader.close();
-        }
-        asterixLoad.execute();
-        File enlistFile = new File(ENLIST_FILE);
-        int dot = enlistFile.getName().lastIndexOf('.');
-        String resultFileName = enlistFile.getName().substring(0, dot + 1) + ".adm";
-        File expectedFile = new File(PATH_EXPECTED + SEPARATOR + resultFileName);
-        File actualFile = new File(PATH_ACTUAL + SEPARATOR + resultFileName);
-        // Khurram
-        //TestsUtils.runScriptAndCompareWithResult(AsterixHyracksIntegrationUtil.getHyracksClientConnection(),
-                //enlistFile, ERR, expectedFile, actualFile);
-
-        AsterixHyracksIntegrationUtil.deinit();
-        for (String d : ASTERIX_DATA_DIRS) {
-            TestsUtils.deleteRec(new File(d));
-        }
-        outdir.delete();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
deleted file mode 100644
index 069c5aa..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.metadata;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
-import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-
-/**
- * Executes the Metadata tests.
- */
-@RunWith(Parameterized.class)
-public class MetadataTest {
-
-    private TestCaseContext tcCtx;
-
-    private static final String PATH_ACTUAL = "mdtest" + File.separator;
-    private static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources",
-            "metadata" + File.separator }, File.separator);
-    private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
-
-    private static AsterixTransactionProperties txnProperties;
-
-    @BeforeClass
-    public static void setUp() throws Exception {
-        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
-        File outdir = new File(PATH_ACTUAL);
-        outdir.mkdirs();
-
-        AsterixPropertiesAccessor apa = new AsterixPropertiesAccessor();
-        txnProperties = new AsterixTransactionProperties(apa);
-
-        deleteTransactionLogs();
-
-        AsterixHyracksIntegrationUtil.init();
-    }
-
-    @AfterClass
-    public static void tearDown() throws Exception {
-        AsterixHyracksIntegrationUtil.deinit();
-        File outdir = new File(PATH_ACTUAL);
-        File[] files = outdir.listFiles();
-        if (files == null || files.length == 0) {
-            outdir.delete();
-        }
-
-        // clean up the files written by the ASTERIX storage manager
-        for (String d : AsterixHyracksIntegrationUtil.getDataDirs()) {
-            TestsUtils.deleteRec(new File(d));
-        }
-    }
-
-    private static void deleteTransactionLogs() throws Exception {
-        for (String ncId : AsterixHyracksIntegrationUtil.getNcNames()) {
-            File log = new File(txnProperties.getLogDirectory(ncId));
-            if (log.exists()) {
-                FileUtils.deleteDirectory(log);
-            }
-        }
-    }
-
-    @Parameters
-    public static Collection<Object[]> tests() throws Exception {
-        Collection<Object[]> testArgs = new ArrayList<Object[]>();
-        TestCaseContext.Builder b = new TestCaseContext.Builder();
-        for (TestCaseContext ctx : b.build(new File(PATH_BASE))) {
-            testArgs.add(new Object[] { ctx });
-        }
-        return testArgs;
-    }
-
-    public MetadataTest(TestCaseContext tcCtx) {
-        this.tcCtx = tcCtx;
-    }
-
-    @Test
-    public void test() throws Exception {
-        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
deleted file mode 100644
index 58531aa..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/optimizer/OptimizerTest.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.optimizer;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.logging.Logger;
-
-import org.apache.commons.io.FileUtils;
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.internal.AssumptionViolatedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.api.java.AsterixJavaClient;
-import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
-import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
-import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
-import edu.uci.ics.asterix.test.base.AsterixTestHelper;
-import edu.uci.ics.asterix.test.common.TestHelper;
-
-@RunWith(Parameterized.class)
-public class OptimizerTest {
-
-    private static final Logger LOGGER = Logger.getLogger(OptimizerTest.class.getName());
-
-    private static final String SEPARATOR = File.separator;
-    private static final String EXTENSION_QUERY = "aql";
-    private static final String EXTENSION_RESULT = "plan";
-    private static final String FILENAME_IGNORE = "ignore.txt";
-    private static final String FILENAME_ONLY = "only.txt";
-    private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR
-            + "optimizerts" + SEPARATOR;
-    private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
-    private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
-    private static final String PATH_ACTUAL = "opttest" + SEPARATOR;
-
-    private static final ArrayList<String> ignore = AsterixTestHelper.readFile(FILENAME_IGNORE, PATH_BASE);
-    private static final ArrayList<String> only = AsterixTestHelper.readFile(FILENAME_ONLY, PATH_BASE);
-    private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
-
-    private static AsterixTransactionProperties txnProperties;
-
-    @BeforeClass
-    public static void setUp() throws Exception {
-        // File outdir = new File(PATH_ACTUAL);
-        // outdir.mkdirs();
-
-        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
-        File outdir = new File(PATH_ACTUAL);
-        outdir.mkdirs();
-
-        AsterixPropertiesAccessor apa = new AsterixPropertiesAccessor();
-        txnProperties = new AsterixTransactionProperties(apa);
-
-        deleteTransactionLogs();
-
-        AsterixHyracksIntegrationUtil.init();
-        // Set the node resolver to be the identity resolver that expects node names
-        // to be node controller ids; a valid assumption in test environment.
-        System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
-                IdentitiyResolverFactory.class.getName());
-    }
-
-    private static void deleteTransactionLogs() throws Exception {
-        for (String ncId : AsterixHyracksIntegrationUtil.getNcNames()) {
-            File log = new File(txnProperties.getLogDirectory(ncId));
-            if (log.exists()) {
-                FileUtils.deleteDirectory(log);
-            }
-        }
-    }
-
-    @AfterClass
-    public static void tearDown() throws Exception {
-        // _bootstrap.stop();
-        File outdir = new File(PATH_ACTUAL);
-        File[] files = outdir.listFiles();
-        if (files == null || files.length == 0) {
-            outdir.delete();
-        }
-        AsterixHyracksIntegrationUtil.deinit();
-    }
-
-    private static void suiteBuild(File dir, Collection<Object[]> testArgs, String path) {
-        for (File file : dir.listFiles()) {
-            if (file.isDirectory() && !file.getName().startsWith(".")) {
-                suiteBuild(file, testArgs, path + file.getName() + SEPARATOR);
-            }
-            if (file.isFile() && file.getName().endsWith(EXTENSION_QUERY)
-            // && !ignore.contains(path + file.getName())
-            ) {
-                String resultFileName = AsterixTestHelper.extToResExt(file.getName(), EXTENSION_RESULT);
-                File expectedFile = new File(PATH_EXPECTED + path + resultFileName);
-                File actualFile = new File(PATH_ACTUAL + SEPARATOR + path.replace(SEPARATOR, "_") + resultFileName);
-                testArgs.add(new Object[] { file, expectedFile, actualFile });
-            }
-        }
-    }
-
-    @Parameters
-    public static Collection<Object[]> tests() {
-        Collection<Object[]> testArgs = new ArrayList<Object[]>();
-        suiteBuild(new File(PATH_QUERIES), testArgs, "");
-        return testArgs;
-    }
-
-    private File actualFile;
-    private File expectedFile;
-    private File queryFile;
-
-    public OptimizerTest(File queryFile, File expectedFile, File actualFile) {
-        this.queryFile = queryFile;
-        this.expectedFile = expectedFile;
-        this.actualFile = actualFile;
-    }
-
-    @Test
-    public void test() throws Exception {
-        try {
-            String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length())
-                    .replace(SEPARATOR.charAt(0), '/');
-            if (!only.isEmpty()) {
-                boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
-                if (!toRun) {
-                    LOGGER.info("SKIP TEST: \"" + queryFile.getPath()
-                            + "\": \"only.txt\" is not empty and this test is not listed in it.");
-                }
-                Assume.assumeTrue(toRun);
-            }
-            boolean skipped = TestHelper.isInPrefixList(ignore, queryFileShort);
-            if (skipped) {
-                LOGGER.info("SKIP TEST: \"" + queryFile.getPath() + "\" in \"ignore.txt\".");
-            }
-            Assume.assumeTrue(!skipped);
-
-            LOGGER.info("RUN TEST: \"" + queryFile.getPath() + "\"");
-            Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
-            PrintWriter plan = new PrintWriter(actualFile);
-            AsterixJavaClient asterix = new AsterixJavaClient(
-                    AsterixHyracksIntegrationUtil.getHyracksClientConnection(), query, plan);
-            try {
-                asterix.compile(true, false, false, true, true, false, false);
-            } catch (AsterixException e) {
-                plan.close();
-                query.close();
-                throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
-            }
-            plan.close();
-            query.close();
-
-            BufferedReader readerExpected = new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile),
-                    "UTF-8"));
-            BufferedReader readerActual = new BufferedReader(new InputStreamReader(new FileInputStream(actualFile),
-                    "UTF-8"));
-
-            String lineExpected, lineActual;
-            int num = 1;
-            try {
-                while ((lineExpected = readerExpected.readLine()) != null) {
-                    lineActual = readerActual.readLine();
-                    // Assert.assertEquals(lineExpected, lineActual);
-                    if (lineActual == null) {
-                        throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
-                                + lineExpected + "\n> ");
-                    }
-                    if (!lineExpected.equals(lineActual)) {
-                        throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
-                                + lineExpected + "\n> " + lineActual);
-                    }
-                    ++num;
-                }
-                lineActual = readerActual.readLine();
-                // Assert.assertEquals(null, lineActual);
-                if (lineActual != null) {
-                    throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< \n> "
-                            + lineActual);
-                }
-                LOGGER.info("Test \"" + queryFile.getPath() + "\" PASSED!");
-                actualFile.delete();
-            } finally {
-                readerExpected.close();
-                readerActual.close();
-            }
-        } catch (Exception e) {
-            if (!(e instanceof AssumptionViolatedException)) {
-                LOGGER.severe("Test \"" + queryFile.getPath() + "\" FAILED!");
-                throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
-            } else {
-                throw e;
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
deleted file mode 100644
index 8547eb0..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.runtime;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
-import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
-import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
-import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-
-/**
- * Runs the runtime test cases under 'asterix-app/src/test/resources/runtimets'.
- */
-@RunWith(Parameterized.class)
-public class ExecutionTest {
-
-    protected static final Logger LOGGER = Logger.getLogger(ExecutionTest.class.getName());
-
-    protected static final String PATH_ACTUAL = "rttest" + File.separator;
-    protected static final String PATH_BASE = StringUtils.join(
-            new String[] { "src", "test", "resources", "runtimets" }, File.separator);
-
-    protected static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
-
-    protected static AsterixTransactionProperties txnProperties;
-
-    @BeforeClass
-    public static void setUp() throws Exception {
-        System.out.println("Starting setup");
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting setup");
-        }
-        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
-        File outdir = new File(PATH_ACTUAL);
-        outdir.mkdirs();
-
-        AsterixPropertiesAccessor apa = new AsterixPropertiesAccessor();
-        txnProperties = new AsterixTransactionProperties(apa);
-
-        deleteTransactionLogs();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("initializing pseudo cluster");
-        }
-        AsterixHyracksIntegrationUtil.init();
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("initializing HDFS");
-        }
-
-        HDFSCluster.getInstance().setup();
-
-        // Set the node resolver to be the identity resolver that expects node names
-        // to be node controller ids; a valid assumption in test environment.
-        System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
-                IdentitiyResolverFactory.class.getName());
-    }
-
-    @AfterClass
-    public static void tearDown() throws Exception {
-        AsterixHyracksIntegrationUtil.deinit();
-        File outdir = new File(PATH_ACTUAL);
-        File[] files = outdir.listFiles();
-        if (files == null || files.length == 0) {
-            outdir.delete();
-        }
-        // clean up the files written by the ASTERIX storage manager
-        for (String d : AsterixHyracksIntegrationUtil.getDataDirs()) {
-            TestsUtils.deleteRec(new File(d));
-        }
-        HDFSCluster.getInstance().cleanup();
-    }
-
-    private static void deleteTransactionLogs() throws Exception {
-        for (String ncId : AsterixHyracksIntegrationUtil.getNcNames()) {
-            File log = new File(txnProperties.getLogDirectory(ncId));
-            if (log.exists()) {
-                FileUtils.deleteDirectory(log);
-            }
-        }
-    }
-
-    @Parameters
-    public static Collection<Object[]> tests() throws Exception {
-        Collection<Object[]> testArgs = buildTestsInXml(TestCaseContext.ONLY_TESTSUITE_XML_NAME);
-        if (testArgs.size() == 0) {
-            testArgs = buildTestsInXml(TestCaseContext.DEFAULT_TESTSUITE_XML_NAME);
-        }
-        return testArgs;
-    }
-
-    protected static Collection<Object[]> buildTestsInXml(String xmlfile) throws Exception {
-        Collection<Object[]> testArgs = new ArrayList<Object[]>();
-        TestCaseContext.Builder b = new TestCaseContext.Builder();
-        for (TestCaseContext ctx : b.build(new File(PATH_BASE), xmlfile)) {
-            testArgs.add(new Object[] { ctx });
-        }
-        return testArgs;
-
-    }
-
-    protected TestCaseContext tcCtx;
-
-    public ExecutionTest(TestCaseContext tcCtx) {
-        this.tcCtx = tcCtx;
-    }
-
-    @Test
-    public void test() throws Exception {
-            TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
-    }
-}
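
For illustration, a minimal sketch of how the XML-driven suites above enumerate their cases, assuming only the TestCaseContext.Builder.build(File, String) overload and the DEFAULT_TESTSUITE_XML_NAME constant used in this diff; the ListRuntimeTestCases class name is hypothetical:

    import java.io.File;

    import edu.uci.ics.asterix.testframework.context.TestCaseContext;

    // Hypothetical helper: prints every test case that the runtime suite would
    // generate, using the same Builder call as ExecutionTest.buildTestsInXml().
    public class ListRuntimeTestCases {
        public static void main(String[] args) throws Exception {
            TestCaseContext.Builder builder = new TestCaseContext.Builder();
            File testRoot = new File("src/test/resources/runtimets");
            for (TestCaseContext ctx : builder.build(testRoot, TestCaseContext.DEFAULT_TESTSUITE_XML_NAME)) {
                System.out.println(ctx);
            }
        }
    }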

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
deleted file mode 100644
index 9463657..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.runtime;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-
-import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
-
-/**
- * Manages a Mini (local VM) HDFS cluster with a configured number of datanodes.
- *
- * @author ramangrover29
- */
-@SuppressWarnings("deprecation")
-public class HDFSCluster {
-
-    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
-    private static final int nameNodePort = 31888;
-    private static final String DATA_PATH = "data/hdfs";
-    private static final String HDFS_PATH = "/asterix";
-    private static final HDFSCluster INSTANCE = new HDFSCluster();
-
-    private MiniDFSCluster dfsCluster;
-    private int numDataNodes = 2;
-    private JobConf conf = new JobConf();
-    private FileSystem dfs;
-
-    public static HDFSCluster getInstance() {
-        return INSTANCE;
-    }
-
-    private HDFSCluster() {
-
-    }
-
-    /**
-     * Instantiates the (Mini) DFS Cluster with the configured number of datanodes.
-     * After instantiation, the test data is loaded into HDFS.
-     * Called prior to running the Runtime test suite.
-     */
-    public void setup() throws Exception {
-        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
-        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
-        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
-        cleanupLocal();
-        // This constructor is deprecated in Hadoop 2.x:
-        //dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, true, true, StartupOption.REGULAR, null);
-        MiniDFSCluster.Builder build = new MiniDFSCluster.Builder(conf);
-        build.nameNodePort(nameNodePort);
-        build.numDataNodes(numDataNodes);
-        build.startupOption(StartupOption.REGULAR);
-        dfsCluster = build.build();
-        dfs = FileSystem.get(conf);
-        loadData();
-    }
-
-    private void loadData() throws IOException {
-        Path destDir = new Path(HDFS_PATH);
-        dfs.mkdirs(destDir);
-        File srcDir = new File(DATA_PATH);
-        File[] listOfFiles = srcDir.listFiles();
-        for (File srcFile : listOfFiles) {
-            Path path = new Path(srcFile.getAbsolutePath());
-            dfs.copyFromLocalFile(path, destDir);
-        }
-    }
-
-    private void cleanupLocal() throws IOException {
-        // cleanup artifacts created on the local file system
-        FileSystem lfs = FileSystem.getLocal(new Configuration());
-        lfs.delete(new Path("build"), true);
-        System.setProperty("hadoop.log.dir", "logs");
-    }
-
-    public void cleanup() throws Exception {
-        if (dfsCluster != null) {
-            dfsCluster.shutdown();
-            cleanupLocal();
-        }
-    }
-
-    public static void main(String[] args) throws Exception {
-        HDFSCluster cluster = new HDFSCluster();
-        cluster.setup();
-        JobConf conf = configureJobConf();
-        FileSystem fs = FileSystem.get(conf);
-        InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
-        for (InputSplit split : inputSplits) {
-            System.out.println("split :" + split);
-        }
-        //   cluster.cleanup();
-    }
-
-    private static JobConf configureJobConf() throws Exception {
-        JobConf conf = new JobConf();
-        String hdfsUrl = "hdfs://127.0.0.1:31888";
-        String hdfsPath = "/asterix/extrasmalltweets.txt";
-        conf.set("fs.default.name", hdfsUrl);
-        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
-        conf.setClassLoader(HDFSAdapter.class.getClassLoader());
-        conf.set("mapred.input.dir", hdfsPath);
-        conf.set("mapred.input.format.class", "org.apache.hadoop.mapred.TextInputFormat");
-        return conf;
-    }
-
-}
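
For illustration, a minimal sketch of how this singleton is driven from a suite's lifecycle, mirroring the setup()/cleanup() calls that ExecutionTest makes around the runtime tests; the TinyHdfsSmokeTest class name is hypothetical and only the HDFSCluster methods shown above are assumed:

    import org.junit.AfterClass;
    import org.junit.BeforeClass;
    import org.junit.Test;

    import edu.uci.ics.asterix.test.runtime.HDFSCluster;

    // Hypothetical smoke test: brings the mini DFS cluster up once per suite.
    public class TinyHdfsSmokeTest {

        @BeforeClass
        public static void startDfs() throws Exception {
            // Starts the MiniDFSCluster and copies data/hdfs into /asterix.
            HDFSCluster.getInstance().setup();
        }

        @AfterClass
        public static void stopDfs() throws Exception {
            // Shuts the datanodes down and removes local build artifacts.
            HDFSCluster.getInstance().cleanup();
        }

        @Test
        public void clusterComesUp() {
            // Real tests would read the HDFS paths loaded in setup().
        }
    }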

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java
deleted file mode 100644
index faeb688..0000000
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/RepeatedTest.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.test.runtime;
-
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.annotation.Target;
-import java.util.Collection;
-
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.MethodRule;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-import org.junit.runners.model.FrameworkMethod;
-import org.junit.runners.model.Statement;
-
-import edu.uci.ics.asterix.test.aql.TestsUtils;
-import edu.uci.ics.asterix.test.runtime.RepeatRule.Repeat;
-import edu.uci.ics.asterix.testframework.context.TestCaseContext;
-
-/**
- * Runs runtime test cases that have been identified in the repeatedtestsuite.xml.
- * 
- * Each test is run 10000 times.
- */
-class RepeatRule implements MethodRule {
-
-    @Retention(RetentionPolicy.RUNTIME)
-    @Target({ java.lang.annotation.ElementType.METHOD })
-    public @interface Repeat {
-        public abstract int times();
-
-    }
-
-    private static class RepeatStatement extends Statement {
-
-        private final int times;
-        private final Statement statement;
-
-        private RepeatStatement(int times, Statement statement) {
-            this.times = times;
-            this.statement = statement;
-        }
-
-        @Override
-        public void evaluate() throws Throwable {
-            for (int i = 0; i < times; i++) {
-                statement.evaluate();
-            }
-        }
-    }
-
-    @Override
-    public Statement apply(Statement statement, FrameworkMethod method, Object target) {
-        Statement result = statement;
-        Repeat repeat = method.getAnnotation(Repeat.class);
-        if (repeat != null) {
-            int times = repeat.times();
-            result = new RepeatStatement(times, statement);
-        }
-        return result;
-
-    }
-}
-
-@RunWith(Parameterized.class)
-public class RepeatedTest extends ExecutionTest {
-
-    private int count;
-    
-    @Parameters
-    public static Collection<Object[]> tests() throws Exception {
-        Collection<Object[]> testArgs = buildTestsInXml(TestCaseContext.DEFAULT_REPEADED_TESTSUITE_XML_NAME);
-        return testArgs;
-    }
-
-    public RepeatedTest(TestCaseContext tcCtx) {
-        super(tcCtx);
-        count = 0;
-    }
-
-    @Rule
-    public RepeatRule repeatRule = new RepeatRule();
-
-    @Test
-    @Repeat(times = 10000)
-    public void test() throws Exception {
-        System.err.println("***** Test Count: " + (++count) + " ******");
-        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
-    }
-}
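
For illustration, a self-contained sketch of the @Repeat/@Rule pattern above, kept in the same package because RepeatRule is declared package-private; the RepeatRuleExample class and the times value are hypothetical:

    package edu.uci.ics.asterix.test.runtime;

    import org.junit.Rule;
    import org.junit.Test;

    // Hypothetical example: any JUnit 4 test in this package can reuse the rule
    // by declaring it as a @Rule field and annotating a method with @Repeat.
    public class RepeatRuleExample {

        @Rule
        public RepeatRule repeatRule = new RepeatRule();

        @Test
        @RepeatRule.Repeat(times = 3)
        public void runsThreeTimes() {
            // RepeatStatement wraps this method and evaluates it 'times' times.
            System.out.println("one iteration");
        }
    }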


[03/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
new file mode 100644
index 0000000..7f5f480
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.api.http.servlet;
+
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import junit.extensions.PA;
+import junit.framework.Assert;
+
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.junit.Test;
+
+import edu.uci.ics.asterix.feeds.CentralFeedManager;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.JSONDeserializerForTypes;
+import edu.uci.ics.asterix.test.runtime.ExecutionTest;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
+import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+
+@SuppressWarnings("deprecation")
+public class ConnectorAPIServletTest {
+
+    @Test
+    public void testGet() throws Exception {
+        // Starts test asterixdb cluster.
+        ExecutionTest.setUp();
+
+        // Configures a test connector api servlet.
+        ConnectorAPIServlet servlet = spy(new ConnectorAPIServlet());
+        ServletConfig mockServletConfig = mock(ServletConfig.class);
+        servlet.init(mockServletConfig);
+        Map<String, NodeControllerInfo> nodeMap = new HashMap<String, NodeControllerInfo>();
+        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+        PrintWriter outputWriter = new PrintWriter(outputStream);
+
+        // Creates mocks.
+        ServletContext mockContext = mock(ServletContext.class);
+        IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
+        NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
+        NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
+        HttpServletRequest mockRequest = mock(HttpServletRequest.class);
+        HttpServletResponse mockResponse = mock(HttpServletResponse.class);
+
+        // Sets up mock returns.
+        when(servlet.getServletContext()).thenReturn(mockContext);
+        when(mockRequest.getParameter("dataverseName")).thenReturn("Metadata");
+        when(mockRequest.getParameter("datasetName")).thenReturn("Dataset");
+        when(mockResponse.getWriter()).thenReturn(outputWriter);
+        when(mockContext.getAttribute(anyString())).thenReturn(mockHcc);
+        when(mockHcc.getNodeControllerInfos()).thenReturn(nodeMap);
+        when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
+        when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
+
+        // Calls ConnectorAPIServlet.formResponseObject.
+        nodeMap.put("nc1", mockInfo1);
+        nodeMap.put("nc2", mockInfo2);
+        servlet.doGet(mockRequest, mockResponse);
+
+        // Constructs the actual response.
+        JSONTokener tokener = new JSONTokener(new InputStreamReader(
+                new ByteArrayInputStream(outputStream.toByteArray())));
+        JSONObject actualResponse = new JSONObject(tokener);
+
+        // Checks the data type of the dataset.
+        String primaryKey = actualResponse.getString("keys");
+        Assert.assertEquals("DataverseName,DatasetName", primaryKey);
+        ARecordType recordType = (ARecordType) JSONDeserializerForTypes.convertFromJSON((JSONObject) actualResponse
+                .get("type"));
+        Assert.assertEquals(getMetadataRecordType("Metadata", "Dataset"), recordType);
+
+        // Checks the correctness of results.
+        JSONArray splits = actualResponse.getJSONArray("splits");
+        String path = ((JSONObject) splits.get(0)).getString("path");
+        Assert.assertTrue(path.endsWith("Metadata/Dataset_idx_Dataset"));
+
+        // Tears down the asterixdb cluster.
+        ExecutionTest.tearDown();
+    }
+
+    @Test
+    public void testFormResponseObject() throws Exception {
+        ConnectorAPIServlet servlet = new ConnectorAPIServlet();
+        JSONObject actualResponse = new JSONObject();
+        FileSplit[] splits = new FileSplit[2];
+        splits[0] = new FileSplit("nc1", "foo1");
+        splits[1] = new FileSplit("nc2", "foo2");
+        Map<String, NodeControllerInfo> nodeMap = new HashMap<String, NodeControllerInfo>();
+        NodeControllerInfo mockInfo1 = mock(NodeControllerInfo.class);
+        NodeControllerInfo mockInfo2 = mock(NodeControllerInfo.class);
+
+        // Sets up mock returns.
+        when(mockInfo1.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.1", 3099));
+        when(mockInfo2.getNetworkAddress()).thenReturn(new NetworkAddress("127.0.0.2", 3099));
+
+        String[] fieldNames = new String[] { "a1", "a2" };
+        IAType[] fieldTypes = new IAType[] { BuiltinType.ABOOLEAN, BuiltinType.ADAYTIMEDURATION };
+        ARecordType recordType = new ARecordType("record", fieldNames, fieldTypes, true);
+        String primaryKey = "a1";
+
+        // Calls ConnectorAPIServlet.formResponseObject.
+        nodeMap.put("nc1", mockInfo1);
+        nodeMap.put("nc2", mockInfo2);
+        PA.invokeMethod(servlet,
+                "formResponseObject(org.json.JSONObject, edu.uci.ics.hyracks.dataflow.std.file.FileSplit[], "
+                        + "edu.uci.ics.asterix.om.types.ARecordType, java.lang.String, java.util.Map)", actualResponse,
+                splits, recordType, primaryKey, nodeMap);
+
+        // Constructs expected response.
+        JSONObject expectedResponse = new JSONObject();
+        expectedResponse.put("keys", primaryKey);
+        expectedResponse.put("type", recordType.toJSON());
+        JSONArray splitsArray = new JSONArray();
+        JSONObject element1 = new JSONObject();
+        element1.put("ip", "127.0.0.1");
+        element1.put("path", splits[0].getLocalFile().getFile().getAbsolutePath());
+        JSONObject element2 = new JSONObject();
+        element2.put("ip", "127.0.0.2");
+        element2.put("path", splits[1].getLocalFile().getFile().getAbsolutePath());
+        splitsArray.put(element1);
+        splitsArray.put(element2);
+        expectedResponse.put("splits", splitsArray);
+
+        // Checks results.
+        Assert.assertEquals(actualResponse.toString(), expectedResponse.toString());
+    }
+
+    private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        // Retrieves file splits of the dataset.
+        AqlMetadataProvider metadataProvider = new AqlMetadataProvider(null, CentralFeedManager.getInstance());
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+        ARecordType recordType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+        // Metadata transaction commits.
+        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        return recordType;
+    }
+}
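
For illustration, a trimmed-down sketch of the mock-and-stub pattern used above (Mockito mocks for the servlet request/response plus a PrintWriter over an in-memory buffer); the ServletMockingPatternTest class is hypothetical and no AsterixDB classes are involved:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.io.ByteArrayOutputStream;
    import java.io.PrintWriter;

    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.junit.Assert;
    import org.junit.Test;

    // Hypothetical illustration: stub the request parameters, hand the servlet a
    // writer backed by a byte buffer, and assert on whatever was written.
    public class ServletMockingPatternTest {

        @Test
        public void capturesServletOutput() throws Exception {
            HttpServletRequest request = mock(HttpServletRequest.class);
            HttpServletResponse response = mock(HttpServletResponse.class);
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            PrintWriter writer = new PrintWriter(buffer, true);

            when(request.getParameter("datasetName")).thenReturn("Dataset");
            when(response.getWriter()).thenReturn(writer);

            // A real test would call servlet.doGet(request, response) here.
            response.getWriter().println(request.getParameter("datasetName"));
            writer.flush();

            Assert.assertTrue(buffer.toString().contains("Dataset"));
        }
    }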

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java b/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java
new file mode 100644
index 0000000..da5068a
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestCase.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.aql;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.Reader;
+import java.io.StringWriter;
+import java.io.UnsupportedEncodingException;
+import java.util.List;
+
+import junit.framework.TestCase;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.parser.AQLParser;
+import edu.uci.ics.asterix.aql.parser.ParseException;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+
+public class AQLTestCase extends TestCase {
+
+    private final File queryFile;
+
+    AQLTestCase(File queryFile) {
+        super("testAQL");
+        this.queryFile = queryFile;
+    }
+
+    @Test
+    public void testAQL() throws UnsupportedEncodingException, FileNotFoundException, ParseException, AsterixException,
+            AlgebricksException {
+        Reader fis = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
+        AQLParser parser = new AQLParser(fis);
+        List<Statement> statements;
+        GlobalConfig.ASTERIX_LOGGER.info(queryFile.toString());
+        try {
+            statements = parser.parse();
+        } catch (ParseException e) {
+            GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + fis);
+            StringWriter sw = new StringWriter();
+            PrintWriter writer = new PrintWriter(sw);
+            e.printStackTrace(writer);
+            GlobalConfig.ASTERIX_LOGGER.warning(sw.toString());
+            throw new ParseException("Parsing " + queryFile.toString());
+        }
+
+    }
+}
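
For illustration, a minimal sketch that parses an in-memory query rather than a file, assuming only the AQLParser(Reader) constructor and parse() method exercised above; the ParseSingleQuery class and the query text are hypothetical:

    import java.io.StringReader;
    import java.util.List;

    import edu.uci.ics.asterix.aql.base.Statement;
    import edu.uci.ics.asterix.aql.parser.AQLParser;

    // Hypothetical snippet: feeds a query string straight to the parser and
    // reports how many statements came back.
    public class ParseSingleQuery {
        public static void main(String[] args) throws Exception {
            String query = "for $x in dataset('Foo') return $x;";
            AQLParser parser = new AQLParser(new StringReader(query));
            List<Statement> statements = parser.parse();
            System.out.println("parsed " + statements.size() + " statement(s)");
        }
    }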

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java b/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java
new file mode 100644
index 0000000..c536113
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.aql;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.UnsupportedEncodingException;
+
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+import org.apache.commons.lang3.StringUtils;
+
+import edu.uci.ics.asterix.aql.parser.ParseException;
+
+public class AQLTestSuite extends TestSuite {
+    private static String AQLTS_PATH = StringUtils.join(new String[] { "src", "test", "resources", "AQLTS",
+            "queries" + File.separator }, File.separator);
+    private static String AQLTS_SQL_LIKE_PATH = StringUtils.join(new String[] { "src", "test", "resources", "AQLTS",
+            "queries-sql-like" + File.separator }, File.separator);
+
+    public static Test suite() throws ParseException, UnsupportedEncodingException, FileNotFoundException {
+        File testData = new File(AQLTS_PATH);
+        File[] queries = testData.listFiles();
+        TestSuite testSuite = new TestSuite();
+        for (File file : queries) {
+            if (file.isFile()) {
+                testSuite.addTest(new AQLTestCase(file));
+            }
+        }
+        testData = new File(AQLTS_SQL_LIKE_PATH);
+        queries = testData.listFiles();
+        for (File file : queries) {
+            if (file.isFile()) {
+                testSuite.addTest(new AQLTestCase(file));
+            }
+        }
+
+        return testSuite;
+
+    }
+
+    public static void main(String args[]) throws Throwable {
+        junit.textui.TestRunner.run(AQLTestSuite.suite());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java b/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java
new file mode 100644
index 0000000..827b40b
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.common;
+
+import java.util.List;
+
+public final class TestHelper {
+
+    public static boolean isInPrefixList(List<String> prefixList, String s) {
+        for (String s2 : prefixList) {
+            if (s.startsWith(s2)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}
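
For illustration, a tiny usage sketch of the prefix check above, the way OptimizerTest applies it to the entries read from only.txt and ignore.txt; the list entries and paths are hypothetical:

    import java.util.Arrays;
    import java.util.List;

    import edu.uci.ics.asterix.test.common.TestHelper;

    // Hypothetical usage: a test is skipped when its short path starts with any
    // entry of the ignore list.
    public class PrefixListExample {
        public static void main(String[] args) {
            List<String> ignore = Arrays.asList("dml/", "feeds/feed-with-policy");
            System.out.println(TestHelper.isInPrefixList(ignore, "dml/insert.aql"));  // true
            System.out.println(TestHelper.isInPrefixList(ignore, "misc/tid_01.aql")); // false
        }
    }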

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java b/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
new file mode 100644
index 0000000..9222081
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.dml;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.Reader;
+
+import org.junit.Test;
+
+import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
+import edu.uci.ics.asterix.api.java.AsterixJavaClient;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.base.AsterixTestHelper;
+
+public class DmlTest {
+
+    private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
+    private static final String PATH_ACTUAL = "dmltest" + File.separator;
+    private static final String SEPARATOR = File.separator;
+    private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "dmlts"
+            + SEPARATOR;
+    private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
+    private static final String PATH_SCRIPTS = PATH_BASE + "scripts" + SEPARATOR;
+    private static final String LOAD_FOR_ENLIST_FILE = PATH_SCRIPTS + "load-cust.aql";
+    private static final String ENLIST_FILE = PATH_SCRIPTS + "enlist-scan-cust.aql";
+
+    private static final PrintWriter ERR = new PrintWriter(System.err);
+
+    @Test
+    public void enlistTest() throws Exception {
+        File outdir = new File(PATH_ACTUAL);
+        if (outdir.exists()) {
+            AsterixTestHelper.deleteRec(outdir);
+        }
+        outdir.mkdirs();
+
+        AsterixHyracksIntegrationUtil.init();
+        Reader loadReader = new BufferedReader(
+                new InputStreamReader(new FileInputStream(LOAD_FOR_ENLIST_FILE), "UTF-8"));
+        AsterixJavaClient asterixLoad = new AsterixJavaClient(
+                AsterixHyracksIntegrationUtil.getHyracksClientConnection(), loadReader, ERR);
+        try {
+            asterixLoad.compile(true, false, false, false, false, true, false);
+        } catch (AsterixException e) {
+            throw new Exception("Compile ERROR for " + LOAD_FOR_ENLIST_FILE + ": " + e.getMessage(), e);
+        } finally {
+            loadReader.close();
+        }
+        asterixLoad.execute();
+        File enlistFile = new File(ENLIST_FILE);
+        int dot = enlistFile.getName().lastIndexOf('.');
+        String resultFileName = enlistFile.getName().substring(0, dot) + ".adm";
+        File expectedFile = new File(PATH_EXPECTED + SEPARATOR + resultFileName);
+        File actualFile = new File(PATH_ACTUAL + SEPARATOR + resultFileName);
+        // Khurram
+        //TestsUtils.runScriptAndCompareWithResult(AsterixHyracksIntegrationUtil.getHyracksClientConnection(),
+                //enlistFile, ERR, expectedFile, actualFile);
+
+        AsterixHyracksIntegrationUtil.deinit();
+        for (String d : ASTERIX_DATA_DIRS) {
+            TestsUtils.deleteRec(new File(d));
+        }
+        outdir.delete();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java
new file mode 100644
index 0000000..069c5aa
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.metadata;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
+import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+
+/**
+ * Executes the Metadata tests.
+ */
+@RunWith(Parameterized.class)
+public class MetadataTest {
+
+    private TestCaseContext tcCtx;
+
+    private static final String PATH_ACTUAL = "mdtest" + File.separator;
+    private static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources",
+            "metadata" + File.separator }, File.separator);
+    private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
+
+    private static AsterixTransactionProperties txnProperties;
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
+        File outdir = new File(PATH_ACTUAL);
+        outdir.mkdirs();
+
+        AsterixPropertiesAccessor apa = new AsterixPropertiesAccessor();
+        txnProperties = new AsterixTransactionProperties(apa);
+
+        deleteTransactionLogs();
+
+        AsterixHyracksIntegrationUtil.init();
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        AsterixHyracksIntegrationUtil.deinit();
+        File outdir = new File(PATH_ACTUAL);
+        File[] files = outdir.listFiles();
+        if (files == null || files.length == 0) {
+            outdir.delete();
+        }
+
+        // clean up the files written by the ASTERIX storage manager
+        for (String d : AsterixHyracksIntegrationUtil.getDataDirs()) {
+            TestsUtils.deleteRec(new File(d));
+        }
+    }
+
+    private static void deleteTransactionLogs() throws Exception {
+        for (String ncId : AsterixHyracksIntegrationUtil.getNcNames()) {
+            File log = new File(txnProperties.getLogDirectory(ncId));
+            if (log.exists()) {
+                FileUtils.deleteDirectory(log);
+            }
+        }
+    }
+
+    @Parameters
+    public static Collection<Object[]> tests() throws Exception {
+        Collection<Object[]> testArgs = new ArrayList<Object[]>();
+        TestCaseContext.Builder b = new TestCaseContext.Builder();
+        for (TestCaseContext ctx : b.build(new File(PATH_BASE))) {
+            testArgs.add(new Object[] { ctx });
+        }
+        return testArgs;
+    }
+
+    public MetadataTest(TestCaseContext tcCtx) {
+        this.tcCtx = tcCtx;
+    }
+
+    @Test
+    public void test() throws Exception {
+        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
+    }
+
+}
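
For illustration, a self-contained sketch of the JUnit Parameterized pattern that MetadataTest (and the other suites in this commit) follow: @Parameters produces one Object[] per case and JUnit instantiates the class once per entry; the ParameterizedPatternTest class and its data are hypothetical:

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.Assert;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameters;

    // Hypothetical illustration of the parameterized-runner pattern.
    @RunWith(Parameterized.class)
    public class ParameterizedPatternTest {

        @Parameters
        public static Collection<Object[]> cases() {
            return Arrays.asList(new Object[][] { { "a", 1 }, { "bb", 2 }, { "ccc", 3 } });
        }

        private final String input;
        private final int expectedLength;

        public ParameterizedPatternTest(String input, int expectedLength) {
            this.input = input;
            this.expectedLength = expectedLength;
        }

        @Test
        public void lengthMatches() {
            Assert.assertEquals(expectedLength, input.length());
        }
    }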

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java b/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
new file mode 100644
index 0000000..58531aa
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.optimizer;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.logging.Logger;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.internal.AssumptionViolatedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
+import edu.uci.ics.asterix.api.java.AsterixJavaClient;
+import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
+import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
+import edu.uci.ics.asterix.test.base.AsterixTestHelper;
+import edu.uci.ics.asterix.test.common.TestHelper;
+
+@RunWith(Parameterized.class)
+public class OptimizerTest {
+
+    private static final Logger LOGGER = Logger.getLogger(OptimizerTest.class.getName());
+
+    private static final String SEPARATOR = File.separator;
+    private static final String EXTENSION_QUERY = "aql";
+    private static final String EXTENSION_RESULT = "plan";
+    private static final String FILENAME_IGNORE = "ignore.txt";
+    private static final String FILENAME_ONLY = "only.txt";
+    private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR
+            + "optimizerts" + SEPARATOR;
+    private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
+    private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
+    private static final String PATH_ACTUAL = "opttest" + SEPARATOR;
+
+    private static final ArrayList<String> ignore = AsterixTestHelper.readFile(FILENAME_IGNORE, PATH_BASE);
+    private static final ArrayList<String> only = AsterixTestHelper.readFile(FILENAME_ONLY, PATH_BASE);
+    private static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
+
+    private static AsterixTransactionProperties txnProperties;
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        // File outdir = new File(PATH_ACTUAL);
+        // outdir.mkdirs();
+
+        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
+        File outdir = new File(PATH_ACTUAL);
+        outdir.mkdirs();
+
+        AsterixPropertiesAccessor apa = new AsterixPropertiesAccessor();
+        txnProperties = new AsterixTransactionProperties(apa);
+
+        deleteTransactionLogs();
+
+        AsterixHyracksIntegrationUtil.init();
+        // Set the node resolver to be the identity resolver that expects node names
+        // to be node controller ids; a valid assumption in test environment.
+        System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
+                IdentitiyResolverFactory.class.getName());
+    }
+
+    private static void deleteTransactionLogs() throws Exception {
+        for (String ncId : AsterixHyracksIntegrationUtil.getNcNames()) {
+            File log = new File(txnProperties.getLogDirectory(ncId));
+            if (log.exists()) {
+                FileUtils.deleteDirectory(log);
+            }
+        }
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        // _bootstrap.stop();
+        File outdir = new File(PATH_ACTUAL);
+        File[] files = outdir.listFiles();
+        if (files == null || files.length == 0) {
+            outdir.delete();
+        }
+        AsterixHyracksIntegrationUtil.deinit();
+    }
+
+    private static void suiteBuild(File dir, Collection<Object[]> testArgs, String path) {
+        for (File file : dir.listFiles()) {
+            if (file.isDirectory() && !file.getName().startsWith(".")) {
+                suiteBuild(file, testArgs, path + file.getName() + SEPARATOR);
+            }
+            if (file.isFile() && file.getName().endsWith(EXTENSION_QUERY)
+            // && !ignore.contains(path + file.getName())
+            ) {
+                String resultFileName = AsterixTestHelper.extToResExt(file.getName(), EXTENSION_RESULT);
+                File expectedFile = new File(PATH_EXPECTED + path + resultFileName);
+                File actualFile = new File(PATH_ACTUAL + SEPARATOR + path.replace(SEPARATOR, "_") + resultFileName);
+                testArgs.add(new Object[] { file, expectedFile, actualFile });
+            }
+        }
+    }
+
+    @Parameters
+    public static Collection<Object[]> tests() {
+        Collection<Object[]> testArgs = new ArrayList<Object[]>();
+        suiteBuild(new File(PATH_QUERIES), testArgs, "");
+        return testArgs;
+    }
+
+    private File actualFile;
+    private File expectedFile;
+    private File queryFile;
+
+    public OptimizerTest(File queryFile, File expectedFile, File actualFile) {
+        this.queryFile = queryFile;
+        this.expectedFile = expectedFile;
+        this.actualFile = actualFile;
+    }
+
+    @Test
+    public void test() throws Exception {
+        try {
+            String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length())
+                    .replace(SEPARATOR.charAt(0), '/');
+            if (!only.isEmpty()) {
+                boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
+                if (!toRun) {
+                    LOGGER.info("SKIP TEST: \"" + queryFile.getPath()
+                            + "\": \"only.txt\" is not empty and this test is not listed in it.");
+                }
+                Assume.assumeTrue(toRun);
+            }
+            boolean skipped = TestHelper.isInPrefixList(ignore, queryFileShort);
+            if (skipped) {
+                LOGGER.info("SKIP TEST: \"" + queryFile.getPath() + "\" in \"ignore.txt\".");
+            }
+            Assume.assumeTrue(!skipped);
+
+            LOGGER.info("RUN TEST: \"" + queryFile.getPath() + "\"");
+            Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
+            PrintWriter plan = new PrintWriter(actualFile);
+            AsterixJavaClient asterix = new AsterixJavaClient(
+                    AsterixHyracksIntegrationUtil.getHyracksClientConnection(), query, plan);
+            try {
+                asterix.compile(true, false, false, true, true, false, false);
+            } catch (AsterixException e) {
+                plan.close();
+                query.close();
+                throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
+            }
+            plan.close();
+            query.close();
+
+            BufferedReader readerExpected = new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile),
+                    "UTF-8"));
+            BufferedReader readerActual = new BufferedReader(new InputStreamReader(new FileInputStream(actualFile),
+                    "UTF-8"));
+
+            String lineExpected, lineActual;
+            int num = 1;
+            try {
+                while ((lineExpected = readerExpected.readLine()) != null) {
+                    lineActual = readerActual.readLine();
+                    // Assert.assertEquals(lineExpected, lineActual);
+                    if (lineActual == null) {
+                        throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
+                                + lineExpected + "\n> ");
+                    }
+                    if (!lineExpected.equals(lineActual)) {
+                        throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
+                                + lineExpected + "\n> " + lineActual);
+                    }
+                    ++num;
+                }
+                lineActual = readerActual.readLine();
+                // Assert.assertEquals(null, lineActual);
+                if (lineActual != null) {
+                    throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< \n> "
+                            + lineActual);
+                }
+                LOGGER.info("Test \"" + queryFile.getPath() + "\" PASSED!");
+                actualFile.delete();
+            } finally {
+                readerExpected.close();
+                readerActual.close();
+            }
+        } catch (Exception e) {
+            if (!(e instanceof AssumptionViolatedException)) {
+                LOGGER.severe("Test \"" + queryFile.getPath() + "\" FAILED!");
+                throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
+            } else {
+                throw e;
+            }
+        }
+    }
+}
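
The only.txt / ignore.txt handling above goes through TestHelper.isInPrefixList, whose implementation is not part of this diff. As a rough, hypothetical sketch (the real TestHelper may differ), the check amounts to a simple prefix match of the slash-separated test path against each entry:

    import java.util.Collection;

    public final class PrefixListSketch {
        // Returns true if the test path starts with any prefix read from
        // only.txt or ignore.txt; an empty prefix list matches nothing.
        public static boolean isInPrefixList(Collection<String> prefixes, String testPath) {
            for (String prefix : prefixes) {
                if (testPath.startsWith(prefix)) {
                    return true;
                }
            }
            return false;
        }
    }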

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
new file mode 100644
index 0000000..8547eb0
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.runtime;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
+import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
+import edu.uci.ics.asterix.external.util.IdentitiyResolverFactory;
+import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+
+/**
+ * Runs the runtime test cases under 'asterix-app/src/test/resources/runtimets'.
+ */
+@RunWith(Parameterized.class)
+public class ExecutionTest {
+
+    protected static final Logger LOGGER = Logger.getLogger(ExecutionTest.class.getName());
+
+    protected static final String PATH_ACTUAL = "rttest" + File.separator;
+    protected static final String PATH_BASE = StringUtils.join(
+            new String[] { "src", "test", "resources", "runtimets" }, File.separator);
+
+    protected static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
+
+    protected static AsterixTransactionProperties txnProperties;
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        System.out.println("Starting setup");
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting setup");
+        }
+        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
+        File outdir = new File(PATH_ACTUAL);
+        outdir.mkdirs();
+
+        AsterixPropertiesAccessor apa = new AsterixPropertiesAccessor();
+        txnProperties = new AsterixTransactionProperties(apa);
+
+        deleteTransactionLogs();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("initializing pseudo cluster");
+        }
+        AsterixHyracksIntegrationUtil.init();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("initializing HDFS");
+        }
+
+        HDFSCluster.getInstance().setup();
+
+        // Set the node resolver to be the identity resolver that expects node names
+        // to be node controller ids; a valid assumption in the test environment.
+        System.setProperty(FileSystemBasedAdapter.NODE_RESOLVER_FACTORY_PROPERTY,
+                IdentitiyResolverFactory.class.getName());
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        AsterixHyracksIntegrationUtil.deinit();
+        File outdir = new File(PATH_ACTUAL);
+        File[] files = outdir.listFiles();
+        if (files == null || files.length == 0) {
+            outdir.delete();
+        }
+        // clean up the files written by the ASTERIX storage manager
+        for (String d : AsterixHyracksIntegrationUtil.getDataDirs()) {
+            TestsUtils.deleteRec(new File(d));
+        }
+        HDFSCluster.getInstance().cleanup();
+    }
+
+    private static void deleteTransactionLogs() throws Exception {
+        for (String ncId : AsterixHyracksIntegrationUtil.getNcNames()) {
+            File log = new File(txnProperties.getLogDirectory(ncId));
+            if (log.exists()) {
+                FileUtils.deleteDirectory(log);
+            }
+        }
+    }
+
+    @Parameters
+    public static Collection<Object[]> tests() throws Exception {
+        Collection<Object[]> testArgs = buildTestsInXml(TestCaseContext.ONLY_TESTSUITE_XML_NAME);
+        if (testArgs.size() == 0) {
+            testArgs = buildTestsInXml(TestCaseContext.DEFAULT_TESTSUITE_XML_NAME);
+        }
+        return testArgs;
+    }
+
+    protected static Collection<Object[]> buildTestsInXml(String xmlfile) throws Exception {
+        Collection<Object[]> testArgs = new ArrayList<Object[]>();
+        TestCaseContext.Builder b = new TestCaseContext.Builder();
+        for (TestCaseContext ctx : b.build(new File(PATH_BASE), xmlfile)) {
+            testArgs.add(new Object[] { ctx });
+        }
+        return testArgs;
+
+    }
+
+    protected TestCaseContext tcCtx;
+
+    public ExecutionTest(TestCaseContext tcCtx) {
+        this.tcCtx = tcCtx;
+    }
+
+    @Test
+    public void test() throws Exception {
+        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
+    }
+}
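
For readers unfamiliar with JUnit 4's Parameterized runner used by this class: the runner calls the @Parameters method once, then instantiates the test class once per Object[] entry, passing the entry to the constructor, and runs every @Test method on each instance. A minimal, self-contained illustration (not AsterixDB code, just the pattern):

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.Assert;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameters;

    @RunWith(Parameterized.class)
    public class SquareTest {

        @Parameters
        public static Collection<Object[]> data() {
            // each Object[] becomes one constructor invocation, i.e. one test instance
            return Arrays.asList(new Object[][] { { 2, 4 }, { 3, 9 }, { 4, 16 } });
        }

        private final int input;
        private final int expected;

        public SquareTest(int input, int expected) {
            this.input = input;
            this.expected = expected;
        }

        @Test
        public void square() {
            Assert.assertEquals(expected, input * input);
        }
    }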

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
new file mode 100644
index 0000000..9463657
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.runtime;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+
+import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
+
+/**
+ * Manages a Mini (local VM) HDFS cluster with a configured number of datanodes.
+ *
+ * @author ramangrover29
+ */
+@SuppressWarnings("deprecation")
+public class HDFSCluster {
+
+    private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
+    private static final int nameNodePort = 31888;
+    private static final String DATA_PATH = "data/hdfs";
+    private static final String HDFS_PATH = "/asterix";
+    private static final HDFSCluster INSTANCE = new HDFSCluster();
+
+    private MiniDFSCluster dfsCluster;
+    private int numDataNodes = 2;
+    private JobConf conf = new JobConf();
+    private FileSystem dfs;
+
+    public static HDFSCluster getInstance() {
+        return INSTANCE;
+    }
+
+    private HDFSCluster() {
+
+    }
+
+    /**
+     * Instantiates the (Mini) DFS Cluster with the configured number of datanodes.
+     * Post instantiation, data is loaded into HDFS.
+     * Called prior to running the Runtime test suite.
+     */
+    public void setup() throws Exception {
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+        conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+        cleanupLocal();
+        // this constructor is deprecated in Hadoop 2.x:
+        //dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, true, true, StartupOption.REGULAR, null);
+        MiniDFSCluster.Builder build = new MiniDFSCluster.Builder(conf);
+        build.nameNodePort(nameNodePort);
+        build.numDataNodes(numDataNodes);
+        build.startupOption(StartupOption.REGULAR);
+        dfsCluster = build.build();
+        dfs = FileSystem.get(conf);
+        loadData();
+    }
+
+    private void loadData() throws IOException {
+        Path destDir = new Path(HDFS_PATH);
+        dfs.mkdirs(destDir);
+        File srcDir = new File(DATA_PATH);
+        File[] listOfFiles = srcDir.listFiles();
+        for (File srcFile : listOfFiles) {
+            Path path = new Path(srcFile.getAbsolutePath());
+            dfs.copyFromLocalFile(path, destDir);
+        }
+    }
+
+    private void cleanupLocal() throws IOException {
+        // cleanup artifacts created on the local file system
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        System.setProperty("hadoop.log.dir", "logs");
+    }
+
+    public void cleanup() throws Exception {
+        if (dfsCluster != null) {
+            dfsCluster.shutdown();
+            cleanupLocal();
+        }
+    }
+
+    public static void main(String[] args) throws Exception {
+        HDFSCluster cluster = new HDFSCluster();
+        cluster.setup();
+        JobConf conf = configureJobConf();
+        FileSystem fs = FileSystem.get(conf);
+        InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
+        for (InputSplit split : inputSplits) {
+            System.out.println("split :" + split);
+        }
+        //   cluster.cleanup();
+    }
+
+    private static JobConf configureJobConf() throws Exception {
+        JobConf conf = new JobConf();
+        String hdfsUrl = "hdfs://127.0.0.1:31888";
+        String hdfsPath = "/asterix/extrasmalltweets.txt";
+        conf.set("fs.default.name", hdfsUrl);
+        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
+        conf.setClassLoader(HDFSAdapter.class.getClassLoader());
+        conf.set("mapred.input.dir", hdfsPath);
+        conf.set("mapred.input.format.class", "org.apache.hadoop.mapred.TextInputFormat");
+        return conf;
+    }
+
+}
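
As a usage sketch, a client reaches the mini cluster through the ordinary Hadoop FileSystem API. The URL and file path below are assumptions taken from the constants in this class (name node port 31888, HDFS_PATH "/asterix") and from configureJobConf(); they are not exercised by any test shown in this diff:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class MiniDfsReadSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // point the client at the mini cluster's name node
            conf.set("fs.default.name", "hdfs://127.0.0.1:31888");
            FileSystem fs = FileSystem.get(conf);
            BufferedReader reader = new BufferedReader(new InputStreamReader(
                    fs.open(new Path("/asterix/extrasmalltweets.txt")), "UTF-8"));
            try {
                System.out.println(reader.readLine());
            } finally {
                reader.close();
                fs.close();
            }
        }
    }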

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
new file mode 100644
index 0000000..faeb688
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.test.runtime;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.util.Collection;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.MethodRule;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+import org.junit.runners.model.FrameworkMethod;
+import org.junit.runners.model.Statement;
+
+import edu.uci.ics.asterix.test.aql.TestsUtils;
+import edu.uci.ics.asterix.test.runtime.RepeatRule.Repeat;
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+
+/**
+ * Runs runtime test cases that have been identified in the repeatedtestsuite.xml.
+ * 
+ * Each test is run 10000 times.
+ */
+class RepeatRule implements MethodRule {
+
+    @Retention(RetentionPolicy.RUNTIME)
+    @Target({ java.lang.annotation.ElementType.METHOD })
+    public @interface Repeat {
+        public abstract int times();
+
+    }
+
+    private static class RepeatStatement extends Statement {
+
+        private final int times;
+        private final Statement statement;
+
+        private RepeatStatement(int times, Statement statement) {
+            this.times = times;
+            this.statement = statement;
+        }
+
+        @Override
+        public void evaluate() throws Throwable {
+            for (int i = 0; i < times; i++) {
+                statement.evaluate();
+            }
+        }
+    }
+
+    @Override
+    public Statement apply(Statement statement, FrameworkMethod method, Object target) {
+        Statement result = statement;
+        Repeat repeat = method.getAnnotation(Repeat.class);
+        if (repeat != null) {
+            int times = repeat.times();
+            result = new RepeatStatement(times, statement);
+        }
+        return result;
+
+    }
+}
+
+@RunWith(Parameterized.class)
+public class RepeatedTest extends ExecutionTest {
+
+    private int count;
+    
+    @Parameters
+    public static Collection<Object[]> tests() throws Exception {
+        Collection<Object[]> testArgs = buildTestsInXml(TestCaseContext.DEFAULT_REPEADED_TESTSUITE_XML_NAME);
+        return testArgs;
+    }
+
+    public RepeatedTest(TestCaseContext tcCtx) {
+        super(tcCtx);
+        count = 0;
+    }
+
+    @Rule
+    public RepeatRule repeatRule = new RepeatRule();
+
+    @Test
+    @Repeat(times = 10000)
+    public void test() throws Exception {
+        System.err.println("***** Test Count: " + (++count) + " ******");
+        TestsUtils.executeTest(PATH_ACTUAL, tcCtx, null, false);
+    }
+}
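
A note on the design choice: RepeatRule is written against JUnit's older MethodRule interface. JUnit 4.9+ offers TestRule for the same purpose; a functionally equivalent sketch against TestRule (not part of this commit) looks like this:

    import org.junit.rules.TestRule;
    import org.junit.runner.Description;
    import org.junit.runners.model.Statement;

    public class RepeatTestRule implements TestRule {

        private final int times;

        public RepeatTestRule(int times) {
            this.times = times;
        }

        @Override
        public Statement apply(final Statement base, Description description) {
            return new Statement() {
                @Override
                public void evaluate() throws Throwable {
                    // run the wrapped test body the requested number of times
                    for (int i = 0; i < times; i++) {
                        base.evaluate();
                    }
                }
            };
        }
    }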

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java
deleted file mode 100644
index 31311a8..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/AbstractExpression.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.base;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
-
-public abstract class AbstractExpression implements Expression {
-    protected List<IExpressionAnnotation> hints;
-
-    public void addHint(IExpressionAnnotation hint) {
-        if (hints == null) {
-            hints = new ArrayList<IExpressionAnnotation>();
-        }
-        hints.add(hint);
-    }
-
-    public boolean hasHints() {
-        return hints != null;
-    }
-
-    public List<IExpressionAnnotation> getHints() {
-        return hints;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
deleted file mode 100644
index e92d132..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.base;
-
-public interface Clause extends IAqlExpression {
-    public ClauseType getClauseType();
-
-    public enum ClauseType {
-        FOR_CLAUSE,
-        LET_CLAUSE,
-        WHERE_CLAUSE,
-        ORDER_BY_CLAUSE,
-        LIMIT_CLAUSE,
-        GROUP_BY_CLAUSE,
-        DISTINCT_BY_CLAUSE,
-        UPDATE_CLAUSE
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Expression.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Expression.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Expression.java
deleted file mode 100644
index 0208f42..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Expression.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.base;
-
-public interface Expression extends IAqlExpression {
-    public abstract Kind getKind();
-
-    public enum Kind {
-        LITERAL_EXPRESSION,
-        FLWOGR_EXPRESSION,
-        IF_EXPRESSION,
-        QUANTIFIED_EXPRESSION,
-        // PARENTHESIZED_EXPRESSION,
-        LIST_CONSTRUCTOR_EXPRESSION,
-        RECORD_CONSTRUCTOR_EXPRESSION,
-        VARIABLE_EXPRESSION,
-        METAVARIABLE_EXPRESSION,
-        CALL_EXPRESSION,
-        OP_EXPRESSION,
-        FIELD_ACCESSOR_EXPRESSION,
-        INDEX_ACCESSOR_EXPRESSION,
-        UNARY_EXPRESSION,
-        UNION_EXPRESSION
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/IAqlExpression.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/IAqlExpression.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/IAqlExpression.java
deleted file mode 100644
index e977efb..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/IAqlExpression.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.base;
-
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public interface IAqlExpression {
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException;
-
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java
deleted file mode 100644
index 84ae0d1..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Literal.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.base;
-
-import java.io.Serializable;
-
-public abstract class Literal implements Serializable {
-    /**
-     * 
-     */
-    private static final long serialVersionUID = -6468144574890768345L;
-
-    public enum Type {
-        STRING,
-        INTEGER,
-        NULL,
-        TRUE,
-        FALSE,
-        FLOAT,
-        DOUBLE,
-        LONG
-    }
-
-    abstract public Object getValue();
-
-    abstract public Type getLiteralType();
-
-    public String getStringValue() {
-        return getValue().toString();
-    }
-
-    @Override
-    public int hashCode() {
-        return getValue().hashCode();
-    }
-
-    public boolean equals(Object obj) {
-        if (!(obj instanceof Literal)) {
-            return false;
-        }
-        Literal literal = (Literal) obj;
-        return getValue().equals(literal.getValue());
-    }
-
-    @Override
-    public String toString() {
-        return getStringValue();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java
deleted file mode 100644
index d6cfeec..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Statement.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.base;
-
-public interface Statement extends IAqlExpression {
-    public enum Kind {
-        DATASET_DECL,
-        DATAVERSE_DECL,
-        DATAVERSE_DROP,
-        DATASET_DROP,
-        DELETE,
-        INSERT,
-        UPDATE,
-        DML_CMD_LIST,
-        FUNCTION_DECL,
-        LOAD,
-        NODEGROUP_DECL,
-        NODEGROUP_DROP,
-        QUERY,
-        SET,
-        TYPE_DECL,
-        TYPE_DROP,
-        WRITE,
-        CREATE_INDEX,
-        INDEX_DECL,
-        CREATE_DATAVERSE,
-        INDEX_DROP,
-        CREATE_PRIMARY_FEED,
-        CREATE_SECONDARY_FEED,
-        DROP_FEED,
-        CONNECT_FEED,
-        DISCONNECT_FEED,
-        SUBSCRIBE_FEED,
-        CREATE_FEED_POLICY,
-        DROP_FEED_POLICY,
-        CREATE_FUNCTION,
-        FUNCTION_DROP,
-        COMPACT, 
-        EXTERNAL_DATASET_REFRESH,
-        RUN
-    }
-
-    public abstract Kind getKind();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionExpressionMap.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionExpressionMap.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionExpressionMap.java
deleted file mode 100644
index 1eb4da9..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionExpressionMap.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.context;
-
-import java.util.HashMap;
-
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public class FunctionExpressionMap extends HashMap<Integer, FunctionSignature> {
-    /**
-     * 
-     */
-    private static final long serialVersionUID = 1L;
-    private boolean varargs;
-
-    public boolean isVarargs() {
-        return varargs;
-    }
-
-    public void setVarargs(boolean varargs) {
-        this.varargs = varargs;
-    }
-
-    public FunctionExpressionMap(boolean varargs) {
-        super();
-        this.varargs = varargs;
-    }
-
-    public FunctionSignature get(int arity) {
-        if (varargs) {
-            return super.get(-1);
-        } else {
-            return super.get(arity);
-        }
-    }
-
-    public FunctionSignature put(int arity, FunctionSignature fd) {
-        if (varargs) {
-            return super.put(-1, fd);
-        } else {
-            return super.put(arity, fd);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionSignatures.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionSignatures.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionSignatures.java
deleted file mode 100644
index a2e966c..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/FunctionSignatures.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.context;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public class FunctionSignatures {
-    private final Map<FunctionSignature, FunctionExpressionMap> functionMap;
-
-    public FunctionSignatures() {
-        functionMap = new HashMap<FunctionSignature, FunctionExpressionMap>();
-    }
-
-    public FunctionSignature get(String dataverse, String name, int arity) {
-        FunctionSignature fid = new FunctionSignature(dataverse, name, arity);
-        FunctionExpressionMap possibleFD = functionMap.get(fid);
-        if (possibleFD == null) {
-            return null;
-        } else {
-            return possibleFD.get(arity);
-        }
-    }
-
-    public void put(FunctionSignature fd, boolean varargs) {
-        FunctionExpressionMap func = functionMap.get(fd);
-        if (func == null) {
-            func = new FunctionExpressionMap(varargs);
-            functionMap.put(fd, func);
-        }
-        func.put(fd.getArity(), fd);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/RootScopeFactory.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/RootScopeFactory.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/RootScopeFactory.java
deleted file mode 100644
index 605f54c..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/RootScopeFactory.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.context;
-
-import edu.uci.ics.asterix.aql.parser.ScopeChecker;
-
-public class RootScopeFactory {
-
-    public static Scope createRootScope(ScopeChecker sc) {
-        Scope rootScope = new Scope(sc);
-        return rootScope;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/Scope.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/Scope.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/Scope.java
deleted file mode 100644
index e73c084..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/context/Scope.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.context;
-
-import java.util.HashMap;
-
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aql.expression.VarIdentifier;
-import edu.uci.ics.asterix.aql.parser.ScopeChecker;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public final class Scope {
-    private Scope parent;
-    private HashMap<String, Identifier> symbols = null;
-    private FunctionSignatures functionSignatures = null;
-    private final ScopeChecker scopeChecker;
-    private boolean maskParentScope = false;
-
-    public Scope(ScopeChecker scopeChecker, Scope parent) {
-        this.scopeChecker = scopeChecker;
-        this.parent = parent;
-    }
-
-    public Scope(ScopeChecker scopeChecker) {
-        this(scopeChecker, null);
-    }
-
-    public Scope(ScopeChecker scopeChecker, Scope parent, boolean maskParentScope) {
-        this(scopeChecker, parent);
-        this.maskParentScope = maskParentScope;
-    }
-
-    /**
-     * Find a symbol in the scope
-     * 
-     * @param name
-     * @return the Identifier of this symbol; otherwise null;
-     */
-    public Identifier findSymbol(String name) {
-        Identifier ident = null;
-        if (symbols != null) {
-            ident = symbols.get(name);
-        }
-        if (ident == null && !maskParentScope && parent != null) {
-            ident = parent.findSymbol(name);
-        }
-        return ident;
-    }
-
-    public Identifier findLocalSymbol(String name) {
-        if (symbols != null) {
-            return symbols.get(name);
-        }
-        return null;
-    }
-
-    /**
-     * Add a symbol into scope
-     * 
-     * @param ident
-     */
-    public void addSymbolToScope(Identifier ident) {
-        if (symbols == null) {
-            symbols = new HashMap<String, Identifier>();
-        }
-        symbols.put(ident.getValue(), ident);
-    }
-
-    public void addNewVarSymbolToScope(VarIdentifier ident) {
-        scopeChecker.incVarCounter();
-        ident.setId(scopeChecker.getVarCounter());
-        addSymbolToScope(ident);
-    }
-
-    /**
-     * Add a FunctionDescriptor into functionSignatures
-     * 
-     * @param fd
-     *            FunctionDescriptor
-     * @param varargs
-     *            whether this function has varargs
-     */
-    public void addFunctionDescriptor(FunctionSignature signature, boolean varargs) {
-        if (functionSignatures == null) {
-            functionSignatures = new FunctionSignatures();
-        }
-        functionSignatures.put(signature, varargs);
-    }
-
-    /**
-     * find a function signature
-     * 
-     * @param name
-     *            name of the function
-     * @param arity
-     *            # of arguments
-     * @return FunctionDescriptor of the function found; otherwise null
-     */
-    public FunctionSignature findFunctionSignature(String dataverse, String name, int arity) {
-        FunctionSignature fd = null;
-        if (functionSignatures != null) {
-            fd = functionSignatures.get(dataverse, name, arity);
-        }
-        if (fd == null && parent != null) {
-            fd = parent.findFunctionSignature(dataverse, name, arity);
-        }
-        return fd;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AbstractAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AbstractAccessor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AbstractAccessor.java
deleted file mode 100644
index 1138a57..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AbstractAccessor.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-
-public abstract class AbstractAccessor implements Expression {
-    protected Expression expr;
-
-    public AbstractAccessor(Expression expr) {
-        super();
-        this.expr = expr;
-    }
-
-    public Expression getExpr() {
-        return expr;
-    }
-
-    public void setExpr(Expression expr) {
-        this.expr = expr;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AdmSplitInfo.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AdmSplitInfo.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AdmSplitInfo.java
deleted file mode 100644
index 89bb8e6..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/AdmSplitInfo.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- *
- */
-package edu.uci.ics.asterix.aql.expression;
-
-public class AdmSplitInfo {
-    public Identifier nodeName;
-    public String fileName;
-
-    public AdmSplitInfo(Identifier nodeName, String fileName) {
-        this.nodeName = nodeName;
-        this.fileName = fileName;
-    }
-
-    @Override
-    public String toString() {
-        return nodeName.value + ":" + fileName;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CallExpr.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CallExpr.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CallExpr.java
deleted file mode 100644
index 1c7c754..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CallExpr.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.AbstractExpression;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public class CallExpr extends AbstractExpression {
-    private final FunctionSignature functionSignature;
-    private List<Expression> exprList;
-    private boolean isBuiltin;
-
-    public CallExpr(FunctionSignature functionSignature, List<Expression> exprList) {
-        this.functionSignature = functionSignature;
-        this.exprList = exprList;
-    }
-
-    public FunctionSignature getFunctionSignature() {
-        return functionSignature;
-    }
-
-    public List<Expression> getExprList() {
-        return exprList;
-    }
-
-    public boolean isBuiltin() {
-        return isBuiltin;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.CALL_EXPRESSION;
-    }
-
-    public void setExprList(List<Expression> exprList) {
-        this.exprList = exprList;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitCallExpr(this, arg);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CompactStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CompactStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CompactStatement.java
deleted file mode 100644
index eab75cf..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CompactStatement.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class CompactStatement implements Statement {
-
-    private final Identifier dataverseName;
-    private final Identifier datasetName;
-
-    public CompactStatement(Identifier dataverseName, Identifier datasetName) {
-        this.dataverseName = dataverseName;
-        this.datasetName = datasetName;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.COMPACT;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getDatasetName() {
-        return datasetName;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitCompactStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConnectFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConnectFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConnectFeedStatement.java
deleted file mode 100644
index 5d9794e..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConnectFeedStatement.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.io.StringReader;
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
-import edu.uci.ics.asterix.metadata.entities.Feed;
-import edu.uci.ics.asterix.metadata.entities.Function;
-import edu.uci.ics.asterix.metadata.entities.PrimaryFeed;
-import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
-import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
-import edu.uci.ics.asterix.metadata.feeds.IFeedAdapterFactory;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-
-public class ConnectFeedStatement implements Statement {
-
-    private final Identifier dataverseName;
-    private final Identifier datasetName;
-    private final String feedName;
-    private final String policy;
-    private Query query;
-    private int varCounter;
-    private boolean forceConnect = false;
-
-    public static final String WAIT_FOR_COMPLETION = "wait-for-completion-feed";
-
-    public ConnectFeedStatement(Pair<Identifier, Identifier> feedNameCmp, Pair<Identifier, Identifier> datasetNameCmp,
-            String policy, int varCounter) {
-        if (feedNameCmp.first != null && datasetNameCmp.first != null
-                && !feedNameCmp.first.getValue().equals(datasetNameCmp.first.getValue())) {
-            throw new IllegalArgumentException("Dataverse for source feed and target dataset do not match");
-        }
-        this.dataverseName = feedNameCmp.first != null ? feedNameCmp.first
-                : datasetNameCmp.first != null ? datasetNameCmp.first : null;
-        this.datasetName = datasetNameCmp.second;
-        this.feedName = feedNameCmp.second.getValue();
-        this.policy = policy != null ? policy : BuiltinFeedPolicies.DEFAULT_POLICY.getPolicyName();
-        this.varCounter = varCounter;
-    }
-
-    public ConnectFeedStatement(Identifier dataverseName, Identifier feedName, Identifier datasetName, String policy,
-            int varCounter) {
-        this.dataverseName = dataverseName;
-        this.datasetName = datasetName;
-        this.feedName = feedName.getValue();
-        this.policy = policy != null ? policy : BuiltinFeedPolicies.DEFAULT_POLICY.getPolicyName();
-        this.varCounter = varCounter;
-    }
-
-    /*
-    public void initialize(MetadataTransactionContext mdTxnCtx, Dataset targetDataset, Feed sourceFeed)
-            throws MetadataException {
-        query = new Query();
-        FunctionSignature appliedFunction = sourceFeed.getAppliedFunction();
-        Function function = null;
-        String adapterOutputType = null;
-        if (appliedFunction != null) {
-            function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
-            if (function == null) {
-                throw new MetadataException(" Unknown function " + function);
-            } else if (function.getParams().size() > 1) {
-                throw new MetadataException(" Incompatible function: " + appliedFunction
-                        + " Number if arguments must be 1");
-            }
-        }
-
-        Triple<IFeedAdapterFactory, ARecordType, AdapterType> factoryOutput = null;
-        try {
-            factoryOutput = FeedUtil.getPrimaryFeedFactoryAndOutput((PrimaryFeed) sourceFeed, mdTxnCtx);
-            adapterOutputType = factoryOutput.second.getTypeName();
-        } catch (AlgebricksException ae) {
-            ae.printStackTrace();
-            throw new MetadataException(ae);
-        }
-
-        StringBuilder builder = new StringBuilder();
-        builder.append("set" + " " + FunctionUtils.IMPORT_PRIVATE_FUNCTIONS + " " + "'" + Boolean.TRUE + "'" + ";\n");
-        builder.append("insert into dataset " + datasetName + " ");
-
-        if (appliedFunction == null) {
-            builder.append(" (" + " for $x in feed-ingest ('" + feedName + "'" + "," + "'" + adapterOutputType + "'"
-                    + "," + "'" + targetDataset.getDatasetName() + "'" + ")");
-            builder.append(" return $x");
-        } else {
-            if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
-                String param = function.getParams().get(0);
-                builder.append(" (" + " for" + " " + param + " in feed-ingest ('" + feedName + "'" + "," + "'"
-                        + adapterOutputType + "'" + "," + "'" + targetDataset.getDatasetName() + "'" + ")");
-                builder.append(" let $y:=(" + function.getFunctionBody() + ")" + " return $y");
-            } else {
-                builder.append(" (" + " for $x in feed-ingest ('" + feedName + "'" + "," + "'" + adapterOutputType
-                        + "'" + "," + "'" + targetDataset.getDatasetName() + "'" + ")");
-                builder.append(" let $y:=" + sourceFeed.getDataverseName() + "." + function.getName() + "(" + "$x"
-                        + ")");
-                builder.append(" return $y");
-            }
-
-        }
-        builder.append(")");
-        builder.append(";");
-        AQLParser parser = new AQLParser(new StringReader(builder.toString()));
-
-        List<Statement> statements;
-        try {
-            statements = parser.parse();
-            query = ((InsertStatement) statements.get(1)).getQuery();
-        } catch (ParseException pe) {
-            throw new MetadataException(pe);
-        }
-
-    }*/
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getDatasetName() {
-        return datasetName;
-    }
-
-    public Query getQuery() {
-        return query;
-    }
-
-    public int getVarCounter() {
-        return varCounter;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.CONNECT_FEED;
-    }
-
-    public String getPolicy() {
-        return policy;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitConnectFeedStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    public boolean forceConnect() {
-        return forceConnect;
-    }
-
-    public void setForceConnect(boolean forceConnect) {
-        this.forceConnect = forceConnect;
-    }
-
-    public String getFeedName() {
-        return feedName;
-    }
-
-}



[38/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
deleted file mode 100644
index 0360ffb..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
+++ /dev/null
@@ -1,1608 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.translator;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.base.Expression.Kind;
-import edu.uci.ics.asterix.aql.expression.CallExpr;
-import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFeedPolicyStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
-import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
-import edu.uci.ics.asterix.aql.expression.CreatePrimaryFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateSecondaryFeedStatement;
-import edu.uci.ics.asterix.aql.expression.DatasetDecl;
-import edu.uci.ics.asterix.aql.expression.DataverseDecl;
-import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
-import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.DistinctClause;
-import edu.uci.ics.asterix.aql.expression.DropStatement;
-import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
-import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
-import edu.uci.ics.asterix.aql.expression.FeedPolicyDropStatement;
-import edu.uci.ics.asterix.aql.expression.FieldAccessor;
-import edu.uci.ics.asterix.aql.expression.FieldBinding;
-import edu.uci.ics.asterix.aql.expression.ForClause;
-import edu.uci.ics.asterix.aql.expression.FunctionDecl;
-import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
-import edu.uci.ics.asterix.aql.expression.GbyVariableExpressionPair;
-import edu.uci.ics.asterix.aql.expression.GroupbyClause;
-import edu.uci.ics.asterix.aql.expression.IfExpr;
-import edu.uci.ics.asterix.aql.expression.IndexAccessor;
-import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
-import edu.uci.ics.asterix.aql.expression.InsertStatement;
-import edu.uci.ics.asterix.aql.expression.LetClause;
-import edu.uci.ics.asterix.aql.expression.LimitClause;
-import edu.uci.ics.asterix.aql.expression.ListConstructor;
-import edu.uci.ics.asterix.aql.expression.ListConstructor.Type;
-import edu.uci.ics.asterix.aql.expression.LiteralExpr;
-import edu.uci.ics.asterix.aql.expression.LoadStatement;
-import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
-import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
-import edu.uci.ics.asterix.aql.expression.OperatorExpr;
-import edu.uci.ics.asterix.aql.expression.OperatorType;
-import edu.uci.ics.asterix.aql.expression.OrderbyClause;
-import edu.uci.ics.asterix.aql.expression.OrderbyClause.OrderModifier;
-import edu.uci.ics.asterix.aql.expression.OrderedListTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.QuantifiedExpression;
-import edu.uci.ics.asterix.aql.expression.QuantifiedExpression.Quantifier;
-import edu.uci.ics.asterix.aql.expression.QuantifiedPair;
-import edu.uci.ics.asterix.aql.expression.Query;
-import edu.uci.ics.asterix.aql.expression.RecordConstructor;
-import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.SetStatement;
-import edu.uci.ics.asterix.aql.expression.TypeDecl;
-import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
-import edu.uci.ics.asterix.aql.expression.TypeReferenceExpression;
-import edu.uci.ics.asterix.aql.expression.UnaryExpr;
-import edu.uci.ics.asterix.aql.expression.UnaryExpr.Sign;
-import edu.uci.ics.asterix.aql.expression.UnionExpr;
-import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.UpdateClause;
-import edu.uci.ics.asterix.aql.expression.UpdateStatement;
-import edu.uci.ics.asterix.aql.expression.VariableExpr;
-import edu.uci.ics.asterix.aql.expression.WhereClause;
-import edu.uci.ics.asterix.aql.expression.WriteStatement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.aql.util.RangeMapBuilder;
-import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionConstants;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.asterix.metadata.declared.LoadableDataSource;
-import edu.uci.ics.asterix.metadata.declared.ResultSetDataSink;
-import edu.uci.ics.asterix.metadata.declared.ResultSetSinkId;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Function;
-import edu.uci.ics.asterix.metadata.functions.ExternalFunctionCompilerUtil;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.AOrderedList;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.functions.AsterixFunctionInfo;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.runtime.formats.FormatUtils;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation.BroadcastSide;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.LocalOrderProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-
-/**
- * Each visit returns a pair of an operator and a variable. The variable
- * corresponds to the new column, if any, added to the tuple flow. E.g., for
- * Unnest, the column is the variable bound to the elements in the list, for
- * Subplan it is null. The first argument of a visit method is the expression
- * which is translated. The second argument of a visit method is the tuple
- * source for the current subtree.
- */
-
-public class AqlExpressionToPlanTranslator extends AbstractAqlTranslator implements
-        IAqlExpressionVisitor<Pair<ILogicalOperator, LogicalVariable>, Mutable<ILogicalOperator>> {
-
-    private final AqlMetadataProvider metadataProvider;
-    private final TranslationContext context;
-    private final String outputDatasetName;
-    private final ICompiledDmlStatement stmt;
-    private static AtomicLong outputFileID = new AtomicLong(0);
-    private static final String OUTPUT_FILE_PREFIX = "OUTPUT_";
-    private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
-
-    public AqlExpressionToPlanTranslator(AqlMetadataProvider metadataProvider, int currentVarCounter,
-            String outputDatasetName, ICompiledDmlStatement stmt) throws AlgebricksException {
-        this.context = new TranslationContext(new Counter(currentVarCounter));
-        this.outputDatasetName = outputDatasetName;
-        this.stmt = stmt;
-        this.metadataProvider = metadataProvider;
-        FormatUtils.getDefaultFormat().registerRuntimeFunctions();
-    }
-
-    public int getVarCounter() {
-        return context.getVarCounter();
-    }
-
-    public ILogicalPlan translateLoad() throws AlgebricksException {
-        CompiledLoadFromFileStatement clffs = (CompiledLoadFromFileStatement) stmt;
-        Dataset dataset = metadataProvider.findDataset(clffs.getDataverseName(), clffs.getDatasetName());
-        if (dataset == null) {
-            // This would never happen since we check for this in AqlTranslator
-            throw new AlgebricksException("Unable to load dataset " + clffs.getDatasetName()
-                    + " since it does not exist");
-        }
-        IAType itemType = metadataProvider.findType(clffs.getDataverseName(), dataset.getItemTypeName());
-        DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
-                stmt.getDatasetName());
-        List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
-
-        LoadableDataSource lds;
-        try {
-            lds = new LoadableDataSource(dataset, itemType, clffs.getAdapter(), clffs.getProperties());
-        } catch (IOException e) {
-            throw new AlgebricksException(e);
-        }
-
-        // etsOp is a dummy input operator used to keep the compiler happy.
-        // It could be removed, but doing so would require fixing many
-        // rewrite rules that assume that datasourcescan operators always
-        // have an input.
-        ILogicalOperator etsOp = new EmptyTupleSourceOperator();
-
-        // Add a logical variable for the record.
-        List<LogicalVariable> payloadVars = new ArrayList<LogicalVariable>();
-        payloadVars.add(context.newVar());
-
-        // Create a scan operator and make the empty tuple source its input
-        DataSourceScanOperator dssOp = new DataSourceScanOperator(payloadVars, lds);
-        dssOp.getInputs().add(new MutableObject<ILogicalOperator>(etsOp));
-        ILogicalExpression payloadExpr = new VariableReferenceExpression(payloadVars.get(0));
-        Mutable<ILogicalExpression> payloadRef = new MutableObject<ILogicalExpression>(payloadExpr);
-
-        // Creating the assign to extract the PK out of the record
-        ArrayList<LogicalVariable> pkVars = new ArrayList<LogicalVariable>();
-        ArrayList<Mutable<ILogicalExpression>> pkExprs = new ArrayList<Mutable<ILogicalExpression>>();
-        List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
-        LogicalVariable payloadVar = payloadVars.get(0);
-        for (List<String> keyFieldName : partitionKeys) {
-            prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading);
-        }
-
-        AssignOperator assign = new AssignOperator(pkVars, pkExprs);
-        assign.getInputs().add(new MutableObject<ILogicalOperator>(dssOp));
-
-        // If the input is pre-sorted, we set the ordering property explicitly in the assign
-        if (clffs.alreadySorted()) {
-            List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
-            for (int i = 0; i < pkVars.size(); ++i) {
-                orderColumns.add(new OrderColumn(pkVars.get(i), OrderKind.ASC));
-            }
-            assign.setExplicitOrderingProperty(new LocalOrderProperty(orderColumns));
-        }
-
-        List<String> additionalFilteringField = DatasetUtils.getFilterField(targetDatasource.getDataset());
-        List<LogicalVariable> additionalFilteringVars = null;
-        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
-        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
-        AssignOperator additionalFilteringAssign = null;
-        if (additionalFilteringField != null) {
-            additionalFilteringVars = new ArrayList<LogicalVariable>();
-            additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars,
-                    additionalFilteringAssignExpressions, additionalFilteringExpressions);
-            additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
-                    additionalFilteringAssignExpressions);
-        }
-
-        InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, payloadRef, varRefsForLoading,
-                InsertDeleteOperator.Kind.INSERT, true);
-        insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-
-        if (additionalFilteringAssign != null) {
-            additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
-        } else {
-            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-        }
-
-        ILogicalOperator leafOperator = new SinkOperator();
-        leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
-        return new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(leafOperator));
-    }
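    // Editor's illustrative sketch, not part of the deleted file: translateLoad()
    // above wires its plan bottom-up by wrapping each operator in a MutableObject
    // and adding it to the consumer's getInputs(), with a SinkOperator as the
    // plan root. The reduced chain below (empty tuple source -> assign of a
    // constant -> sink) reuses only constructors and calls that appear in this
    // file; the class name, the variable id 0 and the constant "example" are
    // illustrative assumptions.
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.om.base.AString;
    import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;

    public class OperatorChainSketch {
        public static ILogicalPlan buildTrivialPlan() {
            // Dummy leaf input, as in translateLoad().
            ILogicalOperator ets = new EmptyTupleSourceOperator();

            // Assign a constant string to a fresh variable and plug the ETS in.
            AssignOperator assign = new AssignOperator(new LogicalVariable(0),
                    new MutableObject<ILogicalExpression>(new ConstantExpression(
                            new AsterixConstantValue(new AString("example")))));
            assign.getInputs().add(new MutableObject<ILogicalOperator>(ets));

            // Sink on top; wrapping the chain yields a complete logical plan.
            ILogicalOperator sink = new SinkOperator();
            sink.getInputs().add(new MutableObject<ILogicalOperator>(assign));
            return new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sink));
        }
    }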
-
-    public ILogicalPlan translate(Query expr) throws AlgebricksException, AsterixException {
-        Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, new MutableObject<ILogicalOperator>(
-                new EmptyTupleSourceOperator()));
-        ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
-        ILogicalOperator topOp = p.first;
-        ProjectOperator project = (ProjectOperator) topOp;
-        LogicalVariable resVar = project.getVariables().get(0);
-
-        if (outputDatasetName == null) {
-            FileSplit outputFileSplit = metadataProvider.getOutputFile();
-            if (outputFileSplit == null) {
-                outputFileSplit = getDefaultOutputFileLocation();
-            }
-            metadataProvider.setOutputFile(outputFileSplit);
-
-            List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
-            writeExprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
-            ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
-            ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
-            topOp = new DistributeResultOperator(writeExprList, sink);
-            topOp.getInputs().add(new MutableObject<ILogicalOperator>(project));
-
-            // Retrieve the Output RecordType (if any) and store it on
-            // the DistributeResultOperator
-            IAType outputRecordType = metadataProvider.findOutputRecordType();
-            if (outputRecordType != null) {
-                topOp.getAnnotations().put("output-record-type", outputRecordType);
-            }
-        } else {
-            /**
-             * Add the collection-to-sequence right before the final project,
-             * because datasets only accept non-collection records.
-             */
-            LogicalVariable seqVar = context.newVar();
-            @SuppressWarnings("unchecked")
-            /** This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest it; otherwise do nothing. */
-            AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
-                    new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
-                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.COLLECTION_TO_SEQUENCE),
-                            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)))));
-            assignCollectionToSequence.getInputs().add(
-                    new MutableObject<ILogicalOperator>(project.getInputs().get(0).getValue()));
-            project.getInputs().get(0).setValue(assignCollectionToSequence);
-            project.getVariables().set(0, seqVar);
-            resVar = seqVar;
-
-            DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
-                    stmt.getDatasetName());
-            ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
-            ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
-            List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
-            List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
-            for (List<String> keyFieldName : partitionKeys) {
-                prepareVarAndExpression(keyFieldName, resVar, vars, exprs, varRefsForLoading);
-            }
-
-            List<String> additionalFilteringField = DatasetUtils.getFilterField(targetDatasource.getDataset());
-            List<LogicalVariable> additionalFilteringVars = null;
-            List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
-            List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
-            AssignOperator additionalFilteringAssign = null;
-            if (additionalFilteringField != null) {
-                additionalFilteringVars = new ArrayList<LogicalVariable>();
-                additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-                additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-
-                prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars,
-                        additionalFilteringAssignExpressions, additionalFilteringExpressions);
-
-                additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
-                        additionalFilteringAssignExpressions);
-            }
-
-            AssignOperator assign = new AssignOperator(vars, exprs);
-
-            if (additionalFilteringAssign != null) {
-                additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(project));
-                assign.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
-            } else {
-                assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
-            }
-
-            Mutable<ILogicalExpression> varRef = new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                    resVar));
-            ILogicalOperator leafOperator = null;
-
-            switch (stmt.getKind()) {
-                case INSERT: {
-                    InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef,
-                            varRefsForLoading, InsertDeleteOperator.Kind.INSERT, false);
-                    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-                    leafOperator = new SinkOperator();
-                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
-                    break;
-                }
-                case DELETE: {
-                    InsertDeleteOperator deleteOp = new InsertDeleteOperator(targetDatasource, varRef,
-                            varRefsForLoading, InsertDeleteOperator.Kind.DELETE, false);
-                    deleteOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-                    deleteOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-                    leafOperator = new SinkOperator();
-                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(deleteOp));
-                    break;
-                }
-                case CONNECT_FEED: {
-                    InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef,
-                            varRefsForLoading, InsertDeleteOperator.Kind.INSERT, false);
-                    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-                    leafOperator = new SinkOperator();
-                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
-                    break;
-                }
-                case SUBSCRIBE_FEED: {
-                    ILogicalOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef, varRefsForLoading,
-                            InsertDeleteOperator.Kind.INSERT, false);
-                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-                    leafOperator = new SinkOperator();
-                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
-                    break;
-                }
-            }
-            topOp = leafOperator;
-        }
-        globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
-        ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
-        return plan;
-    }
-
-    @SuppressWarnings("unchecked")
-    private void prepareVarAndExpression(List<String> field, LogicalVariable resVar,
-            List<LogicalVariable> additionalFilteringVars,
-            List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions,
-            List<Mutable<ILogicalExpression>> varRefs) {
-        IFunctionInfo finfoAccess;
-        ScalarFunctionCallExpression f;
-        if (field.size() > 1) {
-            finfoAccess = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED);
-            f = new ScalarFunctionCallExpression(finfoAccess, new MutableObject<ILogicalExpression>(
-                    new VariableReferenceExpression(METADATA_DUMMY_VAR)), new MutableObject<ILogicalExpression>(
-                    new ConstantExpression(new AsterixConstantValue(new AOrderedList(field)))));
-        } else {
-            finfoAccess = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME);
-            f = new ScalarFunctionCallExpression(finfoAccess, new MutableObject<ILogicalExpression>(
-                    new VariableReferenceExpression(METADATA_DUMMY_VAR)), new MutableObject<ILogicalExpression>(
-                    new ConstantExpression(new AsterixConstantValue(new AString(field.get(0))))));
-        }
-        f.substituteVar(METADATA_DUMMY_VAR, resVar);
-        additionalFilteringAssignExpressions.add(new MutableObject<ILogicalExpression>(f));
-        LogicalVariable v = context.newVar();
-        additionalFilteringVars.add(v);
-        varRefs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
-    }
-
-    private DatasetDataSource validateDatasetInfo(AqlMetadataProvider metadataProvider, String dataverseName,
-            String datasetName) throws AlgebricksException {
-        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-        if (dataset == null) {
-            throw new AlgebricksException("Cannot find dataset " + datasetName + " in dataverse " + dataverseName);
-        }
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            throw new AlgebricksException("Cannot write output to an external dataset.");
-        }
-        AqlSourceId sourceId = new AqlSourceId(dataverseName, datasetName);
-        String itemTypeName = dataset.getItemTypeName();
-        IAType itemType = metadataProvider.findType(dataverseName, itemTypeName);
-        DatasetDataSource dataSource = new DatasetDataSource(sourceId, dataset.getDataverseName(),
-                dataset.getDatasetName(), itemType, AqlDataSourceType.INTERNAL_DATASET);
-
-        return dataSource;
-    }
-
-    private FileSplit getDefaultOutputFileLocation() throws MetadataException {
-        String outputDir = System.getProperty("java.io.tmpdir");
-        String filePath = outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX
-                + outputFileID.incrementAndGet();
-        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
-        return new FileSplit(metadataProperties.getMetadataNodeName(), new FileReference(new File(filePath)));
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitForClause(ForClause fc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        LogicalVariable v = context.newVar(fc.getVarExpr());
-        Expression inExpr = fc.getInExpr();
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(inExpr, tupSource);
-        ILogicalOperator returnedOp;
-
-        if (fc.getPosVarExpr() == null) {
-            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)));
-        } else {
-            LogicalVariable pVar = context.newVar(fc.getPosVarExpr());
-            // We set the positional variable type to INT64.
-            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)),
-                    pVar, BuiltinType.AINT64, new AqlPositionWriter());
-        }
-        returnedOp.getInputs().add(eo.second);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLetClause(LetClause lc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        LogicalVariable v;
-        ILogicalOperator returnedOp;
-
-        switch (lc.getBindingExpr().getKind()) {
-            case VARIABLE_EXPRESSION: {
-                v = context.newVar(lc.getVarExpr());
-                LogicalVariable prev = context.getVar(((VariableExpr) lc.getBindingExpr()).getVar().getId());
-                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(
-                        new VariableReferenceExpression(prev)));
-                returnedOp.getInputs().add(tupSource);
-                break;
-            }
-            default: {
-                v = context.newVar(lc.getVarExpr());
-                Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(lc.getBindingExpr(),
-                        tupSource);
-                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(eo.first));
-                returnedOp.getInputs().add(eo.second);
-                break;
-            }
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFlworExpression(FLWOGRExpression flwor,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Mutable<ILogicalOperator> flworPlan = tupSource;
-        boolean isTop = context.isTopFlwor();
-        if (isTop) {
-            context.setTopFlwor(false);
-        }
-        for (Clause c : flwor.getClauseList()) {
-            Pair<ILogicalOperator, LogicalVariable> pC = c.accept(this, flworPlan);
-            flworPlan = new MutableObject<ILogicalOperator>(pC.first);
-        }
-
-        Expression r = flwor.getReturnExpr();
-        boolean noFlworClause = flwor.noForClause();
-
-        if (r.getKind() == Kind.VARIABLE_EXPRESSION) {
-            VariableExpr v = (VariableExpr) r;
-            LogicalVariable var = context.getVar(v.getVar().getId());
-
-            return produceFlwrResult(noFlworClause, isTop, flworPlan, var);
-
-        } else {
-            Mutable<ILogicalOperator> baseOp = new MutableObject<ILogicalOperator>(flworPlan.getValue());
-            Pair<ILogicalOperator, LogicalVariable> rRes = r.accept(this, baseOp);
-            ILogicalOperator rOp = rRes.first;
-            ILogicalOperator resOp;
-            if (expressionNeedsNoNesting(r)) {
-                baseOp.setValue(flworPlan.getValue());
-                resOp = rOp;
-            } else {
-                SubplanOperator s = new SubplanOperator(rOp);
-                s.getInputs().add(flworPlan);
-                resOp = s;
-                baseOp.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
-            }
-            Mutable<ILogicalOperator> resOpRef = new MutableObject<ILogicalOperator>(resOp);
-            return produceFlwrResult(noFlworClause, isTop, resOpRef, rRes.second);
-        }
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFieldAccessor(FieldAccessor fa,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(fa.getExpr(), tupSource);
-        LogicalVariable v = context.newVar();
-        AbstractFunctionCallExpression fldAccess = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME));
-        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-        ILogicalExpression faExpr = new ConstantExpression(new AsterixConstantValue(new AString(fa.getIdent()
-                .getValue())));
-        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(faExpr));
-        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(fldAccess));
-        a.getInputs().add(p.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitIndexAccessor(IndexAccessor ia,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(ia.getExpr(), tupSource);
-        LogicalVariable v = context.newVar();
-        AbstractFunctionCallExpression f;
-        if (ia.isAny()) {
-            f = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.ANY_COLLECTION_MEMBER));
-            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-        } else {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> indexPair = aqlExprToAlgExpression(ia.getIndexExpr(),
-                    tupSource);
-            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM));
-            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-            f.getArguments().add(new MutableObject<ILogicalExpression>(indexPair.first));
-        }
-        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
-        a.getInputs().add(p.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCallExpr(CallExpr fcall, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        LogicalVariable v = context.newVar();
-        FunctionSignature signature = fcall.getFunctionSignature();
-        List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        for (Expression expr : fcall.getExprList()) {
-            switch (expr.getKind()) {
-                case VARIABLE_EXPRESSION: {
-                    LogicalVariable var = context.getVar(((VariableExpr) expr).getVar().getId());
-                    args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
-                    break;
-                }
-                case LITERAL_EXPRESSION: {
-                    LiteralExpr val = (LiteralExpr) expr;
-                    args.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                            ConstantHelper.objectFromLiteral(val.getValue())))));
-                    break;
-                }
-                default: {
-                    Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
-                    AbstractLogicalOperator o1 = (AbstractLogicalOperator) eo.second.getValue();
-                    args.add(new MutableObject<ILogicalExpression>(eo.first));
-                    if (o1 != null && !(o1.getOperatorTag() == LogicalOperatorTag.ASSIGN && hasOnlyChild(o1, topOp))) {
-                        topOp = eo.second;
-                    }
-                    break;
-                }
-            }
-        }
-
-        AbstractFunctionCallExpression f;
-        if ((f = lookupUserDefinedFunction(signature, args)) == null) {
-            f = lookupBuiltinFunction(signature.getName(), signature.getArity(), args);
-        }
-
-        if (f == null) {
-            throw new AsterixException(" Unknown function " + signature.getName() + "@" + signature.getArity());
-        }
-
-        // Put hints into function call expr.
-        if (fcall.hasHints()) {
-            for (IExpressionAnnotation hint : fcall.getHints()) {
-                f.getAnnotations().put(hint, hint);
-            }
-        }
-
-        AssignOperator op = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
-        if (topOp != null) {
-            op.getInputs().add(topOp);
-        }
-
-        return new Pair<ILogicalOperator, LogicalVariable>(op, v);
-    }
-
-    private AbstractFunctionCallExpression lookupUserDefinedFunction(FunctionSignature signature,
-            List<Mutable<ILogicalExpression>> args) throws MetadataException {
-        if (signature.getNamespace() == null) {
-            return null;
-        }
-        Function function = MetadataManager.INSTANCE.getFunction(metadataProvider.getMetadataTxnContext(), signature);
-        if (function == null) {
-            return null;
-        }
-        AbstractFunctionCallExpression f = null;
-        if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_JAVA)) {
-            IFunctionInfo finfo = ExternalFunctionCompilerUtil.getExternalFunctionInfo(
-                    metadataProvider.getMetadataTxnContext(), function);
-            f = new ScalarFunctionCallExpression(finfo, args);
-        } else if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
-            IFunctionInfo finfo = FunctionUtils.getFunctionInfo(signature);
-            f = new ScalarFunctionCallExpression(finfo, args);
-        } else {
-            throw new MetadataException(" User defined functions written in " + function.getLanguage()
-                    + " are not supported");
-        }
-        return f;
-    }
-
-    private AbstractFunctionCallExpression lookupBuiltinFunction(String functionName, int arity,
-            List<Mutable<ILogicalExpression>> args) {
-        AbstractFunctionCallExpression f = null;
-        FunctionIdentifier fi = new FunctionIdentifier(AlgebricksBuiltinFunctions.ALGEBRICKS_NS, functionName, arity);
-        AsterixFunctionInfo afi = AsterixBuiltinFunctions.lookupFunction(fi);
-        FunctionIdentifier builtinAquafi = afi == null ? null : afi.getFunctionIdentifier();
-
-        if (builtinAquafi != null) {
-            fi = builtinAquafi;
-        } else {
-            fi = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, functionName, arity);
-            afi = AsterixBuiltinFunctions.lookupFunction(fi);
-            if (afi == null) {
-                return null;
-            }
-        }
-        if (AsterixBuiltinFunctions.isBuiltinAggregateFunction(fi)) {
-            f = AsterixBuiltinFunctions.makeAggregateFunctionExpression(fi, args);
-        } else if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(fi)) {
-            UnnestingFunctionCallExpression ufce = new UnnestingFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(fi), args);
-            ufce.setReturnsUniqueValues(AsterixBuiltinFunctions.returnsUniqueValues(fi));
-            f = ufce;
-        } else {
-            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fi), args);
-        }
-        return f;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDecl(FunctionDecl fd,
-            Mutable<ILogicalOperator> tupSource) {
-        // TODO Auto-generated method stub
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitGroupbyClause(GroupbyClause gc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        GroupByOperator gOp = new GroupByOperator();
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (GbyVariableExpressionPair ve : gc.getGbyPairList()) {
-            LogicalVariable v;
-            VariableExpr vexpr = ve.getVar();
-            if (vexpr != null) {
-                v = context.newVar(vexpr);
-            } else {
-                v = context.newVar();
-            }
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
-            gOp.addGbyExpression(v, eo.first);
-            topOp = eo.second;
-        }
-        for (GbyVariableExpressionPair ve : gc.getDecorPairList()) {
-            LogicalVariable v;
-            VariableExpr vexpr = ve.getVar();
-            if (vexpr != null) {
-                v = context.newVar(vexpr);
-            } else {
-                v = context.newVar();
-            }
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
-            gOp.addDecorExpression(v, eo.first);
-            topOp = eo.second;
-        }
-        gOp.getInputs().add(topOp);
-
-        for (VariableExpr var : gc.getWithVarList()) {
-            LogicalVariable aggVar = context.newVar();
-            LogicalVariable oldVar = context.getVar(var);
-            List<Mutable<ILogicalExpression>> flArgs = new ArrayList<Mutable<ILogicalExpression>>(1);
-            flArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldVar)));
-            AggregateFunctionCallExpression fListify = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
-                    AsterixBuiltinFunctions.LISTIFY, flArgs);
-            AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(aggVar),
-                    (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fListify)));
-
-            agg.getInputs().add(
-                    new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
-                            new MutableObject<ILogicalOperator>(gOp))));
-            ILogicalPlan plan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(agg));
-            gOp.getNestedPlans().add(plan);
-            // Hide the variable that was part of the "with", replacing it with
-            // the one bound by the aggregation op.
-            context.setVar(var, aggVar);
-        }
-
-        gOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, gc.hasHashGroupByHint());
-        return new Pair<ILogicalOperator, LogicalVariable>(gOp, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitIfExpr(IfExpr ifexpr, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        // In the most general case, IfThenElse is translated in the following
-        // way.
-        //
-        // We assign the result of the condition to one variable varCond.
-        // We create one subplan which contains the plan for the "then" branch,
-        // on top of which there is a selection whose condition is varCond.
-        // Similarly, we create one subplan for the "else" branch, in which the
-        // selection is not(varCond).
-        // Finally, we concatenate the two results with concat-non-null.
-
-        Pair<ILogicalOperator, LogicalVariable> pCond = ifexpr.getCondExpr().accept(this, tupSource);
-        ILogicalOperator opCond = pCond.first;
-        LogicalVariable varCond = pCond.second;
-
-        SubplanOperator sp = new SubplanOperator();
-        Mutable<ILogicalOperator> nestedSource = new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
-                new MutableObject<ILogicalOperator>(sp)));
-
-        Pair<ILogicalOperator, LogicalVariable> pThen = ifexpr.getThenExpr().accept(this, nestedSource);
-        SelectOperator sel1 = new SelectOperator(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                varCond)), false, null);
-        sel1.getInputs().add(new MutableObject<ILogicalOperator>(pThen.first));
-
-        Pair<ILogicalOperator, LogicalVariable> pElse = ifexpr.getElseExpr().accept(this, nestedSource);
-        AbstractFunctionCallExpression notVarCond = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), new MutableObject<ILogicalExpression>(
-                        new VariableReferenceExpression(varCond)));
-        SelectOperator sel2 = new SelectOperator(new MutableObject<ILogicalExpression>(notVarCond), false, null);
-        sel2.getInputs().add(new MutableObject<ILogicalOperator>(pElse.first));
-
-        ILogicalPlan p1 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel1));
-        sp.getNestedPlans().add(p1);
-        ILogicalPlan p2 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel2));
-        sp.getNestedPlans().add(p2);
-
-        Mutable<ILogicalOperator> opCondRef = new MutableObject<ILogicalOperator>(opCond);
-        sp.getInputs().add(opCondRef);
-
-        LogicalVariable resV = context.newVar();
-        AbstractFunctionCallExpression concatNonNull = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CONCAT_NON_NULL),
-                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pThen.second)),
-                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pElse.second)));
-        AssignOperator a = new AssignOperator(resV, new MutableObject<ILogicalExpression>(concatNonNull));
-        a.getInputs().add(new MutableObject<ILogicalOperator>(sp));
-
-        return new Pair<ILogicalOperator, LogicalVariable>(a, resV);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLiteralExpr(LiteralExpr l, Mutable<ILogicalOperator> tupSource) {
-        LogicalVariable var = context.newVar();
-        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(new ConstantExpression(
-                new AsterixConstantValue(ConstantHelper.objectFromLiteral(l.getValue())))));
-        if (tupSource != null) {
-            a.getInputs().add(tupSource);
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitOperatorExpr(OperatorExpr op,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        ArrayList<OperatorType> ops = op.getOpList();
-        int nOps = ops.size();
-
-        if (nOps > 0 && (ops.get(0) == OperatorType.AND || ops.get(0) == OperatorType.OR)) {
-            return visitAndOrOperator(op, tupSource);
-        }
-
-        ArrayList<Expression> exprs = op.getExprList();
-
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        ILogicalExpression currExpr = null;
-        for (int i = 0; i <= nOps; i++) {
-
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
-            topOp = p.second;
-            ILogicalExpression e = p.first;
-            // now look at the operator
-            if (i < nOps) {
-                if (OperatorExpr.opIsComparison(ops.get(i))) {
-                    AbstractFunctionCallExpression c = createComparisonExpression(ops.get(i));
-
-                    // chain the operators
-                    if (i == 0) {
-                        c.getArguments().add(new MutableObject<ILogicalExpression>(e));
-                        currExpr = c;
-                        if (op.isBroadcastOperand(i)) {
-                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
-                            bcast.setObject(BroadcastSide.LEFT);
-                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
-                        }
-                    } else {
-                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
-                                new MutableObject<ILogicalExpression>(e));
-                        c.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
-                        currExpr = c;
-                        if (i == 1 && op.isBroadcastOperand(i)) {
-                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
-                            bcast.setObject(BroadcastSide.RIGHT);
-                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
-                        }
-                    }
-                } else {
-                    AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(ops.get(i));
-
-                    if (i == 0) {
-                        f.getArguments().add(new MutableObject<ILogicalExpression>(e));
-                        currExpr = f;
-                    } else {
-                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
-                                new MutableObject<ILogicalExpression>(e));
-                        f.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
-                        currExpr = f;
-                    }
-                }
-            } else { // don't forget the last expression...
-                ((AbstractFunctionCallExpression) currExpr).getArguments()
-                        .add(new MutableObject<ILogicalExpression>(e));
-                if (i == 1 && op.isBroadcastOperand(i)) {
-                    BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
-                    bcast.setObject(BroadcastSide.RIGHT);
-                    ((AbstractFunctionCallExpression) currExpr).getAnnotations().put(
-                            BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
-                }
-            }
-        }
-
-        // Add hints as annotations.
-        if (op.hasHints() && currExpr instanceof AbstractFunctionCallExpression) {
-            AbstractFunctionCallExpression currFuncExpr = (AbstractFunctionCallExpression) currExpr;
-            for (IExpressionAnnotation hint : op.getHints()) {
-                currFuncExpr.getAnnotations().put(hint, hint);
-            }
-        }
-
-        LogicalVariable assignedVar = context.newVar();
-        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(currExpr));
-
-        a.getInputs().add(topOp);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitOrderbyClause(OrderbyClause oc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        OrderOperator ord = new OrderOperator();
-        Iterator<OrderModifier> modifIter = oc.getModifierList().iterator();
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (Expression e : oc.getOrderbyList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(e, topOp);
-            OrderModifier m = modifIter.next();
-            OrderOperator.IOrder comp = (m == OrderModifier.ASC) ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;
-            ord.getOrderExpressions()
-                    .add(new Pair<IOrder, Mutable<ILogicalExpression>>(comp, new MutableObject<ILogicalExpression>(
-                            p.first)));
-            topOp = p.second;
-        }
-        ord.getInputs().add(topOp);
-        if (oc.getNumTuples() > 0) {
-            ord.getAnnotations().put(OperatorAnnotations.CARDINALITY, oc.getNumTuples());
-        }
-        if (oc.getNumFrames() > 0) {
-            ord.getAnnotations().put(OperatorAnnotations.MAX_NUMBER_FRAMES, oc.getNumFrames());
-        }
-        if (oc.getRangeMap() != null) {
-            Iterator<OrderModifier> orderModifIter = oc.getModifierList().iterator();
-            boolean ascending = (orderModifIter.next() == OrderModifier.ASC);
-            RangeMapBuilder.verifyRangeOrder(oc.getRangeMap(), ascending);
-            ord.getAnnotations().put(OperatorAnnotations.USE_RANGE_CONNECTOR, oc.getRangeMap());
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(ord, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitQuantifiedExpression(QuantifiedExpression qe,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        ILogicalOperator firstOp = null;
-        Mutable<ILogicalOperator> lastOp = null;
-
-        for (QuantifiedPair qt : qe.getQuantifiedList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(qt.getExpr(), topOp);
-            topOp = eo1.second;
-            LogicalVariable uVar = context.newVar(qt.getVarExpr());
-            ILogicalOperator u = new UnnestOperator(uVar, new MutableObject<ILogicalExpression>(
-                    makeUnnestExpression(eo1.first)));
-
-            if (firstOp == null) {
-                firstOp = u;
-            }
-            if (lastOp != null) {
-                u.getInputs().add(lastOp);
-            }
-            lastOp = new MutableObject<ILogicalOperator>(u);
-        }
-
-        // We make all the unnests corresponding to quantified variables sit
-        // on top, in the hope of enabling joins and other optimizations.
-        firstOp.getInputs().add(topOp);
-        topOp = lastOp;
-
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
-
-        AggregateFunctionCallExpression fAgg;
-        SelectOperator s;
-        if (qe.getQuantifier() == Quantifier.SOME) {
-            s = new SelectOperator(new MutableObject<ILogicalExpression>(eo2.first), false, null);
-            s.getInputs().add(eo2.second);
-            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.NON_EMPTY_STREAM,
-                    new ArrayList<Mutable<ILogicalExpression>>());
-        } else { // EVERY
-            List<Mutable<ILogicalExpression>> satExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
-            satExprList.add(new MutableObject<ILogicalExpression>(eo2.first));
-            s = new SelectOperator(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), satExprList)), false, null);
-            s.getInputs().add(eo2.second);
-            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.EMPTY_STREAM,
-                    new ArrayList<Mutable<ILogicalExpression>>());
-        }
-        LogicalVariable qeVar = context.newVar();
-        AggregateOperator a = new AggregateOperator(mkSingletonArrayList(qeVar),
-                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fAgg)));
-        a.getInputs().add(new MutableObject<ILogicalOperator>(s));
-        return new Pair<ILogicalOperator, LogicalVariable>(a, qeVar);
-    }
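    // Editor's illustrative sketch, not part of the deleted file: the EVERY
    // branch above negates the satisfies-expression with not(...) and feeds it
    // to a SelectOperator, so that any surviving tuple disproves the universal
    // quantifier (the subsequent empty-stream aggregate then yields the final
    // result). The helper below isolates just that negated select; the class
    // and method names are illustrative assumptions, while every constructor
    // and call mirrors the code above.
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.aql.util.FunctionUtils;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;

    public class EverySelectSketch {
        public static SelectOperator negatedSelect(LogicalVariable satisfiesVar) {
            // Wrap a reference to the satisfies-result in not(...).
            List<Mutable<ILogicalExpression>> satExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
            satExprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(satisfiesVar)));
            ILogicalExpression notExpr = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), satExprList);
            // Keep only the tuples for which the satisfies-expression is not true.
            return new SelectOperator(new MutableObject<ILogicalExpression>(notExpr), false, null);
        }
    }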
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitQuery(Query q, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        return q.getBody().accept(this, tupSource);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitRecordConstructor(RecordConstructor rc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR));
-        LogicalVariable v1 = context.newVar();
-        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (FieldBinding fb : rc.getFbList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(fb.getLeftExpr(), topOp);
-            f.getArguments().add(new MutableObject<ILogicalExpression>(eo1.first));
-            topOp = eo1.second;
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(fb.getRightExpr(), topOp);
-            f.getArguments().add(new MutableObject<ILogicalExpression>(eo2.first));
-            topOp = eo2.second;
-        }
-        a.getInputs().add(topOp);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitListConstructor(ListConstructor lc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        FunctionIdentifier fid = (lc.getType() == Type.ORDERED_LIST_CONSTRUCTOR) ? AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR
-                : AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR;
-        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
-        LogicalVariable v1 = context.newVar();
-        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
-        Mutable<ILogicalOperator> topOp = tupSource;
-        for (Expression expr : lc.getExprList()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
-            f.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
-            topOp = eo.second;
-        }
-        a.getInputs().add(topOp);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUnaryExpr(UnaryExpr u, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Expression expr = u.getExpr();
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, tupSource);
-        LogicalVariable v1 = context.newVar();
-        AssignOperator a;
-        if (u.getSign() == Sign.POSITIVE) {
-            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(eo.first));
-        } else {
-            AbstractFunctionCallExpression m = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NUMERIC_UNARY_MINUS));
-            m.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
-            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(m));
-        }
-        a.getInputs().add(eo.second);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitVariableExpr(VariableExpr v, Mutable<ILogicalOperator> tupSource) {
-        // Should we ever get to this method?
-        LogicalVariable var = context.newVar();
-        LogicalVariable oldV = context.getVar(v.getVar().getId());
-        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(
-                new VariableReferenceExpression(oldV)));
-        a.getInputs().add(tupSource);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitWhereClause(WhereClause w, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(w.getWhereExpr(), tupSource);
-        SelectOperator s = new SelectOperator(new MutableObject<ILogicalExpression>(p.first), false, null);
-        s.getInputs().add(p.second);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(s, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLimitClause(LimitClause lc, Mutable<ILogicalOperator> tupSource)
-            throws AsterixException {
-        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getLimitExpr(), tupSource);
-        LimitOperator opLim;
-        Expression offset = lc.getOffset();
-        if (offset != null) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p2 = aqlExprToAlgExpression(offset, p1.second);
-            opLim = new LimitOperator(p1.first, p2.first);
-            opLim.getInputs().add(p2.second);
-        } else {
-            opLim = new LimitOperator(p1.first);
-            opLim.getInputs().add(p1.second);
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(opLim, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
-        Mutable<ILogicalOperator> input = null;
-        for (Expression expr : dc.getDistinctByExpr()) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(expr, tupSource);
-            exprList.add(new MutableObject<ILogicalExpression>(p.first));
-            input = p.second;
-        }
-        DistinctOperator opDistinct = new DistinctOperator(exprList);
-        opDistinct.getInputs().add(input);
-        return new Pair<ILogicalOperator, LogicalVariable>(opDistinct, null);
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUnionExpr(UnionExpr unionExpr,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        //Translate the AQL union into an assign [var] <- [function-call: asterix:union, Args:[..]]
-        //The rule "IntroduceUnionRule" will translate this assign operator into the UnionAll operator.
-        Mutable<ILogicalOperator> ts = tupSource;
-        LogicalVariable assignedVar = context.newVar();
-        List<Mutable<ILogicalExpression>> inputVars = new ArrayList<Mutable<ILogicalExpression>>();
-        for (Expression e : unionExpr.getExprs()) {
-            Pair<ILogicalOperator, LogicalVariable> op_var = e.accept(this, ts);
-            ts = new MutableObject<ILogicalOperator>(op_var.first);
-            VariableReferenceExpression var = new VariableReferenceExpression(op_var.second);
-            inputVars.add(new MutableObject<ILogicalExpression>(var));
-        }
-        AbstractFunctionCallExpression union = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.UNION), inputVars);
-        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(union));
-        a.getInputs().add(ts);
-        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
-    }
-
-    private AbstractFunctionCallExpression createComparisonExpression(OperatorType t) {
-        FunctionIdentifier fi = operatorTypeToFunctionIdentifier(t);
-        IFunctionInfo finfo = FunctionUtils.getFunctionInfo(fi);
-        return new ScalarFunctionCallExpression(finfo);
-    }
-
-    private FunctionIdentifier operatorTypeToFunctionIdentifier(OperatorType t) {
-        switch (t) {
-            case EQ: {
-                return AlgebricksBuiltinFunctions.EQ;
-            }
-            case NEQ: {
-                return AlgebricksBuiltinFunctions.NEQ;
-            }
-            case GT: {
-                return AlgebricksBuiltinFunctions.GT;
-            }
-            case GE: {
-                return AlgebricksBuiltinFunctions.GE;
-            }
-            case LT: {
-                return AlgebricksBuiltinFunctions.LT;
-            }
-            case LE: {
-                return AlgebricksBuiltinFunctions.LE;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-    }
-
-    private AbstractFunctionCallExpression createFunctionCallExpressionForBuiltinOperator(OperatorType t)
-            throws AsterixException {
-
-        FunctionIdentifier fid = null;
-        switch (t) {
-            case PLUS: {
-                fid = AlgebricksBuiltinFunctions.NUMERIC_ADD;
-                break;
-            }
-            case MINUS: {
-                fid = AsterixBuiltinFunctions.NUMERIC_SUBTRACT;
-                break;
-            }
-            case MUL: {
-                fid = AsterixBuiltinFunctions.NUMERIC_MULTIPLY;
-                break;
-            }
-            case DIV: {
-                fid = AsterixBuiltinFunctions.NUMERIC_DIVIDE;
-                break;
-            }
-            case MOD: {
-                fid = AsterixBuiltinFunctions.NUMERIC_MOD;
-                break;
-            }
-            case IDIV: {
-                fid = AsterixBuiltinFunctions.NUMERIC_IDIV;
-                break;
-            }
-            case CARET: {
-                fid = AsterixBuiltinFunctions.CARET;
-                break;
-            }
-            case AND: {
-                fid = AlgebricksBuiltinFunctions.AND;
-                break;
-            }
-            case OR: {
-                fid = AlgebricksBuiltinFunctions.OR;
-                break;
-            }
-            case FUZZY_EQ: {
-                fid = AsterixBuiltinFunctions.FUZZY_EQ;
-                break;
-            }
-
-            default: {
-                throw new NotImplementedException("Operator " + t + " is not yet implemented");
-            }
-        }
-        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
-    }
-
-    private static boolean hasOnlyChild(ILogicalOperator parent, Mutable<ILogicalOperator> childCandidate) {
-        List<Mutable<ILogicalOperator>> inp = parent.getInputs();
-        if (inp == null || inp.size() != 1) {
-            return false;
-        }
-        return inp.get(0) == childCandidate;
-    }
-
-    private Pair<ILogicalExpression, Mutable<ILogicalOperator>> aqlExprToAlgExpression(Expression expr,
-            Mutable<ILogicalOperator> topOp) throws AsterixException {
-        switch (expr.getKind()) {
-            case VARIABLE_EXPRESSION: {
-                VariableReferenceExpression ve = new VariableReferenceExpression(context.getVar(((VariableExpr) expr)
-                        .getVar().getId()));
-                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(ve, topOp);
-            }
-            case LITERAL_EXPRESSION: {
-                LiteralExpr val = (LiteralExpr) expr;
-                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new ConstantExpression(
-                        new AsterixConstantValue(ConstantHelper.objectFromLiteral(val.getValue()))), topOp);
-            }
-            default: {
-                // Mutable<ILogicalOperator> src = new
-                // Mutable<ILogicalOperator>();
-                // Mutable<ILogicalOperator> src = topOp;
-                if (expressionNeedsNoNesting(expr)) {
-                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, topOp);
-                    ILogicalExpression exp = ((AssignOperator) p.first).getExpressions().get(0).getValue();
-                    return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(exp, p.first.getInputs().get(0));
-                } else {
-                    Mutable<ILogicalOperator> src = new MutableObject<ILogicalOperator>();
-
-                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, src);
-
-                    if (((AbstractLogicalOperator) p.first).getOperatorTag() == LogicalOperatorTag.SUBPLAN) {
-                        src.setValue(topOp.getValue());
-                        Mutable<ILogicalOperator> top2 = new MutableObject<ILogicalOperator>(p.first);
-                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
-                                p.second), top2);
-                    } else {
-                        SubplanOperator s = new SubplanOperator();
-                        s.getInputs().add(topOp);
-                        src.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
-                        Mutable<ILogicalOperator> planRoot = new MutableObject<ILogicalOperator>(p.first);
-                        s.setRootOp(planRoot);
-                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
-                                p.second), new MutableObject<ILogicalOperator>(s));
-                    }
-                }
-            }
-        }
-
-    }
-
-    private Pair<ILogicalOperator, LogicalVariable> produceFlwrResult(boolean noForClause, boolean isTop,
-            Mutable<ILogicalOperator> resOpRef, LogicalVariable resVar) {
-        if (isTop) {
-            ProjectOperator pr = new ProjectOperator(resVar);
-            pr.getInputs().add(resOpRef);
-            return new Pair<ILogicalOperator, LogicalVariable>(pr, resVar);
-
-        } else if (noForClause) {
-            return new Pair<ILogicalOperator, LogicalVariable>(resOpRef.getValue(), resVar);
-        } else {
-            return aggListify(resVar, resOpRef, false);
-        }
-    }
-
-    private Pair<ILogicalOperator, LogicalVariable> aggListify(LogicalVariable var, Mutable<ILogicalOperator> opRef,
-            boolean bProject) {
-        AggregateFunctionCallExpression funAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
-                AsterixBuiltinFunctions.LISTIFY, new ArrayList<Mutable<ILogicalExpression>>());
-        funAgg.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
-        LogicalVariable varListified = context.newVar();
-        AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(varListified),
-                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(funAgg)));
-        agg.getInputs().add(opRef);
-        ILogicalOperator res;
-        if (bProject) {
-            ProjectOperator pr = new ProjectOperator(varListified);
-            pr.getInputs().add(new MutableObject<ILogicalOperator>(agg));
-            res = pr;
-        } else {
-            res = agg;
-        }
-        return new Pair<ILogicalOperator, LogicalVariable>(res, varListified);
-    }
-
-    private Pair<ILogicalOperator, LogicalVariable> visitAndOrOperator(OperatorExpr op,
-            Mutable<ILogicalOperator> tupSource) throws AsterixException {
-        ArrayList<OperatorType> ops = op.getOpList();
-        int nOps = ops.size();
-
-        ArrayList<Expression> exprs = op.getExprList();
-
-        Mutable<ILogicalOperator> topOp = tupSource;
-
-        OperatorType opLogical = ops.get(0);
-        AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(opLogical);
-
-        for (int i = 0; i <= nOps; i++) {
-            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
-            topOp = p.second;
-            // now look at the operator
-            if (i < nOps) {
-                if (ops.get(i) != opLogical) {
-                    throw new TranslationException("Unexpected operator " + ops.get(i)
-                            + " in an OperatorExpr starting with " + opLogical);
-                }
-            }
-            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
-        }
-
-        LogicalVariable assignedVar = context.newVar();
-        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(f));
-        a.getInputs().add(topOp);
-
-        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
-
-    }
-
-    private static boolean expressionNeedsNoNesting(Expression expr) {
-        Kind k = expr.getKind();
-        return k == Kind.LITERAL_EXPRESSION || k == Kind.LIST_CONSTRUCTOR_EXPRESSION
-                || k == Kind.RECORD_CONSTRUCTOR_EXPRESSION || k == Kind.VARIABLE_EXPRESSION
-                || k == Kind.CALL_EXPRESSION || k == Kind.OP_EXPRESSION || k == Kind.FIELD_ACCESSOR_EXPRESSION
-                || k == Kind.INDEX_ACCESSOR_EXPRESSION || k == Kind.UNARY_EXPRESSION || k == Kind.UNION_EXPRESSION;
-    }
-
-    private <T> ArrayList<T> mkSingletonArrayList(T item) {
-        ArrayList<T> array = new ArrayList<T>(1);
-        array.add(item);
-        return array;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitTypeDecl(TypeDecl td, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitRecordTypeDefiniton(RecordTypeDefinition tre,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitTypeReferenceExpression(TypeReferenceExpression tre,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitNodegroupDecl(NodegroupDecl ngd, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitLoadStatement(LoadStatement stmtLoad,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDropStatement(DropStatement del, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDisconnectFeedStatement(DisconnectFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateIndexStatement(CreateIndexStatement cis,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitOrderedListTypeDefiniton(OrderedListTypeDefinition olte,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUnorderedListTypeDefiniton(UnorderedListTypeDefinition ulte,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    private ILogicalExpression makeUnnestExpression(ILogicalExpression expr) {
-        switch (expr.getExpressionTag()) {
-            case VARIABLE: {
-                return new UnnestingFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
-                        new MutableObject<ILogicalExpression>(expr));
-            }
-            case FUNCTION_CALL: {
-                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
-                if (fce.getKind() == FunctionKind.UNNEST) {
-                    return expr;
-                } else {
-                    return new UnnestingFunctionCallExpression(
-                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
-                            new MutableObject<ILogicalExpression>(expr));
-                }
-            }
-            default: {
-                return expr;
-            }
-        }
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitInsertStatement(InsertStatement insert,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDeleteStatement(DeleteStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUpdateStatement(UpdateStatement update,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitUpdateClause(UpdateClause del, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDecl(DataverseDecl dv, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDatasetDecl(DatasetDecl dd, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitSetStatement(SetStatement ss, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitWriteStatement(WriteStatement ws, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateDataverseStatement(CreateDataverseStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitIndexDropStatement(IndexDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitNodeGroupDropStatement(NodeGroupDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDropStatement(DataverseDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitTypeDropStatement(TypeDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visit(CreateFunctionStatement cfs, Mutable<ILogicalOperator> arg)
-            throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDropStatement(FunctionDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-    
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreatePrimaryFeedStatement(CreatePrimaryFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateSecondaryFeedStatement(CreateSecondaryFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
- 
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitConnectFeedStatement(ConnectFeedStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedStatement(FeedDropStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCompactStatement(CompactStatement del,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        // TODO Auto-generated method stub
-        return null;
-    }
-    
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitCreateFeedPolicyStatement(CreateFeedPolicyStatement cfps,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        return null;
-    }
-
-    @Override
-    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedPolicyStatement(FeedPolicyDropStatement dfs,
-            Mutable<ILogicalOperator> arg) throws AsterixException {
-        return null;
-    }
-}
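
An aside on the quantifier translation removed above: visitQuantifiedExpression turns "some ... satisfies p" into a select on p feeding a NON_EMPTY_STREAM aggregate, and "every ... satisfies p" into a select on not(p) feeding an EMPTY_STREAM aggregate. The standalone Java sketch below mirrors that equivalence using only the JDK; it is illustrative only, does not use the Algebricks operator model, and its class and method names are invented for the example.

import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;

public class QuantifierSemanticsSketch {

    // SOME x in c satisfies p(x): the items passing p must form a non-empty stream,
    // which is what the NON_EMPTY_STREAM aggregate over select(p) checks.
    static <T> boolean some(List<T> c, Predicate<T> p) {
        return c.stream().filter(p).findAny().isPresent();
    }

    // EVERY x in c satisfies p(x): the items passing not(p) must form an empty stream,
    // which is what the EMPTY_STREAM aggregate over select(not(p)) checks.
    static <T> boolean every(List<T> c, Predicate<T> p) {
        return !c.stream().filter(p.negate()).findAny().isPresent();
    }

    public static void main(String[] args) {
        List<Integer> xs = Arrays.asList(1, 2, 3);
        System.out.println(some(xs, x -> x > 2));   // true
        System.out.println(every(xs, x -> x > 0));  // true
        System.out.println(every(xs, x -> x > 2));  // false
    }
}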


[19/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java
deleted file mode 100644
index 54804e7..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/java/AsterixJavaClient.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.java;
-
-import java.io.PrintWriter;
-import java.io.Reader;
-import java.util.List;
-
-import edu.uci.ics.asterix.api.common.APIFramework;
-import edu.uci.ics.asterix.api.common.Job;
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.parser.AQLParser;
-import edu.uci.ics.asterix.aql.parser.ParseException;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class AsterixJavaClient {
-    private IHyracksClientConnection hcc;
-    private Reader queryText;
-    private PrintWriter writer;
-
-    private Job[] dmlJobs;
-    private JobSpecification queryJobSpec;
-
-    public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText, PrintWriter writer) {
-        this.hcc = hcc;
-        this.queryText = queryText;
-        this.writer = writer;
-    }
-
-    public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText) {
-        this(hcc, queryText, new PrintWriter(System.out, true));
-    }
-
-    public void compile() throws Exception {
-        compile(true, false, false, false, false, false, false);
-    }
-
-    public void compile(boolean optimize, boolean printRewrittenExpressions, boolean printLogicalPlan,
-            boolean printOptimizedPlan, boolean printPhysicalOpsOnly, boolean generateBinaryRuntime, boolean printJob)
-            throws Exception {
-        queryJobSpec = null;
-        dmlJobs = null;
-
-        if (queryText == null) {
-            return;
-        }
-        int ch;
-        StringBuilder builder = new StringBuilder();
-        while ((ch = queryText.read()) != -1) {
-            builder.append((char) ch);
-        }
-        AQLParser parser = new AQLParser(builder.toString());
-        List<Statement> aqlStatements;
-        try {
-            aqlStatements = parser.parse();
-        } catch (ParseException pe) {
-            throw new AsterixException(pe);
-        }
-        MetadataManager.INSTANCE.init();
-
-        SessionConfig conf = new SessionConfig(writer, OutputFormat.ADM, optimize, true, generateBinaryRuntime);
-        conf.setOOBData(false, printRewrittenExpressions, printLogicalPlan,
-                        printOptimizedPlan, printJob);
-        if (printPhysicalOpsOnly) {
-            conf.set(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS, true);
-        }
-
-        AqlTranslator aqlTranslator = new AqlTranslator(aqlStatements, conf);
-        aqlTranslator.compileAndExecute(hcc, null, AqlTranslator.ResultDelivery.SYNC);
-        writer.flush();
-    }
-
-    public void execute() throws Exception {
-        if (dmlJobs != null) {
-            APIFramework.executeJobArray(hcc, dmlJobs, writer);
-        }
-        if (queryJobSpec != null) {
-            APIFramework.executeJobArray(hcc, new JobSpecification[] { queryJobSpec }, writer);
-        }
-    }
-
-}
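
For context on the class deleted above: it was driven by handing it a Hyracks client connection, a Reader over AQL text, and an optional PrintWriter, then calling compile() (which, per the code above, parses, translates, and executes the statements synchronously) and execute(). The snippet below is a hedged usage sketch, not code from the repository; the cluster-controller host/port and the sample AQL statement are placeholders for whatever a given deployment uses.

import java.io.PrintWriter;
import java.io.StringReader;

import edu.uci.ics.asterix.api.java.AsterixJavaClient;
import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

public class AsterixJavaClientUsageSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder cluster-controller address; adjust to your deployment.
        IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);

        // Placeholder AQL: list the datasets recorded in the Metadata dataverse.
        String aql = "use dataverse Metadata; for $ds in dataset Dataset return $ds;";

        AsterixJavaClient client = new AsterixJavaClient(
                hcc, new StringReader(aql), new PrintWriter(System.out, true));

        client.compile();   // parse, translate and (ResultDelivery.SYNC) execute the statements
        client.execute();   // effectively a no-op here, since dmlJobs/queryJobSpec are never populated above
    }
}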


[40/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
deleted file mode 100644
index 47e3dbf..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-
-/**
- * This rule optimizes simple selections with secondary or primary indexes. The use of an
- * index is expressed as an unnest-map over an index-search function which will be
- * replaced with the appropriate embodiment during codegen.
- * Matches the following operator patterns:
- * Standard secondary index pattern:
- * There must be at least one assign, but there may be more, e.g., when matching similarity-jaccard-check().
- * (select) <-- (assign | unnest)+ <-- (datasource scan)
- * Primary index lookup pattern:
- * No assign is necessary to get the primary key fields (they are already stored fields in the BTree tuples).
- * (select) <-- (datasource scan)
- * Replaces the above patterns with this plan:
- * (select) <-- (assign) <-- (btree search) <-- (sort) <-- (unnest-map(index search)) <-- (assign)
- * The sort is optional, and some access method implementations may choose not to sort.
- * Note that for some index-based optimizations we do not remove the triggering
- * condition from the select, since the index may only act as a filter, and the
- * final verification must still be done with the original select condition.
- * The basic outline of this rule is:
- * 1. Match operator pattern.
- * 2. Analyze select condition to see if there are optimizable functions (delegated to IAccessMethods).
- * 3. Check metadata to see if there are applicable indexes.
- * 4. Choose an index to apply (for now only a single index will be chosen).
- * 5. Rewrite plan using index (delegated to IAccessMethods).
- */
-public class IntroduceSelectAccessMethodRule extends AbstractIntroduceAccessMethodRule {
-
-    // Operators representing the patterns to be matched:
-    // These ops are set in matchesOperatorPattern()
-    protected Mutable<ILogicalOperator> selectRef = null;
-    protected SelectOperator select = null;
-    protected AbstractFunctionCallExpression selectCond = null;
-    protected final OptimizableOperatorSubTree subTree = new OptimizableOperatorSubTree();
-
-    // Register access methods.
-    protected static Map<FunctionIdentifier, List<IAccessMethod>> accessMethods = new HashMap<FunctionIdentifier, List<IAccessMethod>>();
-    static {
-        registerAccessMethod(BTreeAccessMethod.INSTANCE, accessMethods);
-        registerAccessMethod(RTreeAccessMethod.INSTANCE, accessMethods);
-        registerAccessMethod(InvertedIndexAccessMethod.INSTANCE, accessMethods);
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        clear();
-        setMetadataDeclarations(context);
-
-        // Match operator pattern and initialize operator members.
-        if (!matchesOperatorPattern(opRef, context)) {
-            return false;
-        }
-
-        // Analyze select condition.
-        Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs = new HashMap<IAccessMethod, AccessMethodAnalysisContext>();
-        if (!analyzeCondition(selectCond, subTree.assignsAndUnnests, analyzedAMs)) {
-            return false;
-        }
-
-        // Set dataset and type metadata.
-        if (!subTree.setDatasetAndTypeMetadata((AqlMetadataProvider) context.getMetadataProvider())) {
-            return false;
-        }
-
-        fillSubTreeIndexExprs(subTree, analyzedAMs, context);
-        pruneIndexCandidates(analyzedAMs);
-
-        // Choose index to be applied.
-        Pair<IAccessMethod, Index> chosenIndex = chooseIndex(analyzedAMs);
-        if (chosenIndex == null) {
-            context.addToDontApplySet(this, select);
-            return false;
-        }
-
-        // Apply plan transformation using chosen index.
-        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(chosenIndex.first);
-        boolean res = chosenIndex.first.applySelectPlanTransformation(selectRef, subTree, chosenIndex.second,
-                analysisCtx, context);
-        if (res) {
-            OperatorPropertiesUtil.typeOpRec(opRef, context);
-        }
-        context.addToDontApplySet(this, select);
-        return res;
-    }
-
-    protected boolean matchesOperatorPattern(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        // First check that the operator is a select and its condition is a function call.
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op1)) {
-            return false;
-        }
-        if (op1.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-        // Set and analyze select.
-        selectRef = opRef;
-        select = (SelectOperator) op1;
-        // Check that the select's condition is a function call.
-        ILogicalExpression condExpr = select.getCondition().getValue();
-        if (condExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        selectCond = (AbstractFunctionCallExpression) condExpr;
-        boolean res = subTree.initFromSubTree(op1.getInputs().get(0));
-        return res && subTree.hasDataSourceScan();
-    }
-
-    @Override
-    public Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods() {
-        return accessMethods;
-    }
-
-    private void clear() {
-        selectRef = null;
-        select = null;
-        selectCond = null;
-    }
-}
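
The static initializer above wires the B-tree, R-tree and inverted-index access methods into the accessMethods map under the function identifiers they can optimize, via registerAccessMethod (presumably supplied by the abstract base rule, which is not part of this diff); the rule exposes that map through getAccessMethods() and draws on it when analyzing the select condition. The standalone Java sketch below illustrates just this registration/lookup pattern; the interface and names are simplified stand-ins, not the actual Asterix or Algebricks types.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Simplified stand-in for IAccessMethod: a method advertises the functions it can optimize.
interface SketchAccessMethod {
    String name();
    List<String> getOptimizableFunctions();
}

public class AccessMethodRegistrySketch {

    // Mirrors the accessMethods map: function identifier -> candidate access methods.
    private static final Map<String, List<SketchAccessMethod>> REGISTRY =
            new HashMap<String, List<SketchAccessMethod>>();

    // Mirrors registerAccessMethod(...): index the method under every function it handles.
    static void register(SketchAccessMethod am) {
        for (String fid : am.getOptimizableFunctions()) {
            List<SketchAccessMethod> methods = REGISTRY.get(fid);
            if (methods == null) {
                methods = new ArrayList<SketchAccessMethod>();
                REGISTRY.put(fid, methods);
            }
            methods.add(am);
        }
    }

    public static void main(String[] args) {
        register(new SketchAccessMethod() {
            public String name() { return "btree-like"; }
            public List<String> getOptimizableFunctions() { return Arrays.asList("eq", "lt", "gt"); }
        });
        // When the rule analyzes a select condition, each function found in the condition
        // is looked up here to collect the access methods that might be able to serve it.
        for (SketchAccessMethod am : REGISTRY.get("eq")) {
            System.out.println(am.name() + " can potentially optimize eq");
        }
    }
}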

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
deleted file mode 100644
index 614cfbc..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
+++ /dev/null
@@ -1,1166 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.base.LogicalOperatorDeepCopyVisitor;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryTokenizerFactoryProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.IACollection;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveEditDistanceSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveListEditDistanceSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.EditDistanceSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.JaccardSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ListEditDistanceSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-
-/**
- * Class for helping rewrite rules to choose and apply inverted indexes.
- */
-public class InvertedIndexAccessMethod implements IAccessMethod {
-
-    // Enum describing the search modifier type. Used for passing info to jobgen.
-    public static enum SearchModifierType {
-        CONJUNCTIVE,
-        JACCARD,
-        EDIT_DISTANCE,
-        CONJUNCTIVE_EDIT_DISTANCE,
-        INVALID
-    }
-
-    private static List<FunctionIdentifier> funcIdents = new ArrayList<FunctionIdentifier>();
-    static {
-        funcIdents.add(AsterixBuiltinFunctions.CONTAINS);
-        // For matching similarity-check functions. For example, similarity-jaccard-check returns a list of two items,
-        // and the select condition will get the first list-item and check whether it evaluates to true.
-        funcIdents.add(AsterixBuiltinFunctions.GET_ITEM);
-    }
-
-    // These function identifiers are matched in this AM's analyzeFuncExprArgs(),
-    // and are not visible to the outside driver.
-    private static HashSet<FunctionIdentifier> secondLevelFuncIdents = new HashSet<FunctionIdentifier>();
-    static {
-        secondLevelFuncIdents.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK);
-        secondLevelFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK);
-        secondLevelFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS);
-    }
-
-    public static InvertedIndexAccessMethod INSTANCE = new InvertedIndexAccessMethod();
-
-    @Override
-    public List<FunctionIdentifier> getOptimizableFunctions() {
-        return funcIdents;
-    }
-
-    @Override
-    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
-
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
-            boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
-            if (!matches) {
-                matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVars(funcExpr, analysisCtx);
-            }
-            return matches;
-        }
-        return analyzeGetItemFuncExpr(funcExpr, assignsAndUnnests, analysisCtx);
-    }
-
-    public boolean analyzeGetItemFuncExpr(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
-        if (funcExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.GET_ITEM) {
-            return false;
-        }
-        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
-        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
-        // The second arg is the item index to be accessed. It must be a constant.
-        if (arg2.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            return false;
-        }
-        // The first arg must be a variable or a function expr.
-        // If it is a variable we must track its origin in the assigns to get the original function expr.
-        if (arg1.getExpressionTag() != LogicalExpressionTag.VARIABLE
-                && arg1.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression matchedFuncExpr = null;
-        // The get-item arg is a function call; directly check whether it's optimizable.
-        if (arg1.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            matchedFuncExpr = (AbstractFunctionCallExpression) arg1;
-        }
-        // The get-item arg is a variable. Search the assigns and unnests for its originating function.
-        int matchedAssignOrUnnestIndex = -1;
-        if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            VariableReferenceExpression varRefExpr = (VariableReferenceExpression) arg1;
-            // Try to find variable ref expr in all assigns.
-            for (int i = 0; i < assignsAndUnnests.size(); i++) {
-                AbstractLogicalOperator op = assignsAndUnnests.get(i);
-                if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                    AssignOperator assign = (AssignOperator) op;
-                    List<LogicalVariable> assignVars = assign.getVariables();
-                    List<Mutable<ILogicalExpression>> assignExprs = assign.getExpressions();
-                    for (int j = 0; j < assignVars.size(); j++) {
-                        LogicalVariable var = assignVars.get(j);
-                        if (var != varRefExpr.getVariableReference()) {
-                            continue;
-                        }
-                        // We've matched the variable in the first assign. Now analyze the originating function.
-                        ILogicalExpression matchedExpr = assignExprs.get(j).getValue();
-                        if (matchedExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                            return false;
-                        }
-                        matchedFuncExpr = (AbstractFunctionCallExpression) matchedExpr;
-                        break;
-                    }
-                } else {
-                    UnnestOperator unnest = (UnnestOperator) op;
-                    LogicalVariable var = unnest.getVariable();
-                    if (var == varRefExpr.getVariableReference()) {
-                        ILogicalExpression matchedExpr = unnest.getExpressionRef().getValue();
-                        if (matchedExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                            return false;
-                        }
-                        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) matchedExpr;
-                        if (unnestFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
-                            return false;
-                        }
-                        matchedFuncExpr = (AbstractFunctionCallExpression) unnestFuncExpr.getArguments().get(0)
-                                .getValue();
-                    }
-                }
-                // We've already found a match.
-                if (matchedFuncExpr != null) {
-                    matchedAssignOrUnnestIndex = i;
-                    break;
-                }
-            }
-        }
-        // Check that the matched function is optimizable by this access method.
-        if (!secondLevelFuncIdents.contains(matchedFuncExpr.getFunctionIdentifier())) {
-            return false;
-        }
-        boolean selectMatchFound = analyzeSelectSimilarityCheckFuncExprArgs(matchedFuncExpr, assignsAndUnnests,
-                matchedAssignOrUnnestIndex, analysisCtx);
-        boolean joinMatchFound = analyzeJoinSimilarityCheckFuncExprArgs(matchedFuncExpr, assignsAndUnnests,
-                matchedAssignOrUnnestIndex, analysisCtx);
-        if (selectMatchFound || joinMatchFound) {
-            return true;
-        }
-        return false;
-    }
-
-    private boolean analyzeJoinSimilarityCheckFuncExprArgs(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, int matchedAssignOrUnnestIndex,
-            AccessMethodAnalysisContext analysisCtx) {
-        // There should be exactly three arguments.
-        // The last function argument is assumed to be the similarity threshold.
-        IAlgebricksConstantValue constThreshVal = null;
-        ILogicalExpression arg3 = funcExpr.getArguments().get(2).getValue();
-        if (arg3.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            return false;
-        }
-        constThreshVal = ((ConstantExpression) arg3).getValue();
-        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
-        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
-        // We expect arg1 and arg2 to be non-constants for a join.
-        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT
-                || arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-            return false;
-        }
-        LogicalVariable fieldVarExpr1 = getNonConstArgFieldExprPair(arg1, funcExpr, assignsAndUnnests,
-                matchedAssignOrUnnestIndex);
-        if (fieldVarExpr1 == null) {
-            return false;
-        }
-        LogicalVariable fieldVarExpr2 = getNonConstArgFieldExprPair(arg2, funcExpr, assignsAndUnnests,
-                matchedAssignOrUnnestIndex);
-        if (fieldVarExpr2 == null) {
-            return false;
-        }
-        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, new LogicalVariable[] { fieldVarExpr1,
-                fieldVarExpr2 }, new IAlgebricksConstantValue[] { constThreshVal });
-        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
-            //avoid additional optFuncExpressions in case of a join
-            if (optFuncExpr.getFuncExpr().equals(funcExpr)) {
-                return true;
-            }
-        }
-        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
-        return true;
-    }
-
-    private boolean analyzeSelectSimilarityCheckFuncExprArgs(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, int matchedAssignOrUnnestIndex,
-            AccessMethodAnalysisContext analysisCtx) {
-        // There should be exactly three arguments.
-        // The last function argument is assumed to be the similarity threshold.
-        IAlgebricksConstantValue constThreshVal = null;
-        ILogicalExpression arg3 = funcExpr.getArguments().get(2).getValue();
-        if (arg3.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            return false;
-        }
-        constThreshVal = ((ConstantExpression) arg3).getValue();
-        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
-        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
-        // Determine whether one arg is constant, and the other is non-constant.
-        ILogicalExpression constArg = null;
-        ILogicalExpression nonConstArg = null;
-        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT
-                && arg2.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            // The arguments of the edit-distance-contains() function are asymmetrical; we can only use the index if it is on the first argument
-            if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
-                return false;
-            }
-            constArg = arg1;
-            nonConstArg = arg2;
-        } else if (arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT
-                && arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            constArg = arg2;
-            nonConstArg = arg1;
-        } else {
-            return false;
-        }
-        ConstantExpression constExpr = (ConstantExpression) constArg;
-        IAlgebricksConstantValue constFilterVal = constExpr.getValue();
-        LogicalVariable fieldVarExpr = getNonConstArgFieldExprPair(nonConstArg, funcExpr, assignsAndUnnests,
-                matchedAssignOrUnnestIndex);
-        if (fieldVarExpr == null) {
-            return false;
-        }
-        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, new LogicalVariable[] { fieldVarExpr },
-                new IAlgebricksConstantValue[] { constFilterVal, constThreshVal });
-        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
-            //avoid additional optFuncExpressions in case of a join
-            if (optFuncExpr.getFuncExpr().equals(funcExpr))
-                return true;
-        }
-        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
-        return true;
-    }
-
-    private LogicalVariable getNonConstArgFieldExprPair(ILogicalExpression nonConstArg,
-            AbstractFunctionCallExpression funcExpr, List<AbstractLogicalOperator> assignsAndUnnests,
-            int matchedAssignOrUnnestIndex) {
-        LogicalVariable fieldVar = null;
-        // Analyze nonConstArg depending on similarity function.
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK) {
-            AbstractFunctionCallExpression nonConstFuncExpr = funcExpr;
-            if (nonConstArg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                nonConstFuncExpr = (AbstractFunctionCallExpression) nonConstArg;
-                // TODO: Currently, we're only looking for word and gram tokens (non hashed).
-                if (nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.WORD_TOKENS
-                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.GRAM_TOKENS) {
-                    return null;
-                }
-                // Find the variable that is being tokenized.
-                nonConstArg = nonConstFuncExpr.getArguments().get(0).getValue();
-            }
-        }
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
-                || funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
-            while (nonConstArg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                AbstractFunctionCallExpression nonConstFuncExpr = (AbstractFunctionCallExpression) nonConstArg;
-                if (nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.WORD_TOKENS
-                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SUBSTRING
-                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SUBSTRING_BEFORE
-                        && nonConstFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SUBSTRING_AFTER) {
-                    return null;
-                }
-                // Find the variable whose substring is used in the similarity function
-                nonConstArg = nonConstFuncExpr.getArguments().get(0).getValue();
-            }
-        }
-        if (nonConstArg.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            fieldVar = ((VariableReferenceExpression) nonConstArg).getVariableReference();
-        }
-        return fieldVar;
-    }
-
-    @Override
-    public boolean matchAllIndexExprs() {
-        return true;
-    }
-
-    @Override
-    public boolean matchPrefixIndexExprs() {
-        return false;
-    }
-
-    private ILogicalOperator createSecondaryToPrimaryPlan(OptimizableOperatorSubTree indexSubTree,
-            OptimizableOperatorSubTree probeSubTree, Index chosenIndex, IOptimizableFuncExpr optFuncExpr,
-            boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context)
-            throws AlgebricksException {
-        Dataset dataset = indexSubTree.dataset;
-        ARecordType recordType = indexSubTree.recordType;
-        // We made sure indexSubTree has a datasource scan.
-        DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) indexSubTree.dataSourceRef.getValue();
-
-        InvertedIndexJobGenParams jobGenParams = new InvertedIndexJobGenParams(chosenIndex.getIndexName(),
-                chosenIndex.getIndexType(), dataset.getDataverseName(), dataset.getDatasetName(), retainInput,
-                retainNull, requiresBroadcast);
-        // Add function-specific args such as search modifier, and possibly a similarity threshold.
-        addFunctionSpecificArgs(optFuncExpr, jobGenParams);
-        // Add the type of search key from the optFuncExpr.
-        addSearchKeyType(optFuncExpr, indexSubTree, context, jobGenParams);
-
-        // Operator that feeds the secondary-index search.
-        AbstractLogicalOperator inputOp = null;
-        // Here we generate vars and funcs for assigning the secondary-index keys to be fed into the secondary-index search.
-        // List of variables for the assign.
-        ArrayList<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
-        // probeSubTree is null if we are dealing with a selection query, and non-null for join queries.
-        if (probeSubTree == null) {
-            // List of expressions for the assign.
-            ArrayList<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
-            // Add key vars and exprs to argument list.
-            addKeyVarsAndExprs(optFuncExpr, keyVarList, keyExprList, context);
-            // Assign operator that sets the secondary-index search-key fields.
-            inputOp = new AssignOperator(keyVarList, keyExprList);
-            // Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
-            inputOp.getInputs().add(dataSourceScan.getInputs().get(0));
-            inputOp.setExecutionMode(dataSourceScan.getExecutionMode());
-        } else {
-            // We are optimizing a join. Add the input variable to the secondaryIndexFuncArgs.
-            LogicalVariable inputSearchVariable = getInputSearchVar(optFuncExpr, indexSubTree);
-            keyVarList.add(inputSearchVariable);
-            inputOp = (AbstractLogicalOperator) probeSubTree.root;
-        }
-        jobGenParams.setKeyVarList(keyVarList);
-        UnnestMapOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
-                chosenIndex, inputOp, jobGenParams, context, true, retainInput);
-
-        // Generate the rest of the upstream plan which feeds the search results into the primary index.
-        UnnestMapOperator primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceScan, dataset,
-                recordType, secondaryIndexUnnestOp, context, true, retainInput, retainNull, false);
-
-        return primaryIndexUnnestOp;
-    }
-
-    /**
-     * Returns the variable which acts as the input search key to a secondary
-     * index that optimizes optFuncExpr by rewriting indexSubTree
-     * (which is the original subtree that will be replaced by the index plan).
-     */
-    private LogicalVariable getInputSearchVar(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree indexSubTree) {
-        if (optFuncExpr.getOperatorSubTree(0) == indexSubTree) {
-            // If the index is on a dataset in subtree 0, then subtree 1 will feed.
-            return optFuncExpr.getLogicalVar(1);
-        } else {
-            // If the index is on a dataset in subtree 1, then subtree 0 will feed.
-            return optFuncExpr.getLogicalVar(0);
-        }
-    }
-
-    @Override
-    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
-            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
-            IOptimizationContext context) throws AlgebricksException {
-        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
-        ILogicalOperator indexPlanRootOp = createSecondaryToPrimaryPlan(subTree, null, chosenIndex, optFuncExpr, false,
-                false, false, context);
-        // Replace the datasource scan with the new plan rooted at primaryIndexUnnestMap.
-        subTree.dataSourceRef.setValue(indexPlanRootOp);
-        return true;
-    }
-
-    @Override
-    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
-            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
-            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
-            boolean hasGroupBy) throws AlgebricksException {
-        // Figure out if the index is applicable on the left or right side (if both, we arbitrarily prefer the left side).
-        Dataset dataset = analysisCtx.indexDatasetMap.get(chosenIndex);
-        // Determine probe and index subtrees based on chosen index.
-        OptimizableOperatorSubTree indexSubTree = null;
-        OptimizableOperatorSubTree probeSubTree = null;
-        if (!isLeftOuterJoin && leftSubTree.hasDataSourceScan()
-                && dataset.getDatasetName().equals(leftSubTree.dataset.getDatasetName())) {
-            indexSubTree = leftSubTree;
-            probeSubTree = rightSubTree;
-        } else if (rightSubTree.hasDataSourceScan()
-                && dataset.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
-            indexSubTree = rightSubTree;
-            probeSubTree = leftSubTree;
-        }
-        if (indexSubTree == null) {
-            // This may happen in the left-outer-join case.
-            return false;
-        }
-
-        IOptimizableFuncExpr optFuncExpr = AccessMethodUtils.chooseFirstOptFuncExpr(chosenIndex, analysisCtx);
-        // The arguments of the edit-distance-contains() function are asymmetrical; we can only use the index
-        // if the dataset of the index subtree and the dataset of the first argument's subtree are the same.
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS
-                && optFuncExpr.getOperatorSubTree(0).dataset != null
-                && !optFuncExpr.getOperatorSubTree(0).dataset.getDatasetName().equals(
-                        indexSubTree.dataset.getDatasetName())) {
-            return false;
-        }
-
-        //if LOJ, reset null place holder variable
-        LogicalVariable newNullPlaceHolderVar = null;
-        if (isLeftOuterJoin && hasGroupBy) {
-            //get a new null place holder variable that is the first field variable of the primary key
-            //from the indexSubTree's datasourceScanOp
-            newNullPlaceHolderVar = indexSubTree.getDataSourceVariables().get(0);
-
-            //reset the null place holder variable
-            AccessMethodUtils.resetLOJNullPlaceholderVariableInGroupByOp(analysisCtx, newNullPlaceHolderVar, context);
-        }
-
-        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) joinRef.getValue();
-
-        // Remember the original probe subtree, and its primary-key variables,
-        // so we can later retrieve the missing attributes via an equi join.
-        List<LogicalVariable> originalSubTreePKs = new ArrayList<LogicalVariable>();
-        // Remember the primary-keys of the new probe subtree for the top-level equi join.
-        List<LogicalVariable> surrogateSubTreePKs = new ArrayList<LogicalVariable>();
-
-        // Copy probe subtree, replacing their variables with new ones. We will use the original variables
-        // to stitch together a top-level equi join.
-        Mutable<ILogicalOperator> originalProbeSubTreeRootRef = copyAndReinitProbeSubTree(probeSubTree, join
-                .getCondition().getValue(), optFuncExpr, originalSubTreePKs, surrogateSubTreePKs, context);
-
-        // Remember original live variables from the index sub tree.
-        List<LogicalVariable> indexSubTreeLiveVars = new ArrayList<LogicalVariable>();
-        VariableUtilities.getLiveVariables(indexSubTree.root, indexSubTreeLiveVars);
-
-        // Clone the original join condition because we may have to modify it (and we also need the original).
-        ILogicalExpression joinCond = join.getCondition().getValue().cloneExpression();
-        // Create "panic" (non indexed) nested-loop join path if necessary.
-        Mutable<ILogicalOperator> panicJoinRef = null;
-        Map<LogicalVariable, LogicalVariable> panicVarMap = null;
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
-                || optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
-            panicJoinRef = new MutableObject<ILogicalOperator>(joinRef.getValue());
-            panicVarMap = new HashMap<LogicalVariable, LogicalVariable>();
-            Mutable<ILogicalOperator> newProbeRootRef = createPanicNestedLoopJoinPlan(panicJoinRef, indexSubTree,
-                    probeSubTree, optFuncExpr, chosenIndex, panicVarMap, context);
-            probeSubTree.rootRef.setValue(newProbeRootRef.getValue());
-            probeSubTree.root = newProbeRootRef.getValue();
-        }
-        // Create regular indexed-nested loop join path.
-        ILogicalOperator indexPlanRootOp = createSecondaryToPrimaryPlan(indexSubTree, probeSubTree, chosenIndex,
-                optFuncExpr, true, isLeftOuterJoin, true, context);
-        indexSubTree.dataSourceRef.setValue(indexPlanRootOp);
-
-        // Change join into a select with the same condition.
-        SelectOperator topSelect = new SelectOperator(new MutableObject<ILogicalExpression>(joinCond), isLeftOuterJoin,
-                newNullPlaceHolderVar);
-        topSelect.getInputs().add(indexSubTree.rootRef);
-        topSelect.setExecutionMode(ExecutionMode.LOCAL);
-        context.computeAndSetTypeEnvironmentForOperator(topSelect);
-        ILogicalOperator topOp = topSelect;
-
-        // Hook up the indexed-nested loop join path with the "panic" (non indexed) nested-loop join path by putting a union all on top.
-        if (panicJoinRef != null) {
-            LogicalVariable inputSearchVar = getInputSearchVar(optFuncExpr, indexSubTree);
-            indexSubTreeLiveVars.addAll(originalSubTreePKs);
-            indexSubTreeLiveVars.add(inputSearchVar);
-            List<LogicalVariable> panicPlanLiveVars = new ArrayList<LogicalVariable>();
-            VariableUtilities.getLiveVariables(panicJoinRef.getValue(), panicPlanLiveVars);
-            // Create variable mapping for union all operator.
-            List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>();
-            for (int i = 0; i < indexSubTreeLiveVars.size(); i++) {
-                LogicalVariable indexSubTreeVar = indexSubTreeLiveVars.get(i);
-                LogicalVariable panicPlanVar = panicVarMap.get(indexSubTreeVar);
-                if (panicPlanVar == null) {
-                    panicPlanVar = indexSubTreeVar;
-                }
-                varMap.add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(indexSubTreeVar, panicPlanVar,
-                        indexSubTreeVar));
-            }
-            UnionAllOperator unionAllOp = new UnionAllOperator(varMap);
-            unionAllOp.getInputs().add(new MutableObject<ILogicalOperator>(topOp));
-            unionAllOp.getInputs().add(panicJoinRef);
-            unionAllOp.setExecutionMode(ExecutionMode.PARTITIONED);
-            context.computeAndSetTypeEnvironmentForOperator(unionAllOp);
-            topOp = unionAllOp;
-        }
-
-        // Place a top-level equi-join on top to retrieve the missing variables from the original probe subtree.
-        // The inner (build) branch of the join is the subtree with the data scan, since the result of the similarity join could potentially be big.
-        // This choice may not always be the most efficient, but it seems more robust than the alternative.
-        Mutable<ILogicalExpression> eqJoinConditionRef = createPrimaryKeysEqJoinCondition(originalSubTreePKs,
-                surrogateSubTreePKs);
-        InnerJoinOperator topEqJoin = new InnerJoinOperator(eqJoinConditionRef, originalProbeSubTreeRootRef,
-                new MutableObject<ILogicalOperator>(topOp));
-        topEqJoin.setExecutionMode(ExecutionMode.PARTITIONED);
-        joinRef.setValue(topEqJoin);
-        context.computeAndSetTypeEnvironmentForOperator(topEqJoin);
-
-        return true;
-    }
-
-    /**
-     * Copies the probeSubTree (using new variables), and reinitializes the probeSubTree to it.
-     * Accordingly replaces the variables in the given joinCond, and the optFuncExpr.
-     * Returns a reference to the original plan root.
-     */
-    private Mutable<ILogicalOperator> copyAndReinitProbeSubTree(OptimizableOperatorSubTree probeSubTree,
-            ILogicalExpression joinCond, IOptimizableFuncExpr optFuncExpr, List<LogicalVariable> originalSubTreePKs,
-            List<LogicalVariable> surrogateSubTreePKs, IOptimizationContext context) throws AlgebricksException {
-
-        probeSubTree.getPrimaryKeyVars(originalSubTreePKs);
-
-        // Create two copies of the original probe subtree.
-        // The first copy, which becomes the new probe subtree, will retain the primary-key and secondary-search key variables,
-        // but have all other variables replaced with new ones.
-        // The second copy, which will become an input to the top-level equi-join to resolve the surrogates,
-        // will have all primary-key and secondary-search keys replaced, but retains all other original variables.
-
-        // Variable replacement map for the first copy.
-        Map<LogicalVariable, LogicalVariable> newProbeSubTreeVarMap = new HashMap<LogicalVariable, LogicalVariable>();
-        // Variable replacement map for the second copy.
-        Map<LogicalVariable, LogicalVariable> joinInputSubTreeVarMap = new HashMap<LogicalVariable, LogicalVariable>();
-        // Init with all live vars.
-        List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
-        VariableUtilities.getLiveVariables(probeSubTree.root, liveVars);
-        for (LogicalVariable var : liveVars) {
-            joinInputSubTreeVarMap.put(var, var);
-        }
-        // Fill variable replacement maps.
-        for (int i = 0; i < optFuncExpr.getNumLogicalVars(); i++) {
-            joinInputSubTreeVarMap.put(optFuncExpr.getLogicalVar(i), context.newVar());
-            newProbeSubTreeVarMap.put(optFuncExpr.getLogicalVar(i), optFuncExpr.getLogicalVar(i));
-        }
-        for (int i = 0; i < originalSubTreePKs.size(); i++) {
-            LogicalVariable newPKVar = context.newVar();
-            surrogateSubTreePKs.add(newPKVar);
-            joinInputSubTreeVarMap.put(originalSubTreePKs.get(i), newPKVar);
-            newProbeSubTreeVarMap.put(originalSubTreePKs.get(i), originalSubTreePKs.get(i));
-        }
-
-        // Create first copy.
-        Counter firstCounter = new Counter(context.getVarCounter());
-        LogicalOperatorDeepCopyVisitor firstDeepCopyVisitor = new LogicalOperatorDeepCopyVisitor(firstCounter,
-                newProbeSubTreeVarMap);
-        ILogicalOperator newProbeSubTree = firstDeepCopyVisitor.deepCopy(probeSubTree.root, null);
-        inferTypes(newProbeSubTree, context);
-        Mutable<ILogicalOperator> newProbeSubTreeRootRef = new MutableObject<ILogicalOperator>(newProbeSubTree);
-        context.setVarCounter(firstCounter.get());
-        // Create second copy.
-        Counter secondCounter = new Counter(context.getVarCounter());
-        LogicalOperatorDeepCopyVisitor secondDeepCopyVisitor = new LogicalOperatorDeepCopyVisitor(secondCounter,
-                joinInputSubTreeVarMap);
-        ILogicalOperator joinInputSubTree = secondDeepCopyVisitor.deepCopy(probeSubTree.root, null);
-        inferTypes(joinInputSubTree, context);
-        probeSubTree.rootRef.setValue(joinInputSubTree);
-        context.setVarCounter(secondCounter.get());
-
-        // Remember the original probe subtree reference so we can return it.
-        Mutable<ILogicalOperator> originalProbeSubTreeRootRef = probeSubTree.rootRef;
-
-        // Replace the original probe subtree with its copy.
-        Dataset origDataset = probeSubTree.dataset;
-        ARecordType origRecordType = probeSubTree.recordType;
-        probeSubTree.initFromSubTree(newProbeSubTreeRootRef);
-        probeSubTree.dataset = origDataset;
-        probeSubTree.recordType = origRecordType;
-
-        // Replace the variables in the join condition based on the mapping of variables
-        // in the new probe subtree.
-        Map<LogicalVariable, LogicalVariable> varMapping = firstDeepCopyVisitor.getVariableMapping();
-        for (Map.Entry<LogicalVariable, LogicalVariable> varMapEntry : varMapping.entrySet()) {
-            if (varMapEntry.getKey() != varMapEntry.getValue()) {
-                joinCond.substituteVar(varMapEntry.getKey(), varMapEntry.getValue());
-            }
-        }
-        return originalProbeSubTreeRootRef;
-    }
-
-    private Mutable<ILogicalExpression> createPrimaryKeysEqJoinCondition(List<LogicalVariable> originalSubTreePKs,
-            List<LogicalVariable> surrogateSubTreePKs) {
-        List<Mutable<ILogicalExpression>> eqExprs = new ArrayList<Mutable<ILogicalExpression>>();
-        int numPKVars = originalSubTreePKs.size();
-        for (int i = 0; i < numPKVars; i++) {
-            List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
-            args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(surrogateSubTreePKs.get(i))));
-            args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(originalSubTreePKs.get(i))));
-            ILogicalExpression eqFunc = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.EQ), args);
-            eqExprs.add(new MutableObject<ILogicalExpression>(eqFunc));
-        }
-        if (eqExprs.size() == 1) {
-            return eqExprs.get(0);
-        } else {
-            ILogicalExpression andFunc = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.AND), eqExprs);
-            return new MutableObject<ILogicalExpression>(andFunc);
-        }
-    }
-
-    private Mutable<ILogicalOperator> createPanicNestedLoopJoinPlan(Mutable<ILogicalOperator> joinRef,
-            OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree,
-            IOptimizableFuncExpr optFuncExpr, Index chosenIndex, Map<LogicalVariable, LogicalVariable> panicVarMap,
-            IOptimizationContext context) throws AlgebricksException {
-        LogicalVariable inputSearchVar = getInputSearchVar(optFuncExpr, indexSubTree);
-
-        // We split the plan into two "branches", and add selections on each side.
-        AbstractLogicalOperator replicateOp = new ReplicateOperator(2);
-        replicateOp.getInputs().add(new MutableObject<ILogicalOperator>(probeSubTree.root));
-        replicateOp.setExecutionMode(ExecutionMode.PARTITIONED);
-        context.computeAndSetTypeEnvironmentForOperator(replicateOp);
-
-        // Create select ops for removing tuples that are filterable and not filterable, respectively.
-        IVariableTypeEnvironment probeTypeEnv = context.getOutputTypeEnvironment(probeSubTree.root);
-        IAType inputSearchVarType;
-        if (chosenIndex.isEnforcingKeyFileds())
-            inputSearchVarType = optFuncExpr.getFieldType(optFuncExpr.findLogicalVar(inputSearchVar));
-        else
-            inputSearchVarType = (IAType) probeTypeEnv.getVarType(inputSearchVar);
-        Mutable<ILogicalOperator> isFilterableSelectOpRef = new MutableObject<ILogicalOperator>();
-        Mutable<ILogicalOperator> isNotFilterableSelectOpRef = new MutableObject<ILogicalOperator>();
-        createIsFilterableSelectOps(replicateOp, inputSearchVar, inputSearchVarType, optFuncExpr, chosenIndex, context,
-                isFilterableSelectOpRef, isNotFilterableSelectOpRef);
-
-        List<LogicalVariable> originalLiveVars = new ArrayList<LogicalVariable>();
-        VariableUtilities.getLiveVariables(indexSubTree.root, originalLiveVars);
-
-        // Copy the scan subtree in indexSubTree.
-        Counter counter = new Counter(context.getVarCounter());
-        LogicalOperatorDeepCopyVisitor deepCopyVisitor = new LogicalOperatorDeepCopyVisitor(counter);
-        ILogicalOperator scanSubTree = deepCopyVisitor.deepCopy(indexSubTree.root, null);
-
-        context.setVarCounter(counter.get());
-        Map<LogicalVariable, LogicalVariable> copyVarMap = deepCopyVisitor.getVariableMapping();
-        panicVarMap.putAll(copyVarMap);
-
-        List<LogicalVariable> copyLiveVars = new ArrayList<LogicalVariable>();
-        VariableUtilities.getLiveVariables(scanSubTree, copyLiveVars);
-
-        // Replace the inputs of the given join op, and replace variables in its
-        // condition since we deep-copied one of the scanner subtrees which
-        // changed variables.
-        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
-        for (Map.Entry<LogicalVariable, LogicalVariable> entry : copyVarMap.entrySet()) {
-            joinOp.getCondition().getValue().substituteVar(entry.getKey(), entry.getValue());
-        }
-        joinOp.getInputs().clear();
-        joinOp.getInputs().add(new MutableObject<ILogicalOperator>(scanSubTree));
-        // Make sure that the build input (which may be materialized causing blocking) comes from
-        // the split+select, otherwise the plan will have a deadlock.
-        joinOp.getInputs().add(isNotFilterableSelectOpRef);
-        context.computeAndSetTypeEnvironmentForOperator(joinOp);
-
-        // Return the new root of the probeSubTree.
-        return isFilterableSelectOpRef;
-    }
-
-    private void createIsFilterableSelectOps(ILogicalOperator inputOp, LogicalVariable inputSearchVar,
-            IAType inputSearchVarType, IOptimizableFuncExpr optFuncExpr, Index chosenIndex,
-            IOptimizationContext context, Mutable<ILogicalOperator> isFilterableSelectOpRef,
-            Mutable<ILogicalOperator> isNotFilterableSelectOpRef) throws AlgebricksException {
-        // Create select operator for removing tuples that are not filterable.
-        // First determine the proper filter function and args based on the type of the input search var.
-        ILogicalExpression isFilterableExpr = null;
-        switch (inputSearchVarType.getTypeTag()) {
-            case STRING: {
-                List<Mutable<ILogicalExpression>> isFilterableArgs = new ArrayList<Mutable<ILogicalExpression>>(4);
-                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                        inputSearchVar)));
-                // Since we are optimizing a join, the similarity threshold should be the only constant in the optimizable function expression.
-                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(optFuncExpr
-                        .getConstantVal(0))));
-                isFilterableArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils
-                        .createInt32Constant(chosenIndex.getGramLength())));
-                boolean usePrePost = optFuncExpr.containsPartialField() ? false : true;
-                isFilterableArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils
-                        .createBooleanConstant(usePrePost)));
-                isFilterableExpr = new ScalarFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_STRING_IS_FILTERABLE),
-                        isFilterableArgs);
-                break;
-            }
-            case UNORDEREDLIST:
-            case ORDEREDLIST: {
-                List<Mutable<ILogicalExpression>> isFilterableArgs = new ArrayList<Mutable<ILogicalExpression>>(2);
-                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                        inputSearchVar)));
-                // Since we are optimizing a join, the similarity threshold should be the only constant in the optimizable function expression.
-                isFilterableArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(optFuncExpr
-                        .getConstantVal(0))));
-                isFilterableExpr = new ScalarFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_LIST_IS_FILTERABLE),
-                        isFilterableArgs);
-                break;
-            }
-            default: {
-                throw new AlgebricksException("Only string, ordered list, and unordered list types are supported.");
-            }
-        }
-
-        SelectOperator isFilterableSelectOp = new SelectOperator(
-                new MutableObject<ILogicalExpression>(isFilterableExpr), false, null);
-        isFilterableSelectOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
-        isFilterableSelectOp.setExecutionMode(ExecutionMode.LOCAL);
-        context.computeAndSetTypeEnvironmentForOperator(isFilterableSelectOp);
-
-        // Select operator for removing tuples that are filterable.
-        List<Mutable<ILogicalExpression>> isNotFilterableArgs = new ArrayList<Mutable<ILogicalExpression>>();
-        isNotFilterableArgs.add(new MutableObject<ILogicalExpression>(isFilterableExpr));
-        ILogicalExpression isNotFilterableExpr = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT), isNotFilterableArgs);
-        SelectOperator isNotFilterableSelectOp = new SelectOperator(new MutableObject<ILogicalExpression>(
-                isNotFilterableExpr), false, null);
-        isNotFilterableSelectOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
-        isNotFilterableSelectOp.setExecutionMode(ExecutionMode.LOCAL);
-        context.computeAndSetTypeEnvironmentForOperator(isNotFilterableSelectOp);
-
-        isFilterableSelectOpRef.setValue(isFilterableSelectOp);
-        isNotFilterableSelectOpRef.setValue(isNotFilterableSelectOp);
-    }
-
-    private void addSearchKeyType(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree indexSubTree,
-            IOptimizationContext context, InvertedIndexJobGenParams jobGenParams) throws AlgebricksException {
-        // If we have two variables in the optFuncExpr, then we are optimizing a join.
-        IAType type = null;
-        ATypeTag typeTag = null;
-        if (optFuncExpr.getNumLogicalVars() == 2) {
-            // Find the type of the variable that is going to feed into the index search.
-            if (optFuncExpr.getOperatorSubTree(0) == indexSubTree) {
-                // If the index is on a dataset in subtree 0, then subtree 1 will feed.
-                type = optFuncExpr.getFieldType(1);
-            } else {
-                // If the index is on a dataset in subtree 1, then subtree 0 will feed.
-                type = optFuncExpr.getFieldType(0);
-            }
-            typeTag = type.getTypeTag();
-        } else {
-            // We are optimizing a selection query. Add the type of the search key constant.
-            AsterixConstantValue constVal = (AsterixConstantValue) optFuncExpr.getConstantVal(0);
-            IAObject obj = constVal.getObject();
-            type = obj.getType();
-            typeTag = type.getTypeTag();
-            if (typeTag != ATypeTag.ORDEREDLIST && typeTag != ATypeTag.STRING && typeTag != ATypeTag.UNORDEREDLIST) {
-                throw new AlgebricksException("Only ordered list, string, and unordered list types are supported.");
-            }
-        }
-        jobGenParams.setSearchKeyType(typeTag);
-    }
-
-    private void addFunctionSpecificArgs(IOptimizableFuncExpr optFuncExpr, InvertedIndexJobGenParams jobGenParams) {
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
-            jobGenParams.setSearchModifierType(SearchModifierType.CONJUNCTIVE);
-            jobGenParams.setSimilarityThreshold(new AsterixConstantValue(ANull.NULL));
-        }
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK) {
-            jobGenParams.setSearchModifierType(SearchModifierType.JACCARD);
-            // Add the similarity threshold which, by convention, is the last constant value.
-            jobGenParams.setSimilarityThreshold(optFuncExpr.getConstantVal(optFuncExpr.getNumConstantVals() - 1));
-        }
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
-                || optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
-            if (optFuncExpr.containsPartialField()) {
-                jobGenParams.setSearchModifierType(SearchModifierType.CONJUNCTIVE_EDIT_DISTANCE);
-            } else {
-                jobGenParams.setSearchModifierType(SearchModifierType.EDIT_DISTANCE);
-            }
-            // Add the similarity threshold which, by convention, is the last constant value.
-            jobGenParams.setSimilarityThreshold(optFuncExpr.getConstantVal(optFuncExpr.getNumConstantVals() - 1));
-        }
-    }
-
-    private void addKeyVarsAndExprs(IOptimizableFuncExpr optFuncExpr, ArrayList<LogicalVariable> keyVarList,
-            ArrayList<Mutable<ILogicalExpression>> keyExprList, IOptimizationContext context)
-            throws AlgebricksException {
-        // For now we are assuming a single secondary index key.
-        // Add a variable and its expr to the lists which will be passed into an assign op.
-        LogicalVariable keyVar = context.newVar();
-        keyVarList.add(keyVar);
-        keyExprList.add(new MutableObject<ILogicalExpression>(new ConstantExpression(optFuncExpr.getConstantVal(0))));
-        return;
-    }
-
-    @Override
-    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) throws AlgebricksException {
-        if (optFuncExpr.getFuncExpr().getAnnotations()
-                .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
-            return false;
-        }
-
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK
-                || optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
-            return isEditDistanceFuncOptimizable(index, optFuncExpr);
-        }
-
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK) {
-            return isJaccardFuncOptimizable(index, optFuncExpr);
-        }
-
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
-            return isContainsFuncOptimizable(index, optFuncExpr);
-        }
-
-        return false;
-    }
-
-    private boolean isEditDistanceFuncOptimizable(Index index, IOptimizableFuncExpr optFuncExpr)
-            throws AlgebricksException {
-        if (optFuncExpr.getNumConstantVals() == 1) {
-            return isEditDistanceFuncJoinOptimizable(index, optFuncExpr);
-        } else {
-            return isEditDistanceFuncSelectOptimizable(index, optFuncExpr);
-        }
-    }
-
-    private boolean isEditDistanceFuncJoinOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
-        if (index.isEnforcingKeyFileds())
-            return isEditDistanceFuncCompatible(index.getKeyFieldTypes().get(0).getTypeTag(), index.getIndexType());
-        else
-            return isEditDistanceFuncCompatible(optFuncExpr.getFieldType(0).getTypeTag(), index.getIndexType());
-    }
-
-    private boolean isEditDistanceFuncCompatible(ATypeTag typeTag, IndexType indexType) {
-        // We can only optimize edit distance on strings using an ngram index.
-        if (typeTag == ATypeTag.STRING
-                && (indexType == IndexType.SINGLE_PARTITION_NGRAM_INVIX || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)) {
-            return true;
-        }
-        // We can only optimize edit distance on lists using a word index.
-        if ((typeTag == ATypeTag.ORDEREDLIST)
-                && (indexType == IndexType.SINGLE_PARTITION_WORD_INVIX || indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
-            return true;
-        }
-        return false;
-    }
-
-    private boolean isEditDistanceFuncSelectOptimizable(Index index, IOptimizableFuncExpr optFuncExpr)
-            throws AlgebricksException {
-
-        // Check for panic in selection query.
-        // TODO: Panic also depends on prePost which is currently hardcoded to be true.
-        AsterixConstantValue listOrStrConstVal = (AsterixConstantValue) optFuncExpr.getConstantVal(0);
-        IAObject listOrStrObj = listOrStrConstVal.getObject();
-        ATypeTag typeTag = listOrStrObj.getType().getTypeTag();
-
-        if (!isEditDistanceFuncCompatible(typeTag, index.getIndexType())) {
-            return false;
-        }
-
-        AsterixConstantValue intConstVal = (AsterixConstantValue) optFuncExpr.getConstantVal(1);
-        IAObject intObj = intConstVal.getObject();
-
-        AInt32 edThresh = null;
-        // Cast the numeric input object to the INT32 type.
-        try {
-            edThresh = (AInt32) ATypeHierarchy.convertNumericTypeObject(intObj, ATypeTag.INT32);
-        } catch (AsterixException e) {
-            throw new AlgebricksException(e);
-        }
-        int mergeThreshold = 0;
-
-        if (typeTag == ATypeTag.STRING) {
-            AString astr = (AString) listOrStrObj;
-            // Compute the merge threshold depending on whether the query grams use pre- and postfixing.
-            if (optFuncExpr.containsPartialField()) {
-                mergeThreshold = (astr.getStringValue().length() - index.getGramLength() + 1)
-                        - edThresh.getIntegerValue() * index.getGramLength();
-            } else {
-                mergeThreshold = (astr.getStringValue().length() + index.getGramLength() - 1)
-                        - edThresh.getIntegerValue() * index.getGramLength();
-            }
-        }
-
-        if ((typeTag == ATypeTag.ORDEREDLIST)
-                && (index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX || index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
-            IACollection alist = (IACollection) listOrStrObj;
-            // Compute merge threshold.
-            mergeThreshold = alist.size() - edThresh.getIntegerValue();
-        }
-        if (mergeThreshold <= 0) {
-            // We cannot use index to optimize expr.
-            return false;
-        }
-        return true;
-    }
-
-    private boolean isJaccardFuncOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
-        //TODO we need to split join and select cases in order to check join case more thoroughly.
-
-        int variableCount = optFuncExpr.getNumLogicalVars();
-
-        //check whether gram-tokens function is optimizable
-        ScalarFunctionCallExpression funcExpr = null;
-        for (int i = 0; i < variableCount; i++) {
-            funcExpr = findTokensFunc(AsterixBuiltinFunctions.GRAM_TOKENS, optFuncExpr, i);
-            if (funcExpr != null) {
-                return isJaccardFuncCompatible(funcExpr, optFuncExpr.getFieldType(i).getTypeTag(), index.getIndexType());
-            }
-        }
-
-        //check whether word-tokens function is optimizable
-        for (int i = 0; i < variableCount; i++) {
-            funcExpr = findTokensFunc(AsterixBuiltinFunctions.WORD_TOKENS, optFuncExpr, i);
-            if (funcExpr != null) {
-                return isJaccardFuncCompatible(funcExpr, optFuncExpr.getFieldType(i).getTypeTag(), index.getIndexType());
-            }
-        }
-
-        //check whether a search variable is optimizable
-        OptimizableOperatorSubTree subTree = null;
-        LogicalVariable targetVar = null;
-        for (int i = 0; i < variableCount; i++) {
-            subTree = optFuncExpr.getOperatorSubTree(i);
-            if (subTree == null)
-                continue;
-            targetVar = optFuncExpr.getLogicalVar(i);
-            if (targetVar == null)
-                continue;
-            return isJaccardFuncCompatible(optFuncExpr.getFuncExpr().getArguments().get(i).getValue(), optFuncExpr
-                    .getFieldType(i).getTypeTag(), index.getIndexType());
-        }
-
-        return false;
-    }
-
-    private ScalarFunctionCallExpression findTokensFunc(FunctionIdentifier funcId, IOptimizableFuncExpr optFuncExpr,
-            int subTreeIndex) {
-        //find either a gram-tokens or a word-tokens function that exists in optFuncExpr.subTrees' assignsAndUnnests
-        OptimizableOperatorSubTree subTree = null;
-        LogicalVariable targetVar = null;
-
-        subTree = optFuncExpr.getOperatorSubTree(subTreeIndex);
-        if (subTree == null) {
-            return null;
-        }
-
-        targetVar = optFuncExpr.getLogicalVar(subTreeIndex);
-        if (targetVar == null) {
-            return null;
-        }
-
-        for (AbstractLogicalOperator op : subTree.assignsAndUnnests) {
-            if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-                continue;
-            List<Mutable<ILogicalExpression>> exprList = ((AssignOperator) op).getExpressions();
-            for (Mutable<ILogicalExpression> expr : exprList) {
-                if (expr.getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL)
-                    continue;
-                AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr.getValue();
-                if (funcExpr.getFunctionIdentifier() != funcId)
-                    continue;
-                ILogicalExpression varExpr = funcExpr.getArguments().get(0).getValue();
-                if (varExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE)
-                    continue;
-                if (((VariableReferenceExpression) varExpr).getVariableReference() == targetVar)
-                    continue;
-                return (ScalarFunctionCallExpression) funcExpr;
-            }
-        }
-        return null;
-    }
-
-    private boolean isJaccardFuncCompatible(ILogicalExpression nonConstArg, ATypeTag typeTag, IndexType indexType) {
-        if (nonConstArg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression nonConstfuncExpr = (AbstractFunctionCallExpression) nonConstArg;
-            // We can use this index if the tokenization function matches the index type.
-            if (nonConstfuncExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.WORD_TOKENS
-                    && (indexType == IndexType.SINGLE_PARTITION_WORD_INVIX || indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
-                return true;
-            }
-            if (nonConstfuncExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.GRAM_TOKENS
-                    && (indexType == IndexType.SINGLE_PARTITION_NGRAM_INVIX || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)) {
-                return true;
-            }
-        }
-
-        if (nonConstArg.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            if ((typeTag == ATypeTag.ORDEREDLIST || typeTag == ATypeTag.UNORDEREDLIST)
-                    && (indexType == IndexType.SINGLE_PARTITION_WORD_INVIX || indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX)) {
-                return true;
-            }
-            // We assume that the given list variable does not contain an n-gram list, since that is unrealistic.
-        }
-        return false;
-    }
-
-    private boolean isContainsFuncOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
-        if (optFuncExpr.getNumLogicalVars() == 2) {
-            return isContainsFuncJoinOptimizable(index, optFuncExpr);
-        } else {
-            return isContainsFuncSelectOptimizable(index, optFuncExpr);
-        }
-    }
-
-    private boolean isContainsFuncSelectOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
-        AsterixConstantValue strConstVal = (AsterixConstantValue) optFuncExpr.getConstantVal(0);
-        IAObject strObj = strConstVal.getObject();
-        ATypeTag typeTag = strObj.getType().getTypeTag();
-
-        if (!isContainsFuncCompatible(typeTag, index.getIndexType())) {
-            return false;
-        }
-
-        // Check that the constant search string has at least gramLength characters.
-        if (strObj.getType().getTypeTag() == ATypeTag.STRING) {
-            AString astr = (AString) strObj;
-            if (astr.getStringValue().length() >= index.getGramLength()) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    private boolean isContainsFuncJoinOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
-        if (index.isEnforcingKeyFileds())
-            return isContainsFuncCompatible(index.getKeyFieldTypes().get(0).getTypeTag(), index.getIndexType());
-        else
-            return isContainsFuncCompatible(optFuncExpr.getFieldType(0).getTypeTag(), index.getIndexType());
-    }
-
-    private boolean isContainsFuncCompatible(ATypeTag typeTag, IndexType indexType) {
-        //We can only optimize contains with ngram indexes.
-        if ((typeTag == ATypeTag.STRING)
-                && (indexType == IndexType.SINGLE_PARTITION_NGRAM_INVIX || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)) {
-            return true;
-        }
-        return false;
-    }
-
-    public static IBinaryTokenizerFactory getBinaryTokenizerFactory(SearchModifierType searchModifierType,
-            ATypeTag searchKeyType, Index index) throws AlgebricksException {
-        switch (index.getIndexType()) {
-            case SINGLE_PARTITION_WORD_INVIX:
-            case LENGTH_PARTITIONED_WORD_INVIX: {
-                return AqlBinaryTokenizerFactoryProvider.INSTANCE.getWordTokenizerFactory(searchKeyType, false);
-            }
-            case SINGLE_PARTITION_NGRAM_INVIX:
-            case LENGTH_PARTITIONED_NGRAM_INVIX: {
-                // Make sure not to use pre- and postfixing for conjunctive searches.
-                boolean prePost = (searchModifierType == SearchModifierType.CONJUNCTIVE || searchModifierType == SearchModifierType.CONJUNCTIVE_EDIT_DISTANCE) ? false
-                        : true;
-                return AqlBinaryTokenizerFactoryProvider.INSTANCE.getNGramTokenizerFactory(searchKeyType,
-                        index.getGramLength(), prePost, false);
-            }
-            default: {
-                throw new AlgebricksException("Tokenizer not applicable to index kind '" + index.getIndexType() + "'.");
-            }
-        }
-    }
-
-    public static IInvertedIndexSearchModifierFactory getSearchModifierFactory(SearchModifierType searchModifierType,
-            IAObject simThresh, Index index) throws AlgebricksException {
-        switch (searchModifierType) {
-            case CONJUNCTIVE: {
-                return new ConjunctiveSearchModifierFactory();
-            }
-            case JACCARD: {
-                float jaccThresh = ((AFloat) simThresh).getFloatValue();
-                return new JaccardSearchModifierFactory(jaccThresh);
-            }
-            case EDIT_DISTANCE:
-            case CONJUNCTIVE_EDIT_DISTANCE: {
-                int edThresh = 0;
-                try {
-                    edThresh = ((AInt32) ATypeHierarchy.convertNumericTypeObject(simThresh, ATypeTag.INT32))
-                            .getIntegerValue();
-                } catch (AsterixException e) {
-                    throw new AlgebricksException(e);
-                }
-
-                switch (index.getIndexType()) {
-                    case SINGLE_PARTITION_NGRAM_INVIX:
-                    case LENGTH_PARTITIONED_NGRAM_INVIX: {
-                        // Edit distance on strings, filtered with overlapping grams.
-                        if (searchModifierType == SearchModifierType.EDIT_DISTANCE) {
-                            return new EditDistanceSearchModifierFactory(index.getGramLength(), edThresh);
-                        } else {
-                            return new ConjunctiveEditDistanceSearchModifierFactory(index.getGramLength(), edThresh);
-                        }
-                    }
-                    case SINGLE_PARTITION_WORD_INVIX:
-                    case LENGTH_PARTITIONED_WORD_INVIX: {
-                        // Edit distance on two lists. The list-elements are non-overlapping.
-                        if (searchModifierType == SearchModifierType.EDIT_DISTANCE) {
-                            return new ListEditDistanceSearchModifierFactory(edThresh);
-                        } else {
-                            return new ConjunctiveListEditDistanceSearchModifierFactory(edThresh);
-                        }
-                    }
-                    default: {
-                        throw new AlgebricksException("Incompatible search modifier '" + searchModifierType
-                                + "' for index type '" + index.getIndexType() + "'");
-                    }
-                }
-            }
-            default: {
-                throw new AlgebricksException("Unknown search modifier type '" + searchModifierType + "'.");
-            }
-        }
-    }
-
-    private void inferTypes(ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
-        for (Mutable<ILogicalOperator> childOpRef : op.getInputs()) {
-            inferTypes(childOpRef.getValue(), context);
-        }
-        context.computeAndSetTypeEnvironmentForOperator(op);
-    }
-}
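
The edit-distance "panic" test in isEditDistanceFuncSelectOptimizable above reduces to a small piece of
arithmetic over the query length, the index gram length, and the edit-distance threshold. The following is a
minimal standalone sketch of that arithmetic, mirroring only the formulas visible in the file above; the class
and method names are hypothetical and the sketch is not part of this change.

    public final class EditDistanceMergeThresholdSketch {

        // Merge threshold for an n-gram index on a string key: edit-distance-contains
        // (partialField == true) tokenizes without pre-/postfixing, so the query yields
        // (length - gramLength + 1) grams; otherwise it yields (length + gramLength - 1)
        // grams. Each edit can invalidate at most gramLength grams.
        static int stringMergeThreshold(String query, int gramLength, int edThresh, boolean partialField) {
            int numGrams = partialField
                    ? query.length() - gramLength + 1
                    : query.length() + gramLength - 1;
            return numGrams - edThresh * gramLength;
        }

        // Merge threshold for a keyword (word) index on a list key: one token per list element.
        static int listMergeThreshold(int listSize, int edThresh) {
            return listSize - edThresh;
        }

        public static void main(String[] args) {
            // A threshold <= 0 means a T-occurrence search cannot prune anything, which is the
            // case where the rule keeps the non-indexed "panic" nested-loop path.
            System.out.println(stringMergeThreshold("asterix", 3, 2, false)); // 3  -> index usable
            System.out.println(stringMergeThreshold("db", 3, 2, false));      // -2 -> panic
            System.out.println(listMergeThreshold(5, 2));                     // 3  -> index usable
        }
    }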

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
deleted file mode 100644
index 2bb9641..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/InvertedIndexJobGenParams.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexAccessMethod.SearchModifierType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
-
-/**
- * Helper class for reading and writing job-gen parameters for inverted-index access methods to
- * and from a list of function arguments, typically of an unnest-map.
- */
-public class InvertedIndexJobGenParams extends AccessMethodJobGenParams {
-
-    protected SearchModifierType searchModifierType;
-    protected IAlgebricksConstantValue similarityThreshold;
-    protected ATypeTag searchKeyType;
-    protected List<LogicalVariable> keyVarList;
-    protected List<LogicalVariable> nonKeyVarList;
-
-    public InvertedIndexJobGenParams() {
-    }
-
-    public InvertedIndexJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
-            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
-        super(indexName, indexType, dataverseName, datasetName, retainInput, retainNull, requiresBroadcast);
-    }
-
-    public void setSearchModifierType(SearchModifierType searchModifierType) {
-        this.searchModifierType = searchModifierType;
-    }
-
-    public void setSimilarityThreshold(IAlgebricksConstantValue similarityThreshold) {
-        this.similarityThreshold = similarityThreshold;
-    }
-
-    public void setSearchKeyType(ATypeTag searchKeyType) {
-        this.searchKeyType = searchKeyType;
-    }
-
-    public void setKeyVarList(List<LogicalVariable> keyVarList) {
-        this.keyVarList = keyVarList;
-    }
-
-    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        super.writeToFuncArgs(funcArgs);
-        // Write search modifier type.
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createInt32Constant(searchModifierType
-                .ordinal())));
-        // Write similarity threshold.
-        funcArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(similarityThreshold)));
-        // Write search key type.
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createInt32Constant(searchKeyType
-                .ordinal())));
-        // Write key var list.
-        writeVarList(keyVarList, funcArgs);
-        // Write non-key var list.
-        if (nonKeyVarList != null) {
-            writeVarList(nonKeyVarList, funcArgs);
-        }
-    }
-
-    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        super.readFromFuncArgs(funcArgs);
-        int index = super.getNumParams();
-        // Read search modifier type.
-        int searchModifierOrdinal = AccessMethodUtils.getInt32Constant(funcArgs.get(index));
-        searchModifierType = SearchModifierType.values()[searchModifierOrdinal];
-        // Read similarity threshold. Concrete type depends on search modifier.
-        similarityThreshold = ((AsterixConstantValue) ((ConstantExpression) funcArgs.get(index + 1).getValue())
-                .getValue());
-        // Read type of search key.
-        int typeTagOrdinal = AccessMethodUtils.getInt32Constant(funcArgs.get(index + 2));
-        searchKeyType = ATypeTag.values()[typeTagOrdinal];
-        // Read key var list.
-        keyVarList = new ArrayList<LogicalVariable>();
-        readVarList(funcArgs, index + 3, keyVarList);
-        // TODO: We could possibly simplify things if we did read the non-key var list here.
-        // We don't need to read the non-key var list.
-        nonKeyVarList = null;
-    }
-
-    public SearchModifierType getSearchModifierType() {
-        return searchModifierType;
-    }
-
-    public IAlgebricksConstantValue getSimilarityThreshold() {
-        return similarityThreshold;
-    }
-
-    public ATypeTag getSearchKeyType() {
-        return searchKeyType;
-    }
-
-    public List<LogicalVariable> getKeyVarList() {
-        return keyVarList;
-    }
-
-    public List<LogicalVariable> getNonKeyVarList() {
-        return nonKeyVarList;
-    }
-}
\ No newline at end of file
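
InvertedIndexJobGenParams above appends its fields positionally to the unnest-map's function arguments and
reads them back at offsets relative to the base parameter count, carrying enum values as int ordinals. A tiny
standalone sketch of that round trip is shown below; the names are invented for illustration (the real code
wraps every argument in Mutable<ILogicalExpression> and uses AccessMethodUtils helpers) and the sketch is not
part of this change.

    import java.util.ArrayList;
    import java.util.List;

    public final class JobGenParamsRoundTripSketch {

        enum SearchModifier { CONJUNCTIVE, JACCARD, EDIT_DISTANCE, CONJUNCTIVE_EDIT_DISTANCE }

        // Write side: append the extra parameters after the (already written) base parameters.
        static void writeExtraParams(List<Object> funcArgs, SearchModifier modifier, float simThreshold,
                int searchKeyTypeOrdinal) {
            funcArgs.add(modifier.ordinal());   // search modifier as its int ordinal
            funcArgs.add(simThreshold);         // similarity threshold constant
            funcArgs.add(searchKeyTypeOrdinal); // type tag of the search key, again as an ordinal
        }

        // Read side: positions are relative to the number of base parameters, as in readFromFuncArgs.
        static SearchModifier readModifier(List<Object> funcArgs, int numBaseParams) {
            return SearchModifier.values()[(Integer) funcArgs.get(numBaseParams)];
        }

        public static void main(String[] args) {
            List<Object> funcArgs = new ArrayList<>();
            writeExtraParams(funcArgs, SearchModifier.JACCARD, 0.8f, 2);
            System.out.println(readModifier(funcArgs, 0)); // JACCARD
        }
    }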

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java
deleted file mode 100644
index 340bf3a..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/OptimizableFuncExpr.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
-
-/**
- * General-purpose implementation of IOptimizableFuncExpr that supports any
- * number of constant args, variable args and field names.
- */
-public class OptimizableFuncExpr implements IOptimizableFuncExpr {
-    protected final AbstractFunctionCallExpression funcExpr;
-    protected final LogicalVariable[] logicalVars;
-    protected final LogicalVariable[] sourceVars;
-    protected final ILogicalExpression[] logicalExprs;
-    protected final List<List<String>> fieldNames;
-    protected final IAType[] fieldTypes;
-    protected final OptimizableOperatorSubTree[] subTrees;
-    protected final IAlgebricksConstantValue[] constantVals;
-    protected boolean partialField;
-
-    public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable[] logicalVars,
-            IAlgebricksConstantValue[] constantVals) {
-        this.funcExpr = funcExpr;
-        this.logicalVars = logicalVars;
-        this.sourceVars = new LogicalVariable[logicalVars.length];
-        this.logicalExprs = new ILogicalExpression[logicalVars.length];
-        this.constantVals = constantVals;
-        this.fieldNames = new ArrayList<List<String>>();
-        for (int i = 0; i < logicalVars.length; i++) {
-            fieldNames.add(new ArrayList<String>());
-        }
-        this.fieldTypes = new IAType[logicalVars.length];
-        this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
-
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
-            this.partialField = true;
-        } else {
-            this.partialField = false;
-        }
-    }
-
-    // Special, more convenient c'tor for simple binary functions.
-    public OptimizableFuncExpr(AbstractFunctionCallExpression funcExpr, LogicalVariable logicalVar,
-            IAlgebricksConstantValue constantVal) {
-        this.funcExpr = funcExpr;
-        this.logicalVars = new LogicalVariable[] { logicalVar };
-        this.sourceVars = new LogicalVariable[1];
-        this.logicalExprs = new ILogicalExpression[1];
-        this.constantVals = new IAlgebricksConstantValue[] { constantVal };
-        this.fieldNames = new ArrayList<List<String>>();
-        for (int i = 0; i < logicalVars.length; i++) {
-            fieldNames.add(new ArrayList<String>());
-        }
-        this.fieldTypes = new IAType[logicalVars.length];
-        this.subTrees = new OptimizableOperatorSubTree[logicalVars.length];
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
-            this.partialField = true;
-        } else {
-            this.partialField = false;
-        }
-    }
-
-    @Override
-    public AbstractFunctionCallExpression getFuncExpr() {
-        return funcExpr;
-    }
-
-    @Override
-    public int getNumLogicalVars() {
-        return logicalVars.length;
-    }
-
-    @Override
-    public int getNumConstantVals() {
-        return constantVals.length;
-    }
-
-    @Override
-    public LogicalVariable getLogicalVar(int index) {
-        return logicalVars[index];
-    }
-
-    @Override
-    public void setLogicalExpr(int index, ILogicalExpression logExpr) {
-        logicalExprs[index] = logExpr;
-    }
-
-    @Override
-    public ILogicalExpression getLogicalExpr(int index) {
-        return logicalExprs[index];
-    }
-
-    @Override
-    public void setFieldName(int index, List<String> fieldName) {
-        fieldNames.set(index, fieldName);
-    }
-
-    @Override
-    public List<String> getFieldName(int index) {
-        return fieldNames.get(index);
-    }
-
-    @Override
-    public void setFieldType(int index, IAType fieldType) {
-        fieldTypes[index] = fieldType;
-    }
-
-    @Override
-    public IAType getFieldType(int index) {
-        return fieldTypes[index];
-    }
-
-    @Override
-    public IAlgebricksConstantValue getConstantVal(int index) {
-        return constantVals[index];
-    }
-
-    @Override
-    public int findLogicalVar(LogicalVariable var) {
-        for (int i = 0; i < logicalVars.length; i++) {
-            if (var == logicalVars[i]) {
-                return i;
-            }
-        }
-        return -1;
-    }
-
-    @Override
-    public int findFieldName(List<String> fieldName) {
-        for (int i = 0; i < fieldNames.size(); i++) {
-            if (fieldName.equals(fieldNames.get(i))) {
-                return i;
-            }
-        }
-        return -1;
-    }
-
-    @Override
-    public void setOptimizableSubTree(int index, OptimizableOperatorSubTree subTree) {
-        subTrees[index] = subTree;
-    }
-
-    @Override
-    public OptimizableOperatorSubTree getOperatorSubTree(int index) {
-        return subTrees[index];
-    }
-
-    @Override
-    public void substituteVar(LogicalVariable original, LogicalVariable substitution) {
-        if (logicalVars != null) {
-            for (int i = 0; i < logicalVars.length; i++) {
-                if (logicalVars[i] == original) {
-                    logicalVars[i] = substitution;
-                    break;
-                }
-            }
-        }
-    }
-
-    @Override
-    public void setPartialField(boolean partialField) {
-        this.partialField = partialField;
-    }
-
-    @Override
-    public boolean containsPartialField() {
-        return partialField;
-    }
-
-    @Override
-    public int hashCode() {
-        return funcExpr.hashCode();
-
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (!(o instanceof OptimizableFuncExpr))
-            return false;
-        else
-            return funcExpr.equals(((OptimizableFuncExpr) o).funcExpr);
-
-    }
-
-    public void setSourceVar(int index, LogicalVariable var) {
-        sourceVars[index] = var;
-    }
-
-    @Override
-    public LogicalVariable getSourceVar(int index) {
-        return sourceVars[index];
-    }
-
-}
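
A small standalone sketch (hypothetical class, plain Java) of the equals/hashCode delegation used by OptimizableFuncExpr above: when equality is delegated to a wrapped field, the other object has to be unwrapped first so that a.equals(b) and b.equals(a) agree, and hashCode stays consistent with equals.

    import java.util.Objects;

    public class ExprWrapperSketch {
        private final String funcExpr; // stands in for the wrapped function-call expression

        public ExprWrapperSketch(String funcExpr) {
            this.funcExpr = funcExpr;
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(funcExpr);
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (!(o instanceof ExprWrapperSketch)) {
                return false;
            }
            // Compare field to field, not field to wrapper.
            return Objects.equals(funcExpr, ((ExprWrapperSketch) o).funcExpr);
        }

        public static void main(String[] args) {
            ExprWrapperSketch a = new ExprWrapperSketch("similarity-jaccard($x, $y)");
            ExprWrapperSketch b = new ExprWrapperSketch("similarity-jaccard($x, $y)");
            System.out.println(a.equals(b) && b.equals(a)); // true, and hash codes match
        }
    }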



[49/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java
deleted file mode 100644
index 12b2723..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexAccessMethod;
-import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexAccessMethod.SearchModifierType;
-import edu.uci.ics.asterix.optimizer.rules.am.InvertedIndexJobGenParams;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-
-/**
- * Contributes the runtime operator for an unnest-map representing an
- * inverted-index search.
- */
-public class InvertedIndexPOperator extends IndexSearchPOperator {
-    private final boolean isPartitioned;
-
-    public InvertedIndexPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast,
-            boolean isPartitioned) {
-        super(idx, requiresBroadcast);
-        this.isPartitioned = isPartitioned;
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        if (isPartitioned) {
-            return PhysicalOperatorTag.LENGTH_PARTITIONED_INVERTED_INDEX_SEARCH;
-        } else {
-            return PhysicalOperatorTag.SINGLE_PARTITION_INVERTED_INDEX_SEARCH;
-        }
-    }
-
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-        UnnestMapOperator unnestMapOp = (UnnestMapOperator) op;
-        ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-        if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            throw new IllegalStateException();
-        }
-        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
-        if (unnestFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.INDEX_SEARCH) {
-            return;
-        }
-        InvertedIndexJobGenParams jobGenParams = new InvertedIndexJobGenParams();
-        jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
-
-        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-        Dataset dataset;
-        try {
-            dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(),
-                    jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
-        } catch (MetadataException e) {
-            throw new AlgebricksException(e);
-        }
-        int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
-
-        int[] minFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMinFilterVars(), inputSchemas);
-        int[] maxFilterFieldIndexes = getKeyIndexes(unnestMapOp.getMaxFilterVars(), inputSchemas);
-        // Build runtime.
-        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> invIndexSearch = buildInvertedIndexRuntime(
-                metadataProvider, context, builder.getJobSpec(), unnestMapOp, opSchema, jobGenParams.getRetainInput(),
-                jobGenParams.getRetainNull(), jobGenParams.getDatasetName(), dataset, jobGenParams.getIndexName(),
-                jobGenParams.getSearchKeyType(), keyIndexes, jobGenParams.getSearchModifierType(),
-                jobGenParams.getSimilarityThreshold(), minFilterFieldIndexes, maxFilterFieldIndexes);
-
-        // Contribute operator in hyracks job.
-        builder.contributeHyracksOperator(unnestMapOp, invIndexSearch.first);
-        builder.contributeAlgebricksPartitionConstraint(invIndexSearch.first, invIndexSearch.second);
-        ILogicalOperator srcExchange = unnestMapOp.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(srcExchange, 0, unnestMapOp, 0);
-    }
-
-    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildInvertedIndexRuntime(
-            AqlMetadataProvider metadataProvider, JobGenContext context, JobSpecification jobSpec,
-            UnnestMapOperator unnestMap, IOperatorSchema opSchema, boolean retainInput, boolean retainNull,
-            String datasetName, Dataset dataset, String indexName, ATypeTag searchKeyType, int[] keyFields,
-            SearchModifierType searchModifierType, IAlgebricksConstantValue similarityThreshold,
-            int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes) throws AlgebricksException {
-
-        try {
-            IAObject simThresh = ((AsterixConstantValue) similarityThreshold).getObject();
-            IAType itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
-                    dataset.getDataverseName(), dataset.getItemTypeName()).getDatatype();
-            int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
-            Index secondaryIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
-                    dataset.getDataverseName(), dataset.getDatasetName(), indexName);
-            if (secondaryIndex == null) {
-                throw new AlgebricksException("Code generation error: no index " + indexName + " for dataset "
-                        + datasetName);
-            }
-            List<List<String>> secondaryKeyFieldEntries = secondaryIndex.getKeyFieldNames();
-            List<IAType> secondaryKeyTypeEntries = secondaryIndex.getKeyFieldTypes();
-            int numSecondaryKeys = secondaryKeyFieldEntries.size();
-            if (numSecondaryKeys != 1) {
-                throw new AlgebricksException(
-                        "Cannot use "
-                                + numSecondaryKeys
-                                + " fields as a key for an inverted index. There can be only one field as a key for the inverted index.");
-            }
-            if (itemType.getTypeTag() != ATypeTag.RECORD) {
-                throw new AlgebricksException("Only record types can be indexed.");
-            }
-            ARecordType recordType = (ARecordType) itemType;
-            Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypeEntries.get(0),
-                    secondaryKeyFieldEntries.get(0), recordType);
-            IAType secondaryKeyType = keyPairType.first;
-            if (secondaryKeyType == null) {
-                throw new AlgebricksException("Could not find field " + secondaryKeyFieldEntries.get(0)
-                        + " in the schema.");
-            }
-
-            // TODO: For now we assume the type of the generated tokens is the
-            // same as the indexed field.
-            // We need a better way of expressing this because tokens may be
-            // hashed, or an inverted-index may index a list type, etc.
-            int numTokenKeys = (!isPartitioned) ? numSecondaryKeys : numSecondaryKeys + 1;
-            ITypeTraits[] tokenTypeTraits = new ITypeTraits[numTokenKeys];
-            IBinaryComparatorFactory[] tokenComparatorFactories = new IBinaryComparatorFactory[numTokenKeys];
-            for (int i = 0; i < numSecondaryKeys; i++) {
-                tokenComparatorFactories[i] = NonTaggedFormatUtil.getTokenBinaryComparatorFactory(secondaryKeyType);
-                tokenTypeTraits[i] = NonTaggedFormatUtil.getTokenTypeTrait(secondaryKeyType);
-            }
-            if (isPartitioned) {
-                // The partitioning field is hardcoded to be a short *without* an Asterix type tag.
-                tokenComparatorFactories[numSecondaryKeys] = PointableBinaryComparatorFactory
-                        .of(ShortPointable.FACTORY);
-                tokenTypeTraits[numSecondaryKeys] = ShortPointable.TYPE_TRAITS;
-            }
-
-            IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
-            List<LogicalVariable> outputVars = unnestMap.getVariables();
-            if (retainInput) {
-                outputVars = new ArrayList<LogicalVariable>();
-                VariableUtilities.getLiveVariables(unnestMap, outputVars);
-            }
-            RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
-
-            int start = outputRecDesc.getFieldCount() - numPrimaryKeys;
-            IBinaryComparatorFactory[] invListsComparatorFactories = JobGenHelper
-                    .variablesToAscBinaryComparatorFactories(outputVars, start, numPrimaryKeys, typeEnv, context);
-            ITypeTraits[] invListsTypeTraits = JobGenHelper.variablesToTypeTraits(outputVars, start, numPrimaryKeys,
-                    typeEnv, context);
-
-            ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, recordType);
-            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, recordType, context.getBinaryComparatorFactoryProvider());
-
-            int[] filterFields = null;
-            int[] invertedIndexFields = null;
-            int[] filterFieldsForNonBulkLoadOps = null;
-            int[] invertedIndexFieldsForNonBulkLoadOps = null;
-            if (filterTypeTraits != null) {
-                filterFields = new int[1];
-                filterFields[0] = numTokenKeys + numPrimaryKeys;
-                invertedIndexFields = new int[numTokenKeys + numPrimaryKeys];
-                for (int k = 0; k < invertedIndexFields.length; k++) {
-                    invertedIndexFields[k] = k;
-                }
-
-                filterFieldsForNonBulkLoadOps = new int[1];
-                filterFieldsForNonBulkLoadOps[0] = numPrimaryKeys + numSecondaryKeys;
-                invertedIndexFieldsForNonBulkLoadOps = new int[numPrimaryKeys + numSecondaryKeys];
-                for (int k = 0; k < invertedIndexFieldsForNonBulkLoadOps.length; k++) {
-                    invertedIndexFieldsForNonBulkLoadOps[k] = k;
-                }
-            }
-
-            IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
-            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                    .splitProviderAndPartitionConstraintsForDataset(dataset.getDataverseName(), datasetName, indexName,
-                            dataset.getDatasetDetails().isTemp());
-            // TODO: Here we assume there is only one search key field.
-            int queryField = keyFields[0];
-            // Get tokenizer and search modifier factories.
-            IInvertedIndexSearchModifierFactory searchModifierFactory = InvertedIndexAccessMethod
-                    .getSearchModifierFactory(searchModifierType, simThresh, secondaryIndex);
-            IBinaryTokenizerFactory queryTokenizerFactory = InvertedIndexAccessMethod.getBinaryTokenizerFactory(
-                    searchModifierType, searchKeyType, secondaryIndex);
-            IIndexDataflowHelperFactory dataflowHelperFactory;
-
-            AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
-                    dataset, metadataProvider.getMetadataTxnContext());
-            boolean temp = dataset.getDatasetDetails().isTemp();
-            if (!isPartitioned) {
-                dataflowHelperFactory = new LSMInvertedIndexDataflowHelperFactory(
-                        new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
-                        compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
-                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                        invertedIndexFieldsForNonBulkLoadOps, !temp);
-            } else {
-                dataflowHelperFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
-                        new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
-                        compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
-                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                        invertedIndexFieldsForNonBulkLoadOps, !temp);
-            }
-            LSMInvertedIndexSearchOperatorDescriptor invIndexSearchOp = new LSMInvertedIndexSearchOperatorDescriptor(
-                    jobSpec, queryField, appContext.getStorageManagerInterface(), secondarySplitsAndConstraint.first,
-                    appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
-                    invListsTypeTraits, invListsComparatorFactories, dataflowHelperFactory, queryTokenizerFactory,
-                    searchModifierFactory, outputRecDesc, retainInput, retainNull, context.getNullWriterFactory(),
-                    NoOpOperationCallbackFactory.INSTANCE, minFilterFieldIndexes, maxFilterFieldIndexes);
-
-            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(invIndexSearchOp,
-                    secondarySplitsAndConstraint.second);
-        } catch (MetadataException e) {
-            throw new AlgebricksException(e);
-        }
-    }
-}
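
A minimal plain-Java sketch (hypothetical names) of the field-index arithmetic in buildInvertedIndexRuntime above: a length-partitioned index carries one extra token field (the partitioning short), and when a filter is present its single field is appended after the token and primary-key fields.

    import java.util.Arrays;

    public class InvertedIndexFieldLayoutSketch {
        static int numTokenKeys(int numSecondaryKeys, boolean isPartitioned) {
            // The partitioned variant adds one hidden partitioning field.
            return isPartitioned ? numSecondaryKeys + 1 : numSecondaryKeys;
        }

        static int[] invertedIndexFields(int numSecondaryKeys, int numPrimaryKeys, boolean isPartitioned) {
            int[] fields = new int[numTokenKeys(numSecondaryKeys, isPartitioned) + numPrimaryKeys];
            for (int k = 0; k < fields.length; k++) {
                fields[k] = k; // index fields come first, in order
            }
            return fields;
        }

        static int filterField(int numSecondaryKeys, int numPrimaryKeys, boolean isPartitioned) {
            // The single filter value sits right after the token and primary-key fields.
            return numTokenKeys(numSecondaryKeys, isPartitioned) + numPrimaryKeys;
        }

        public static void main(String[] args) {
            // One token key and one primary key on a length-partitioned index:
            System.out.println(Arrays.toString(invertedIndexFields(1, 1, true))); // [0, 1, 2]
            System.out.println(filterField(1, 1, true)); // 3
        }
    }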

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/RTreeSearchPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/RTreeSearchPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/RTreeSearchPOperator.java
deleted file mode 100644
index 33f906c..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/RTreeSearchPOperator.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.optimizer.rules.am.RTreeJobGenParams;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-
-/**
- * Contributes the runtime operator for an unnest-map representing an RTree
- * search.
- */
-public class RTreeSearchPOperator extends IndexSearchPOperator {
-
-    public RTreeSearchPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast) {
-        super(idx, requiresBroadcast);
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        return PhysicalOperatorTag.RTREE_SEARCH;
-    }
-
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-        UnnestMapOperator unnestMap = (UnnestMapOperator) op;
-        ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
-        if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            throw new IllegalStateException();
-        }
-        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
-        FunctionIdentifier funcIdent = unnestFuncExpr.getFunctionIdentifier();
-        if (!funcIdent.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-            return;
-        }
-        RTreeJobGenParams jobGenParams = new RTreeJobGenParams();
-        jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
-        int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
-
-        int[] minFilterFieldIndexes = getKeyIndexes(unnestMap.getMinFilterVars(), inputSchemas);
-        int[] maxFilterFieldIndexes = getKeyIndexes(unnestMap.getMaxFilterVars(), inputSchemas);
-
-        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-        Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
-        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap);
-        List<LogicalVariable> outputVars = unnestMap.getVariables();
-        if (jobGenParams.getRetainInput()) {
-            outputVars = new ArrayList<LogicalVariable>();
-            VariableUtilities.getLiveVariables(unnestMap, outputVars);
-        }
-        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> rtreeSearch = mp.buildRtreeRuntime(
-                builder.getJobSpec(), outputVars, opSchema, typeEnv, context, jobGenParams.getRetainInput(),
-                jobGenParams.getRetainNull(), dataset, jobGenParams.getIndexName(), keyIndexes, minFilterFieldIndexes,
-                maxFilterFieldIndexes);
-
-        builder.contributeHyracksOperator(unnestMap, rtreeSearch.first);
-        builder.contributeAlgebricksPartitionConstraint(rtreeSearch.first, rtreeSearch.second);
-        ILogicalOperator srcExchange = unnestMap.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(srcExchange, 0, unnestMap, 0);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java
deleted file mode 100644
index 99e8f25..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/jobgen/AqlLogicalExpressionJobGen.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.jobgen;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionDescriptorTag;
-import edu.uci.ics.asterix.external.library.ExternalFunctionDescriptorProvider;
-import edu.uci.ics.asterix.formats.base.IDataFormat;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
-import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.comparisons.ComparisonEvalFactory;
-import edu.uci.ics.asterix.runtime.formats.FormatUtils;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ILogicalExpressionJobGen;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions.ComparisonKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyAggregateFunctionFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyRunningAggregateFunctionFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopySerializableAggregateFunctionFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyUnnestingFunctionFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory;
-
-public class AqlLogicalExpressionJobGen implements ILogicalExpressionJobGen {
-
-    public static final AqlLogicalExpressionJobGen INSTANCE = new AqlLogicalExpressionJobGen();
-
-    private AqlLogicalExpressionJobGen() {
-    }
-
-    @Override
-    public ICopyAggregateFunctionFactory createAggregateFunctionFactory(AggregateFunctionCallExpression expr,
-            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
-            throws AlgebricksException {
-        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
-        IFunctionDescriptor fd = getFunctionDescriptor(expr, env, context);
-        switch (fd.getFunctionDescriptorTag()) {
-            case SERIALAGGREGATE:
-                return null;
-            case AGGREGATE:
-                return fd.createAggregateFunctionFactory(args);
-            default:
-                throw new IllegalStateException("Invalid function descriptor " + fd.getFunctionDescriptorTag()
-                        + " expected " + FunctionDescriptorTag.SERIALAGGREGATE + " or "
-                        + FunctionDescriptorTag.AGGREGATE);
-        }
-    }
-
-    @Override
-    public ICopyRunningAggregateFunctionFactory createRunningAggregateFunctionFactory(
-            StatefulFunctionCallExpression expr, IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas,
-            JobGenContext context) throws AlgebricksException {
-        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
-        return getFunctionDescriptor(expr, env, context).createRunningAggregateFunctionFactory(args);
-    }
-
-    @Override
-    public ICopyUnnestingFunctionFactory createUnnestingFunctionFactory(UnnestingFunctionCallExpression expr,
-            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
-            throws AlgebricksException {
-        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
-        return getFunctionDescriptor(expr, env, context).createUnnestingFunctionFactory(args);
-    }
-
-    @Override
-    public ICopyEvaluatorFactory createEvaluatorFactory(ILogicalExpression expr, IVariableTypeEnvironment env,
-            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
-        ICopyEvaluatorFactory copyEvaluatorFactory = null;
-        switch (expr.getExpressionTag()) {
-            case VARIABLE: {
-                VariableReferenceExpression v = (VariableReferenceExpression) expr;
-                copyEvaluatorFactory = createVariableEvaluatorFactory(v, inputSchemas, context);
-                return copyEvaluatorFactory;
-            }
-            case CONSTANT: {
-                ConstantExpression c = (ConstantExpression) expr;
-                copyEvaluatorFactory = createConstantEvaluatorFactory(c, inputSchemas, context);
-                return copyEvaluatorFactory;
-            }
-            case FUNCTION_CALL: {
-                copyEvaluatorFactory = createScalarFunctionEvaluatorFactory((AbstractFunctionCallExpression) expr, env,
-                        inputSchemas, context);
-                return copyEvaluatorFactory;
-            }
-            default:
-                throw new IllegalStateException();
-        }
-
-    }
-
-    private ICopyEvaluatorFactory createVariableEvaluatorFactory(VariableReferenceExpression expr,
-            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
-        LogicalVariable variable = expr.getVariableReference();
-        for (IOperatorSchema scm : inputSchemas) {
-            int pos = scm.findVariable(variable);
-            if (pos >= 0) {
-                return new ColumnAccessEvalFactory(pos);
-            }
-        }
-        throw new AlgebricksException("Variable " + variable + " could not be found in any input schema.");
-    }
-
-    private ICopyEvaluatorFactory createScalarFunctionEvaluatorFactory(AbstractFunctionCallExpression expr,
-            IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
-            throws AlgebricksException {
-        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
-        FunctionIdentifier fi = expr.getFunctionIdentifier();
-        ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(fi);
-        if (ck != null) {
-            return new ComparisonEvalFactory(args[0], args[1], ck);
-        }
-
-        IFunctionDescriptor fd = null;
-        if (!(expr.getFunctionInfo() instanceof IExternalFunctionInfo)) {
-            AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-            IDataFormat format = FormatUtils.getDefaultFormat();
-            fd = format.resolveFunction(expr, env);
-        } else {
-            try {
-                fd = ExternalFunctionDescriptorProvider.getExternalFunctionDescriptor((IExternalFunctionInfo) expr
-                        .getFunctionInfo());
-            } catch (AsterixException ae) {
-                throw new AlgebricksException(ae);
-            }
-        }
-        return fd.createEvaluatorFactory(args);
-    }
-
-    private ICopyEvaluatorFactory createConstantEvaluatorFactory(ConstantExpression expr,
-            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
-        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-        IDataFormat format = FormatUtils.getDefaultFormat();
-        return format.getConstantEvalFactory(expr.getValue());
-    }
-
-    private ICopyEvaluatorFactory[] codegenArguments(AbstractFunctionCallExpression expr, IVariableTypeEnvironment env,
-            IOperatorSchema[] inputSchemas, JobGenContext context) throws AlgebricksException {
-        List<Mutable<ILogicalExpression>> arguments = expr.getArguments();
-        int n = arguments.size();
-        ICopyEvaluatorFactory[] args = new ICopyEvaluatorFactory[n];
-        int i = 0;
-        for (Mutable<ILogicalExpression> a : arguments) {
-            args[i++] = createEvaluatorFactory(a.getValue(), env, inputSchemas, context);
-        }
-        return args;
-    }
-
-    @Override
-    public ICopySerializableAggregateFunctionFactory createSerializableAggregateFunctionFactory(
-            AggregateFunctionCallExpression expr, IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas,
-            JobGenContext context) throws AlgebricksException {
-        ICopyEvaluatorFactory[] args = codegenArguments(expr, env, inputSchemas, context);
-        IFunctionDescriptor fd = getFunctionDescriptor(expr, env, context);
-
-        switch (fd.getFunctionDescriptorTag()) {
-            case AGGREGATE: {
-                if (AsterixBuiltinFunctions.isAggregateFunctionSerializable(fd.getIdentifier())) {
-                    AggregateFunctionCallExpression serialAggExpr = AsterixBuiltinFunctions
-                            .makeSerializableAggregateFunctionExpression(fd.getIdentifier(), expr.getArguments());
-                    IFunctionDescriptor afdd = getFunctionDescriptor(serialAggExpr, env, context);
-                    return afdd.createSerializableAggregateFunctionFactory(args);
-                } else {
-                    throw new AlgebricksException(
-                            "Trying to create a serializable aggregate from a non-serializable aggregate function descriptor. (fi="
-                                    + expr.getFunctionIdentifier() + ")");
-                }
-            }
-            case SERIALAGGREGATE: {
-                return fd.createSerializableAggregateFunctionFactory(args);
-            }
-
-            default:
-                throw new IllegalStateException("Invalid function descriptor " + fd.getFunctionDescriptorTag()
-                        + " expected " + FunctionDescriptorTag.SERIALAGGREGATE + " or "
-                        + FunctionDescriptorTag.AGGREGATE);
-        }
-    }
-
-    private IFunctionDescriptor getFunctionDescriptor(AbstractFunctionCallExpression expr,
-            IVariableTypeEnvironment env, JobGenContext context) throws AlgebricksException {
-        IFunctionDescriptor fd;
-        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-        fd = FormatUtils.getDefaultFormat().resolveFunction(expr, env);
-        return fd;
-    }
-
-}
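
A minimal standalone sketch (modern Java, hypothetical expression types) of the dispatch in createEvaluatorFactory above: branch on the kind of logical expression, and for a function call first code-generate each argument recursively before building the call itself.

    import java.util.List;
    import java.util.stream.Collectors;

    public class ExprCodegenSketch {
        interface Expr {}
        record Variable(String name) implements Expr {}
        record Constant(Object value) implements Expr {}
        record Call(String function, List<Expr> args) implements Expr {}

        static String codegen(Expr e) {
            if (e instanceof Variable v) {
                return "column(" + v.name() + ")";          // column-access evaluator
            } else if (e instanceof Constant c) {
                return "const(" + c.value() + ")";          // constant evaluator
            } else if (e instanceof Call call) {
                String args = call.args().stream().map(ExprCodegenSketch::codegen)
                        .collect(Collectors.joining(", ")); // the codegenArguments analogue
                return call.function() + "(" + args + ")";
            }
            throw new IllegalStateException("unknown expression kind");
        }

        public static void main(String[] unused) {
            Expr e = new Call("ge", List.of(new Variable("x"), new Constant(3)));
            System.out.println(codegen(e)); // ge(column(x), const(3))
        }
    }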

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/AnalysisUtil.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/AnalysisUtil.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/AnalysisUtil.java
deleted file mode 100644
index 2d3ca0a..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/AnalysisUtil.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.base;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-
-public class AnalysisUtil {
-    /*
-     * If the first child of op is of type opType, then it returns that child,
-     * o/w returns null.
-     */
-    public final static ILogicalOperator firstChildOfType(AbstractLogicalOperator op, LogicalOperatorTag opType) {
-        List<Mutable<ILogicalOperator>> ins = op.getInputs();
-        if (ins == null || ins.isEmpty()) {
-            return null;
-        }
-        Mutable<ILogicalOperator> opRef2 = ins.get(0);
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
-        if (op2.getOperatorTag() == opType) {
-            return op2;
-        } else {
-            return null;
-        }
-    }
-
-    public static int numberOfVarsInExpr(ILogicalExpression e) {
-        switch (((AbstractLogicalExpression) e).getExpressionTag()) {
-            case CONSTANT: {
-                return 0;
-            }
-            case FUNCTION_CALL: {
-                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) e;
-                int s = 0;
-                for (Mutable<ILogicalExpression> arg : f.getArguments()) {
-                    s += numberOfVarsInExpr(arg.getValue());
-                }
-                return s;
-            }
-            case VARIABLE: {
-                return 1;
-            }
-            default: {
-                assert false;
-                throw new IllegalArgumentException();
-            }
-        }
-    }
-
-    public static boolean isRunnableFieldAccessFunction(FunctionIdentifier fid) {
-        return fieldAccessFunctions.contains(fid);
-    }
-
-    public static boolean isDataSetCall(ILogicalExpression e) {
-        if (((AbstractLogicalExpression) e).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression fe = (AbstractFunctionCallExpression) e;
-        return AsterixBuiltinFunctions.isDatasetFunction(fe.getFunctionIdentifier());
-    }
-
-    public static boolean isRunnableAccessToFieldRecord(ILogicalExpression expr) {
-        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression fc = (AbstractFunctionCallExpression) expr;
-            FunctionIdentifier fid = fc.getFunctionIdentifier();
-            if (AnalysisUtil.isRunnableFieldAccessFunction(fid)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    public static boolean isAccessByNameToFieldRecord(ILogicalExpression expr) {
-        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression fc = (AbstractFunctionCallExpression) expr;
-            FunctionIdentifier fid = fc.getFunctionIdentifier();
-            if (fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    public static boolean isAccessToFieldRecord(ILogicalExpression expr) {
-        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression fc = (AbstractFunctionCallExpression) expr;
-            FunctionIdentifier fid = fc.getFunctionIdentifier();
-            if (fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)
-                    || fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)
-                    || fid.equals(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    public static Pair<String, String> getDatasetInfo(AbstractDataSourceOperator op) throws AlgebricksException {
-        AqlSourceId srcId = (AqlSourceId) op.getDataSource().getId();
-        return new Pair<String, String>(srcId.getDataverseName(), srcId.getDatasourceName());
-    }
-
-    private static List<FunctionIdentifier> fieldAccessFunctions = new ArrayList<FunctionIdentifier>();
-    static {
-        fieldAccessFunctions.add(AsterixBuiltinFunctions.GET_DATA);
-        fieldAccessFunctions.add(AsterixBuiltinFunctions.GET_HANDLE);
-        fieldAccessFunctions.add(AsterixBuiltinFunctions.TYPE_OF);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java
deleted file mode 100644
index 063c887..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.base;
-
-import java.util.ArrayList;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-
-public class FuzzyUtils {
-
-    private final static String DEFAULT_SIM_FUNCTION = "jaccard";
-    private final static float JACCARD_DEFAULT_SIM_THRESHOLD = .8f;
-    private final static int EDIT_DISTANCE_DEFAULT_SIM_THRESHOLD = 1;
-
-    private final static String SIM_FUNCTION_PROP_NAME = "simfunction";
-    private final static String SIM_THRESHOLD_PROP_NAME = "simthreshold";
-
-    public final static String JACCARD_FUNCTION_NAME = "jaccard";
-    public final static String EDIT_DISTANCE_FUNCTION_NAME = "edit-distance";
-
-    public static FunctionIdentifier getTokenizer(ATypeTag inputTag) {
-        switch (inputTag) {
-            case STRING:
-                return AsterixBuiltinFunctions.COUNTHASHED_WORD_TOKENS;
-            case UNORDEREDLIST:
-            case ORDEREDLIST:
-            case ANY:
-                return null;
-            default:
-                throw new NotImplementedException("No tokenizer for type " + inputTag);
-        }
-    }
-
-    public static IAObject getSimThreshold(AqlMetadataProvider metadata, String simFuncName) {
-        String simThresholValue = metadata.getPropertyValue(SIM_THRESHOLD_PROP_NAME);
-        IAObject ret = null;
-        if (simFuncName.equals(JACCARD_FUNCTION_NAME)) {
-            if (simThresholValue != null) {
-                float jaccThresh = Float.parseFloat(simThresholValue);
-                ret = new AFloat(jaccThresh);
-            } else {
-                ret = new AFloat(JACCARD_DEFAULT_SIM_THRESHOLD);
-            }
-        } else if (simFuncName.equals(EDIT_DISTANCE_FUNCTION_NAME)) {
-            if (simThresholValue != null) {
-                int edThresh = Integer.parseInt(simThresholValue);
-                ret = new AInt32(edThresh);
-            } else {
-                ret = new AInt32(EDIT_DISTANCE_DEFAULT_SIM_THRESHOLD);
-            }
-        }
-        return ret;
-    }
-
-    public static FunctionIdentifier getFunctionIdentifier(String simFuncName) {
-        if (simFuncName.equals(JACCARD_FUNCTION_NAME)) {
-            return AsterixBuiltinFunctions.SIMILARITY_JACCARD;
-        } else if (simFuncName.equals(EDIT_DISTANCE_FUNCTION_NAME)) {
-            return AsterixBuiltinFunctions.EDIT_DISTANCE;
-        }
-        return null;
-    }
-
-    public static ScalarFunctionCallExpression getComparisonExpr(String simFuncName,
-            ArrayList<Mutable<ILogicalExpression>> cmpArgs) {
-        if (simFuncName.equals(JACCARD_FUNCTION_NAME)) {
-            return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.GE),
-                    cmpArgs);
-        } else if (simFuncName.equals(EDIT_DISTANCE_FUNCTION_NAME)) {
-            return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.LE),
-                    cmpArgs);
-        }
-        return null;
-    }
-
-    public static float getSimThreshold(AqlMetadataProvider metadata) {
-        float simThreshold = JACCARD_DEFAULT_SIM_THRESHOLD;
-        String simThresholValue = metadata.getPropertyValue(SIM_THRESHOLD_PROP_NAME);
-        if (simThresholValue != null) {
-            simThreshold = Float.parseFloat(simThresholValue);
-        }
-        return simThreshold;
-    }
-
-    // TODO: The default function should depend on the input types.
-    public static String getSimFunction(AqlMetadataProvider metadata) {
-        String simFunction = metadata.getPropertyValue(SIM_FUNCTION_PROP_NAME);
-        if (simFunction == null) {
-            simFunction = DEFAULT_SIM_FUNCTION;
-        }
-        simFunction = simFunction.toLowerCase();
-        return simFunction;
-    }
-}
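
For reference, the helpers above resolve the similarity function and threshold from the compile-time properties "simfunction" and "simthreshold", fall back to a 0.8 Jaccard threshold or an edit distance of 1, and pair Jaccard with a >= comparison and edit distance with <=. Below is a minimal, standalone sketch of that selection logic only; the class name, the plain Map standing in for AqlMetadataProvider, and the "jaccard" default function name (the DEFAULT_SIM_FUNCTION constant is defined above this excerpt) are assumptions for illustration, not the AsterixDB API.

    import java.util.Map;

    public final class SimilarityConfigSketch {
        // Mirrors the default constants in the deleted class above.
        static final float JACCARD_DEFAULT_SIM_THRESHOLD = 0.8f;
        static final int EDIT_DISTANCE_DEFAULT_SIM_THRESHOLD = 1;

        static String resolveSimFunction(Map<String, String> props) {
            String simFunction = props.get("simfunction");
            return (simFunction == null ? "jaccard" : simFunction).toLowerCase(); // default name is an assumption
        }

        static Number resolveSimThreshold(Map<String, String> props, String simFunction) {
            String raw = props.get("simthreshold");
            if ("jaccard".equals(simFunction)) {
                // Jaccard similarity is later compared with >= against this threshold.
                return raw != null ? Float.parseFloat(raw) : JACCARD_DEFAULT_SIM_THRESHOLD;
            }
            if ("edit-distance".equals(simFunction)) {
                // Edit distance is later compared with <= against this threshold.
                return raw != null ? Integer.parseInt(raw) : EDIT_DISTANCE_DEFAULT_SIM_THRESHOLD;
            }
            return null;
        }

        public static void main(String[] args) {
            Map<String, String> props = Map.of("simfunction", "edit-distance", "simthreshold", "2");
            String simFunction = resolveSimFunction(props);
            System.out.println(simFunction + " threshold: " + resolveSimThreshold(props, simFunction));
        }
    }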

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
deleted file mode 100644
index a200874..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
+++ /dev/null
@@ -1,331 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.base;
-
-import java.util.LinkedList;
-import java.util.List;
-
-import edu.uci.ics.asterix.optimizer.rules.AddEquivalenceClassForRecordConstructorRule;
-import edu.uci.ics.asterix.optimizer.rules.AsterixExtractFunctionsFromJoinConditionRule;
-import edu.uci.ics.asterix.optimizer.rules.AsterixInlineVariablesRule;
-import edu.uci.ics.asterix.optimizer.rules.AsterixIntroduceGroupByCombinerRule;
-import edu.uci.ics.asterix.optimizer.rules.AsterixMoveFreeVariableOperatorOutOfSubplanRule;
-import edu.uci.ics.asterix.optimizer.rules.ByNameToByIndexFieldAccessRule;
-import edu.uci.ics.asterix.optimizer.rules.CancelUnnestWithNestedListifyRule;
-import edu.uci.ics.asterix.optimizer.rules.CheckFilterExpressionTypeRule;
-import edu.uci.ics.asterix.optimizer.rules.ConstantFoldingRule;
-import edu.uci.ics.asterix.optimizer.rules.CountVarToCountOneRule;
-import edu.uci.ics.asterix.optimizer.rules.DisjunctivePredicateToJoinRule;
-import edu.uci.ics.asterix.optimizer.rules.ExtractDistinctByExpressionsRule;
-import edu.uci.ics.asterix.optimizer.rules.ExtractOrderExpressionsRule;
-import edu.uci.ics.asterix.optimizer.rules.FeedScanCollectionToUnnest;
-import edu.uci.ics.asterix.optimizer.rules.FuzzyEqRule;
-import edu.uci.ics.asterix.optimizer.rules.IfElseToSwitchCaseFunctionRule;
-import edu.uci.ics.asterix.optimizer.rules.InlineUnnestFunctionRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceAutogenerateIDRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastForExternalFunctionRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceEnforcedListTypeRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceInstantLockSearchCallbackRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceMaterializationForInsertWithSelfScanRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceRandomPartitioningFeedComputationRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceRapidFrameFlushProjectAssignRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceStaticTypeCastForInsertRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceUnionRule;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceUnnestForCollectionToSequenceRule;
-import edu.uci.ics.asterix.optimizer.rules.LoadRecordFieldsRule;
-import edu.uci.ics.asterix.optimizer.rules.NestGroupByRule;
-import edu.uci.ics.asterix.optimizer.rules.PushAggFuncIntoStandaloneAggregateRule;
-import edu.uci.ics.asterix.optimizer.rules.PushAggregateIntoGroupbyRule;
-import edu.uci.ics.asterix.optimizer.rules.PushFieldAccessRule;
-import edu.uci.ics.asterix.optimizer.rules.PushGroupByThroughProduct;
-import edu.uci.ics.asterix.optimizer.rules.PushProperJoinThroughProduct;
-import edu.uci.ics.asterix.optimizer.rules.PushSimilarityFunctionsBelowJoin;
-import edu.uci.ics.asterix.optimizer.rules.RemoveRedundantListifyRule;
-import edu.uci.ics.asterix.optimizer.rules.RemoveRedundantSelectRule;
-import edu.uci.ics.asterix.optimizer.rules.RemoveSortInFeedIngestionRule;
-import edu.uci.ics.asterix.optimizer.rules.RemoveUnusedOneToOneEquiJoinRule;
-import edu.uci.ics.asterix.optimizer.rules.ReplaceSinkOpWithCommitOpRule;
-import edu.uci.ics.asterix.optimizer.rules.SetAsterixPhysicalOperatorsRule;
-import edu.uci.ics.asterix.optimizer.rules.SetClosedRecordConstructorsRule;
-import edu.uci.ics.asterix.optimizer.rules.SimilarityCheckRule;
-import edu.uci.ics.asterix.optimizer.rules.SweepIllegalNonfunctionalFunctions;
-import edu.uci.ics.asterix.optimizer.rules.UnnestToDataScanRule;
-import edu.uci.ics.asterix.optimizer.rules.am.IntroduceJoinAccessMethodRule;
-import edu.uci.ics.asterix.optimizer.rules.am.IntroduceLSMComponentFilterRule;
-import edu.uci.ics.asterix.optimizer.rules.am.IntroduceSelectAccessMethodRule;
-import edu.uci.ics.asterix.optimizer.rules.temporal.TranslateIntervalExpressionRule;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.HeuristicOptimizer;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.BreakSelectIntoConjunctsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexJoinInferenceRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexUnnestToProductRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateAssignsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateSelectsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.CopyLimitDownRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateGroupByEmptyKeyRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanWithInputCardinalityOneRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceOrderByAfterSubplan;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceStructuralPropertiesRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractCommonExpressionsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractCommonOperatorsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractGbyExpressionsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.FactorRedundantGroupAndDecorVarsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.InferTypesRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineAssignIntoAggregateRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineSingleReferenceVariablesRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.InsertOuterJoinRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.InsertProjectBeforeUnionRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroHashPartitionMergeExchange;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroJoinInsideSubplanRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceAggregateCombinerRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByForSubplanRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceProjectsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.IsolateHyracksOperatorsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.NestedSubplanToJoinRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignBelowUnionAllRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushGroupByIntoSortRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushMapOperatorDownThroughProductRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushNestedOrderByUnderPreSortedGroupByRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectDownRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectIntoJoinRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSubplanIntoGroupByRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSubplanWithAggregateDownThroughProductRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushUnnestDownThroughUnionRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ReinferAllTypesRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveRedundantGroupByDecorVars;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveRedundantVariablesRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveUnusedAssignAndAggregateRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetAlgebricksPhysicalOperatorsRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetExecutionModeRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.SimpleUnnestToProductRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.SubplanOutOfGroupRule;
-
-public final class RuleCollections {
-
-    public final static List<IAlgebraicRewriteRule> buildInitialTranslationRuleCollection() {
-        List<IAlgebraicRewriteRule> typeInfer = new LinkedList<IAlgebraicRewriteRule>();
-        typeInfer.add(new TranslateIntervalExpressionRule());
-        return typeInfer;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildTypeInferenceRuleCollection() {
-        List<IAlgebraicRewriteRule> typeInfer = new LinkedList<IAlgebraicRewriteRule>();
-        typeInfer.add(new InlineUnnestFunctionRule());
-        typeInfer.add(new InferTypesRule());
-        typeInfer.add(new CheckFilterExpressionTypeRule());
-        return typeInfer;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildAutogenerateIDRuleCollection() {
-        List<IAlgebraicRewriteRule> autogen = new LinkedList<>();
-        autogen.add(new IntroduceAutogenerateIDRule());
-        return autogen;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildNormalizationRuleCollection() {
-        List<IAlgebraicRewriteRule> normalization = new LinkedList<IAlgebraicRewriteRule>();
-        normalization.add(new IntroduceUnnestForCollectionToSequenceRule());
-        normalization.add(new EliminateSubplanRule());
-        normalization.add(new EnforceOrderByAfterSubplan());
-        normalization.add(new PushAggFuncIntoStandaloneAggregateRule());
-        normalization.add(new BreakSelectIntoConjunctsRule());
-        normalization.add(new ExtractGbyExpressionsRule());
-        normalization.add(new ExtractDistinctByExpressionsRule());
-        normalization.add(new ExtractOrderExpressionsRule());
-        normalization.add(new AsterixMoveFreeVariableOperatorOutOfSubplanRule());
-
-        // IntroduceStaticTypeCastForInsertRule should go before IntroduceDynamicTypeCastRule
-        // to avoid unnecessary dynamic casting.
-        normalization.add(new IntroduceStaticTypeCastForInsertRule());
-        normalization.add(new IntroduceDynamicTypeCastRule());
-        normalization.add(new IntroduceDynamicTypeCastForExternalFunctionRule());
-        normalization.add(new IntroduceEnforcedListTypeRule());
-        normalization.add(new ExtractCommonExpressionsRule());
-        normalization.add(new ConstantFoldingRule());
-        normalization.add(new RemoveRedundantSelectRule());
-        normalization.add(new UnnestToDataScanRule());
-        normalization.add(new IfElseToSwitchCaseFunctionRule());
-        normalization.add(new FuzzyEqRule());
-        normalization.add(new SimilarityCheckRule());
-        return normalization;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildCondPushDownAndJoinInferenceRuleCollection() {
-        List<IAlgebraicRewriteRule> condPushDownAndJoinInference = new LinkedList<IAlgebraicRewriteRule>();
-
-        condPushDownAndJoinInference.add(new PushSelectDownRule());
-        condPushDownAndJoinInference.add(new RemoveRedundantListifyRule());
-        condPushDownAndJoinInference.add(new CancelUnnestWithNestedListifyRule());
-        condPushDownAndJoinInference.add(new SimpleUnnestToProductRule());
-        condPushDownAndJoinInference.add(new ComplexUnnestToProductRule());
-        condPushDownAndJoinInference.add(new ComplexJoinInferenceRule());
-        condPushDownAndJoinInference.add(new DisjunctivePredicateToJoinRule());
-        condPushDownAndJoinInference.add(new PushSelectIntoJoinRule());
-        condPushDownAndJoinInference.add(new IntroJoinInsideSubplanRule());
-        condPushDownAndJoinInference.add(new PushMapOperatorDownThroughProductRule());
-        condPushDownAndJoinInference.add(new PushSubplanWithAggregateDownThroughProductRule());
-        condPushDownAndJoinInference.add(new IntroduceGroupByForSubplanRule());
-        condPushDownAndJoinInference.add(new SubplanOutOfGroupRule());
-        condPushDownAndJoinInference.add(new InsertOuterJoinRule());
-        condPushDownAndJoinInference.add(new AsterixExtractFunctionsFromJoinConditionRule());
-
-        condPushDownAndJoinInference.add(new RemoveRedundantVariablesRule());
-        condPushDownAndJoinInference.add(new AsterixInlineVariablesRule());
-        condPushDownAndJoinInference.add(new RemoveUnusedAssignAndAggregateRule());
-
-        condPushDownAndJoinInference.add(new FactorRedundantGroupAndDecorVarsRule());
-        condPushDownAndJoinInference.add(new PushAggregateIntoGroupbyRule());
-        condPushDownAndJoinInference.add(new EliminateSubplanRule());
-        condPushDownAndJoinInference.add(new PushProperJoinThroughProduct());
-        condPushDownAndJoinInference.add(new PushGroupByThroughProduct());
-        condPushDownAndJoinInference.add(new NestGroupByRule());
-        condPushDownAndJoinInference.add(new EliminateGroupByEmptyKeyRule());
-        condPushDownAndJoinInference.add(new PushSubplanIntoGroupByRule());
-        condPushDownAndJoinInference.add(new NestedSubplanToJoinRule());
-        condPushDownAndJoinInference.add(new EliminateSubplanWithInputCardinalityOneRule());
-
-        return condPushDownAndJoinInference;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildLoadFieldsRuleCollection() {
-        List<IAlgebraicRewriteRule> fieldLoads = new LinkedList<IAlgebraicRewriteRule>();
-        fieldLoads.add(new LoadRecordFieldsRule());
-        fieldLoads.add(new PushFieldAccessRule());
-        // fieldLoads.add(new ByNameToByHandleFieldAccessRule()); -- disabled
-        fieldLoads.add(new ByNameToByIndexFieldAccessRule());
-        fieldLoads.add(new RemoveRedundantVariablesRule());
-        fieldLoads.add(new AsterixInlineVariablesRule());
-        fieldLoads.add(new RemoveUnusedAssignAndAggregateRule());
-        fieldLoads.add(new ConstantFoldingRule());
-        fieldLoads.add(new FeedScanCollectionToUnnest());
-        fieldLoads.add(new ComplexJoinInferenceRule());
-        return fieldLoads;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildFuzzyJoinRuleCollection() {
-        List<IAlgebraicRewriteRule> fuzzy = new LinkedList<IAlgebraicRewriteRule>();
-        // fuzzy.add(new FuzzyJoinRule()); -- The non-indexed fuzzy join is temporarily disabled; it should be re-enabled in the near future.
-        fuzzy.add(new InferTypesRule());
-        return fuzzy;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildConsolidationRuleCollection() {
-        List<IAlgebraicRewriteRule> consolidation = new LinkedList<IAlgebraicRewriteRule>();
-        consolidation.add(new ConsolidateSelectsRule());
-        consolidation.add(new ConsolidateAssignsRule());
-        consolidation.add(new InlineAssignIntoAggregateRule());
-        consolidation.add(new AsterixIntroduceGroupByCombinerRule());
-        consolidation.add(new IntroduceAggregateCombinerRule());
-        consolidation.add(new CountVarToCountOneRule());
-        consolidation.add(new RemoveUnusedAssignAndAggregateRule());
-        consolidation.add(new RemoveRedundantGroupByDecorVars());
-        consolidation.add(new NestedSubplanToJoinRule());
-        // IntroduceUnionRule => PushUnnestDownThroughUnionRule => RemoveRedundantListifyRule: keep this order because these rules are correlated.
-        consolidation.add(new IntroduceUnionRule());
-        consolidation.add(new PushUnnestDownThroughUnionRule());
-        consolidation.add(new RemoveRedundantListifyRule());
-
-        return consolidation;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildAccessMethodRuleCollection() {
-        List<IAlgebraicRewriteRule> accessMethod = new LinkedList<IAlgebraicRewriteRule>();
-        accessMethod.add(new IntroduceSelectAccessMethodRule());
-        accessMethod.add(new IntroduceJoinAccessMethodRule());
-        accessMethod.add(new IntroduceLSMComponentFilterRule());
-        accessMethod.add(new IntroduceSecondaryIndexInsertDeleteRule());
-        accessMethod.add(new RemoveUnusedOneToOneEquiJoinRule());
-        accessMethod.add(new PushSimilarityFunctionsBelowJoin());
-        accessMethod.add(new RemoveUnusedAssignAndAggregateRule());
-        return accessMethod;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildPlanCleanupRuleCollection() {
-        List<IAlgebraicRewriteRule> planCleanupRules = new LinkedList<IAlgebraicRewriteRule>();
-        planCleanupRules.add(new PushAssignBelowUnionAllRule());
-        planCleanupRules.add(new ExtractCommonExpressionsRule());
-        planCleanupRules.add(new RemoveRedundantVariablesRule());
-        planCleanupRules.add(new PushProjectDownRule());
-        planCleanupRules.add(new PushSelectDownRule());
-        planCleanupRules.add(new SetClosedRecordConstructorsRule());
-        planCleanupRules.add(new IntroduceDynamicTypeCastRule());
-        planCleanupRules.add(new IntroduceDynamicTypeCastForExternalFunctionRule());
-        planCleanupRules.add(new RemoveUnusedAssignAndAggregateRule());
-        return planCleanupRules;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildDataExchangeRuleCollection() {
-        List<IAlgebraicRewriteRule> dataExchange = new LinkedList<IAlgebraicRewriteRule>();
-        dataExchange.add(new SetExecutionModeRule());
-        return dataExchange;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildPhysicalRewritesAllLevelsRuleCollection() {
-        List<IAlgebraicRewriteRule> physicalRewritesAllLevels = new LinkedList<IAlgebraicRewriteRule>();
-        physicalRewritesAllLevels.add(new PullSelectOutOfEqJoin());
-        // The following rule is turned off for now so as not to change OptimizerTest results.
-        //physicalRewritesAllLevels.add(new IntroduceTransactionCommitByAssignOpRule());
-        physicalRewritesAllLevels.add(new ReplaceSinkOpWithCommitOpRule());
-        physicalRewritesAllLevels.add(new SetAlgebricksPhysicalOperatorsRule());
-        physicalRewritesAllLevels.add(new SetAsterixPhysicalOperatorsRule());
-        physicalRewritesAllLevels.add(new IntroduceInstantLockSearchCallbackRule());
-        physicalRewritesAllLevels.add(new AddEquivalenceClassForRecordConstructorRule());
-        physicalRewritesAllLevels.add(new EnforceStructuralPropertiesRule());
-        physicalRewritesAllLevels.add(new RemoveSortInFeedIngestionRule());
-        physicalRewritesAllLevels.add(new IntroHashPartitionMergeExchange());
-        physicalRewritesAllLevels.add(new PushProjectDownRule());
-        physicalRewritesAllLevels.add(new InsertProjectBeforeUnionRule());
-        physicalRewritesAllLevels.add(new IntroduceMaterializationForInsertWithSelfScanRule());
-        physicalRewritesAllLevels.add(new InlineSingleReferenceVariablesRule());
-        physicalRewritesAllLevels.add(new RemoveUnusedAssignAndAggregateRule());
-        physicalRewritesAllLevels.add(new ConsolidateAssignsRule());
-        // After adding projects, we may need to set physical operators again.
-        physicalRewritesAllLevels.add(new SetAlgebricksPhysicalOperatorsRule());
-        return physicalRewritesAllLevels;
-    }
-
-    public final static List<IAlgebraicRewriteRule> buildPhysicalRewritesTopLevelRuleCollection() {
-        List<IAlgebraicRewriteRule> physicalRewritesTopLevel = new LinkedList<IAlgebraicRewriteRule>();
-        physicalRewritesTopLevel.add(new PushNestedOrderByUnderPreSortedGroupByRule());
-        physicalRewritesTopLevel.add(new CopyLimitDownRule());
-        physicalRewritesTopLevel.add(new IntroduceProjectsRule());
-        physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
-        physicalRewritesTopLevel.add(new IntroduceRapidFrameFlushProjectAssignRule());
-        physicalRewritesTopLevel.add(new SetExecutionModeRule());
-        physicalRewritesTopLevel.add(new IntroduceRandomPartitioningFeedComputationRule());
-        return physicalRewritesTopLevel;
-    }
-
-    public final static List<IAlgebraicRewriteRule> prepareForJobGenRuleCollection() {
-        List<IAlgebraicRewriteRule> prepareForJobGenRewrites = new LinkedList<IAlgebraicRewriteRule>();
-        prepareForJobGenRewrites.add(new IsolateHyracksOperatorsRule(
-                HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled));
-        prepareForJobGenRewrites.add(new ExtractCommonOperatorsRule());
-        // Re-infer all types, so that, e.g., the effect of not-is-null is
-        // propagated.
-        prepareForJobGenRewrites.add(new ReinferAllTypesRule());
-        prepareForJobGenRewrites.add(new PushGroupByIntoSortRule());
-        prepareForJobGenRewrites.add(new SetExecutionModeRule());
-        prepareForJobGenRewrites.add(new SweepIllegalNonfunctionalFunctions());
-        return prepareForJobGenRewrites;
-    }
-}
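
RuleCollections above only assembles the rule lists; a separate driver (not shown in this diff) runs the collections in sequence as optimizer phases, typically re-applying each collection's rules until none of them fires any more. The following is a minimal, self-contained sketch of that phase structure only; the Rule interface, the StringBuilder "plan", and runPhases are illustrative stand-ins, not the Algebricks HeuristicOptimizer API.

    import java.util.LinkedList;
    import java.util.List;

    public final class PhasedRewriteSketch {
        // Stand-in for IAlgebraicRewriteRule: returns true if it changed the plan.
        interface Rule {
            boolean rewrite(StringBuilder plan);
        }

        static List<Rule> buildNormalizationPhase() {
            List<Rule> rules = new LinkedList<>();
            rules.add(plan -> {
                if (plan.indexOf("normalized") >= 0) {
                    return false; // already applied, nothing left to do
                }
                plan.append(" -> normalized");
                return true;
            });
            return rules;
        }

        static List<Rule> buildCleanupPhase() {
            List<Rule> rules = new LinkedList<>();
            rules.add(plan -> false); // a rule that never applies to this plan
            return rules;
        }

        // Run each phase to a fixed point before moving on to the next one.
        static void runPhases(StringBuilder plan, List<List<Rule>> phases) {
            for (List<Rule> phase : phases) {
                boolean changed;
                do {
                    changed = false;
                    for (Rule rule : phase) {
                        changed |= rule.rewrite(plan);
                    }
                } while (changed);
            }
        }

        public static void main(String[] args) {
            StringBuilder plan = new StringBuilder("scan");
            runPhases(plan, List.of(buildNormalizationPhase(), buildCleanupPhase()));
            System.out.println(plan); // scan -> normalized
        }
    }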

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldIndexAndTypeHandle.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldIndexAndTypeHandle.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldIndexAndTypeHandle.java
deleted file mode 100644
index 7eee1ff..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldIndexAndTypeHandle.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.handle;
-
-import edu.uci.ics.asterix.om.types.IAType;
-
-public class FieldIndexAndTypeHandle implements IHandle {
-
-    private int fieldIndex;
-    private IAType fieldType;
-
-    public FieldIndexAndTypeHandle(int fieldIndex, IAType fieldType) {
-        this.fieldIndex = fieldIndex;
-        this.fieldType = fieldType;
-    }
-
-    @Override
-    public HandleType getHandleType() {
-        return HandleType.FIELD_INDEX_AND_TYPE;
-    }
-
-    public int getFieldIndex() {
-        return fieldIndex;
-    }
-
-    public IAType getFieldType() {
-        return fieldType;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldNameHandle.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldNameHandle.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldNameHandle.java
deleted file mode 100644
index a1b8409..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/FieldNameHandle.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.handle;
-
-import edu.uci.ics.asterix.om.types.IAType;
-
-public class FieldNameHandle implements IHandle {
-
-    private String fieldName;
-    private IAType fieldType;
-
-    public FieldNameHandle(String fieldName) {
-        this.fieldName = fieldName;
-    }
-
-    public IAType getFieldType() {
-        return fieldType;
-    }
-
-    public void setFieldType(IAType fieldType) {
-        this.fieldType = fieldType;
-    }
-
-    @Override
-    public HandleType getHandleType() {
-        return HandleType.FIELD_NAME;
-    }
-
-    public String getFieldName() {
-        return fieldName;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java
deleted file mode 100644
index 80103b2..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/handle/IHandle.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.handle;
-
-/**
- * A handle is a way of accessing an ADM instance or a collection of ADM
- * instances nested within another ADM instance.
- *
- * @author Nicola
- */
-
-public interface IHandle {
-    public enum HandleType {
-        FIELD_INDEX_AND_TYPE,
-        FIELD_NAME
-    }
-
-    public HandleType getHandleType();
-}
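
The IHandle interface below (together with FieldIndexAndTypeHandle and FieldNameHandle above) gives two ways of addressing a nested ADM value: by field position with a known type, or by field name. A minimal sketch of how a caller might dispatch on the handle kind follows; the describe method is hypothetical and assumes the three classes from this diff are on the classpath.

    // Hypothetical caller; IHandle, FieldIndexAndTypeHandle and FieldNameHandle are the classes shown in this diff.
    static String describe(IHandle handle) {
        switch (handle.getHandleType()) {
            case FIELD_INDEX_AND_TYPE:
                FieldIndexAndTypeHandle byIndex = (FieldIndexAndTypeHandle) handle;
                return "field #" + byIndex.getFieldIndex() + " of type " + byIndex.getFieldType();
            case FIELD_NAME:
                FieldNameHandle byName = (FieldNameHandle) handle;
                return "field \"" + byName.getFieldName() + "\"";
            default:
                throw new IllegalStateException("Unknown handle type: " + handle.getHandleType());
        }
    }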

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java
deleted file mode 100644
index c738720..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-import org.mortbay.util.SingletonList;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.EquivalenceClass;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.util.PhysicalOptimizationsUtil;
-
-/**
- * Adds equivalence classes for record constructors.
- * For example, for $x := record-constructor("field1": $v, "field2": $t),
- * two equivalence classes will be added:
- * <$v, field-access-by-index($x, 0)>
- * <$t, field-access-by-index($x, 1)>
- *
- * @author yingyi
- */
-public class AddEquivalenceClassForRecordConstructorRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        // Computes FDs and equivalence classes for the operator.
-        PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(op, context);
-        AssignOperator assignOp = (AssignOperator) op;
-        List<LogicalVariable> vars = assignOp.getVariables();
-        List<Mutable<ILogicalExpression>> exprRefs = assignOp.getExpressions();
-        return addEquivalenceClassesForRecordConstructor(vars, exprRefs, assignOp, context);
-    }
-
-    private boolean addEquivalenceClassesForRecordConstructor(List<LogicalVariable> vars,
-            List<Mutable<ILogicalExpression>> exprRefs, AssignOperator assignOp, IOptimizationContext context) {
-        boolean changed = false;
-        for (int exprIndex = 0; exprIndex < exprRefs.size(); ++exprIndex) {
-            ILogicalExpression expr = exprRefs.get(exprIndex).getValue();
-            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) expr;
-                FunctionIdentifier fid = funcExpr.getFunctionIdentifier();
-                if (fid == AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR
-                        || fid == AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR) {
-                    changed |= propagateEquivalenceClassesForRecordConstructor(vars.get(exprIndex), funcExpr, assignOp,
-                            context);
-                }
-            }
-        }
-        return changed;
-    }
-
-    @SuppressWarnings("unchecked")
-    private boolean propagateEquivalenceClassesForRecordConstructor(LogicalVariable recordVar,
-            ScalarFunctionCallExpression funcExpr, AssignOperator assignOp, IOptimizationContext context) {
-        List<Mutable<ILogicalExpression>> argRefs = funcExpr.getArguments();
-        boolean changed = false;
-        // Only odd-position arguments are field value expressions (even positions hold the field names).
-        for (int parameterIndex = 1; parameterIndex < argRefs.size(); parameterIndex += 2) {
-            ILogicalExpression fieldExpr = argRefs.get(parameterIndex).getValue();
-            // Adds an equivalence class if the field value is a variable reference.
-            if (fieldExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                VariableReferenceExpression varExpr = (VariableReferenceExpression) fieldExpr;
-                LogicalVariable fieldVar = varExpr.getVariableReference();
-                Map<LogicalVariable, EquivalenceClass> ecs = context.getEquivalenceClassMap(assignOp);
-                if (ecs == null) {
-                    ecs = new HashMap<LogicalVariable, EquivalenceClass>();
-                    context.putEquivalenceClassMap(assignOp, ecs);
-                }
-                ILogicalExpression expr = new ScalarFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
-                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(recordVar)),
-                        new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                                new AInt32(parameterIndex / 2))))); // Every two parameters corresponds to a field.
-                EquivalenceClass equivClass = new EquivalenceClass(SingletonList.newSingletonList(fieldVar), fieldVar,
-                        SingletonList.newSingletonList(expr));
-                ecs.put(fieldVar, equivClass);
-                changed = true;
-            }
-        }
-        return changed;
-    }
-
-}
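
The arithmetic behind the rule above: record-constructor arguments alternate field name, field value, so the value at argument position i maps to field index i / 2 in field-access-by-index. A tiny standalone illustration of that mapping follows; the argument list and the variable names $v, $t, $x are hypothetical.

    import java.util.List;

    public final class FieldIndexMappingSketch {
        public static void main(String[] args) {
            // record-constructor("field1": $v, "field2": $t): names at even positions, values at odd positions.
            List<String> constructorArgs = List.of("field1", "$v", "field2", "$t");
            for (int parameterIndex = 1; parameterIndex < constructorArgs.size(); parameterIndex += 2) {
                int fieldIndex = parameterIndex / 2; // the index passed to field-access-by-index
                System.out.println(constructorArgs.get(parameterIndex)
                        + " <-> field-access-by-index($x, " + fieldIndex + ")");
            }
        }
    }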

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java
deleted file mode 100644
index 5457e55..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractFunctionsFromJoinConditionRule;
-
-public class AsterixExtractFunctionsFromJoinConditionRule extends ExtractFunctionsFromJoinConditionRule {
-
-    @Override
-    protected boolean processArgumentsToFunction(FunctionIdentifier fi) {
-        return fi.equals(AsterixBuiltinFunctions.GET_ITEM);
-    }
-
-    @Override
-    protected boolean isComparisonFunction(FunctionIdentifier fi) {
-        return AsterixBuiltinFunctions.isSimilarityFunction(fi);
-    }
-
-}
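
The subclass above customizes the generic Algebricks extraction rule purely through two hooks: which function's arguments may be pulled out (get-item) and which functions count as comparisons (the similarity functions). A minimal standalone sketch of that template-method shape follows; the class names, the String-based function identifiers, and the similarity check are illustrative assumptions, not the Algebricks classes.

    // Illustrative template-method shape; not the Algebricks ExtractFunctionsFromJoinConditionRule API.
    abstract class ExtractFromJoinConditionSketch {
        final void processCondition(String functionName) {
            if (isComparisonFunction(functionName)) {
                System.out.println("treat " + functionName + " as a comparison and extract its arguments");
            } else if (processArgumentsToFunction(functionName)) {
                System.out.println("descend into the arguments of " + functionName);
            }
        }

        protected abstract boolean processArgumentsToFunction(String functionName);

        protected abstract boolean isComparisonFunction(String functionName);
    }

    class SimilarityAwareExtractSketch extends ExtractFromJoinConditionSketch {
        @Override
        protected boolean processArgumentsToFunction(String functionName) {
            return "get-item".equals(functionName);
        }

        @Override
        protected boolean isComparisonFunction(String functionName) {
            // Hypothetical naming check standing in for AsterixBuiltinFunctions.isSimilarityFunction.
            return functionName.startsWith("similarity-") || "edit-distance".equals(functionName);
        }
    }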


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeOperationsHelper.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeOperationsHelper.java
deleted file mode 100644
index d91e820..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryRTreeOperationsHelper.java
+++ /dev/null
@@ -1,408 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.external.IndexingConstants;
-import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.resource.ExternalRTreeLocalResourceMetadata;
-import edu.uci.ics.asterix.transaction.management.resource.LSMRTreeLocalResourceMetadata;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.storage.common.file.LocalResource;
-
-@SuppressWarnings("rawtypes")
-public class SecondaryRTreeOperationsHelper extends SecondaryIndexOperationsHelper {
-
-    protected IPrimitiveValueProviderFactory[] valueProviderFactories;
-    protected int numNestedSecondaryKeyFields;
-    protected ATypeTag keyType;
-    protected int[] primaryKeyFields;
-    protected int[] rtreeFields;
-
-    protected SecondaryRTreeOperationsHelper(PhysicalOptimizationConfig physOptConf,
-            IAsterixPropertiesProvider propertiesProvider) {
-        super(physOptConf, propertiesProvider);
-    }
-
-    @Override
-    public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        IIndexDataflowHelperFactory indexDataflowHelperFactory;
-        ILocalResourceFactoryProvider localResourceFactoryProvider;
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            // Prepare a LocalResourceMetadata which will be stored in NC's local resource repository.
-            ILocalResourceMetadata localResourceMetadata = new LSMRTreeLocalResourceMetadata(secondaryTypeTraits,
-                    secondaryComparatorFactories, primaryComparatorFactories, valueProviderFactories,
-                    RTreePolicyType.RTREE, AqlMetadataProvider.proposeLinearizer(keyType,
-                            secondaryComparatorFactories.length), dataset.getDatasetId(), mergePolicyFactory,
-                    mergePolicyFactoryProperties, filterTypeTraits, filterCmpFactories, rtreeFields, primaryKeyFields,
-                    secondaryFilterFields);
-            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
-                    LocalResource.LSMRTreeResource);
-            indexDataflowHelperFactory = new LSMRTreeDataflowHelperFactory(valueProviderFactories,
-                    RTreePolicyType.RTREE, primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(
-                            dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
-                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
-                    AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
-                    storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, primaryKeyFields,
-                    filterTypeTraits, filterCmpFactories, secondaryFilterFields, !temp);
-        } else {
-            // External dataset
-            // Prepare a LocalResourceMetadata which will be stored in NC's local resource repository
-            ILocalResourceMetadata localResourceMetadata = new ExternalRTreeLocalResourceMetadata(secondaryTypeTraits,
-                    secondaryComparatorFactories, ExternalIndexingOperations.getBuddyBtreeComparatorFactories(),
-                    valueProviderFactories, RTreePolicyType.RTREE, AqlMetadataProvider.proposeLinearizer(keyType,
-                            secondaryComparatorFactories.length), dataset.getDatasetId(), mergePolicyFactory,
-                    mergePolicyFactoryProperties, primaryKeyFields);
-            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
-                    LocalResource.ExternalRTreeResource);
-
-            indexDataflowHelperFactory = new ExternalRTreeDataflowHelperFactory(valueProviderFactories,
-                    RTreePolicyType.RTREE, ExternalIndexingOperations.getBuddyBtreeComparatorFactories(),
-                    mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                            dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
-                            secondaryComparatorFactories.length), storageProperties.getBloomFilterFalsePositiveRate(),
-                    new int[] { numNestedSecondaryKeyFields },
-                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-        }
-
-        TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                secondaryFileSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, null,
-                indexDataflowHelperFactory, localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
-
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
-                secondaryPartitionConstraint);
-        spec.addRoot(secondaryIndexCreateOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    @Override
-    protected int getNumSecondaryKeys() {
-        return numNestedSecondaryKeyFields;
-    }
-
-    @Override
-    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
-            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadata) throws AlgebricksException,
-            AsterixException {
-        int numSecondaryKeys = secondaryKeyFields.size();
-        if (numSecondaryKeys != 1) {
-            throw new AsterixException(
-                    "Cannot use "
-                            + numSecondaryKeys
-                            + " fields as a key for the R-tree index. There can be only one field as a key for the R-tree index.");
-        }
-        Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
-                secondaryKeyFields.get(0), itemType);
-        IAType spatialType = spatialTypePair.first;
-        anySecondaryKeyIsNullable = spatialTypePair.second;
-        if (spatialType == null) {
-            throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
-        }
-        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
-        numNestedSecondaryKeyFields = numDimensions * 2;
-        int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
-        secondaryFieldAccessEvalFactories = metadata.getFormat().createMBRFactory(
-                isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(0), recordColumn,
-                numDimensions, filterFieldName);
-        secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
-        valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
-        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys
-                + numNestedSecondaryKeyFields + numFilterFields];
-        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
-        secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
-        ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
-        IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
-        keyType = nestedKeyType.getTypeTag();
-        for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
-            ISerializerDeserializer keySerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(nestedKeyType);
-            secondaryRecFields[i] = keySerde;
-            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                    nestedKeyType, true);
-            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
-            valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
-
-        }
-        // Add serializers and comparators for primary index fields.
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            for (int i = 0; i < numPrimaryKeys; i++) {
-                secondaryRecFields[numNestedSecondaryKeyFields + i] = primaryRecDesc.getFields()[i];
-                secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryRecDesc.getTypeTraits()[i];
-                enforcedRecFields[i] = primaryRecDesc.getFields()[i];
-                enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i];
-            }
-        } else {
-            for (int i = 0; i < numPrimaryKeys; i++) {
-                secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
-                secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
-                enforcedRecFields[i] = IndexingConstants.getSerializerDeserializer(i);
-                enforcedTypeTraits[i] = IndexingConstants.getTypeTraits(i);
-            }
-        }
-        enforcedRecFields[numPrimaryKeys] = AqlSerializerDeserializerProvider.INSTANCE
-                .getSerializerDeserializer(itemType);
-        enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
-        if (numFilterFields > 0) {
-            rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
-            for (int i = 0; i < rtreeFields.length; i++) {
-                rtreeFields[i] = i;
-            }
-
-            Pair<IAType, Boolean> typePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
-            IAType type = typePair.first;
-            ISerializerDeserializer serde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(type);
-            secondaryRecFields[numPrimaryKeys + numNestedSecondaryKeyFields] = serde;
-        }
-        secondaryRecDesc = new RecordDescriptor(secondaryRecFields);
-        primaryKeyFields = new int[numPrimaryKeys];
-        for (int i = 0; i < primaryKeyFields.length; i++) {
-            primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
-        }
-    }
-
-    @Override
-    public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            // Create dummy key provider for feeding the primary index scan. 
-            AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
-
-            // Create primary index scan op.
-            BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
-
-            // Assign op.
-            AbstractOperatorDescriptor sourceOp = primaryScanOp;
-            if (isEnforcingKeyTypes) {
-                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
-                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
-            }
-            AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, sourceOp,
-                    numNestedSecondaryKeyFields);
-
-            // If any of the secondary fields are nullable, then add a select op that filters nulls.
-            AlgebricksMetaOperatorDescriptor selectOp = null;
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                selectOp = createFilterNullsSelectOp(spec, numNestedSecondaryKeyFields);
-            }
-
-            // Sort by secondary keys.
-            ExternalSortOperatorDescriptor sortOp = createSortOp(spec,
-                    new IBinaryComparatorFactory[] { AqlMetadataProvider.proposeLinearizer(keyType,
-                            secondaryComparatorFactories.length) }, secondaryRecDesc);
-
-            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-            // Create secondary RTree bulk load op.
-            TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(
-                    spec,
-                    numNestedSecondaryKeyFields,
-                    new LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
-                            primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                            mergePolicyFactory, mergePolicyFactoryProperties,
-                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
-                                    secondaryComparatorFactories.length), storageProperties
-                                    .getBloomFilterFalsePositiveRate(), rtreeFields, primaryKeyFields,
-                            filterTypeTraits, filterCmpFactories, secondaryFilterFields, !temp),
-                    GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
-            AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
-                    new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] {});
-            // Connect the operators.
-            spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
-            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
-                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
-            } else {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
-            }
-            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
-            spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
-            spec.addRoot(metaOp);
-            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        } else {
-            // External dataset
-            /*
-             * In case of external data, this method is used to build loading jobs for both initial load on index creation
-             * and transaction load on dataset refresh
-             */
-            // Create external indexing scan operator
-            ExternalDataScanOperatorDescriptor primaryScanOp = createExternalIndexingOp(spec);
-            AbstractOperatorDescriptor sourceOp = primaryScanOp;
-            if (isEnforcingKeyTypes) {
-                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
-                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
-            }
-            // Assign op.
-            AlgebricksMetaOperatorDescriptor asterixAssignOp = createExternalAssignOp(spec, numNestedSecondaryKeyFields);
-
-            // If any of the secondary fields are nullable, then add a select op that filters nulls.
-            AlgebricksMetaOperatorDescriptor selectOp = null;
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
-            }
-
-            // Sort by secondary keys.
-            ExternalSortOperatorDescriptor sortOp = createSortOp(spec,
-                    new IBinaryComparatorFactory[] { AqlMetadataProvider.proposeLinearizer(keyType,
-                            secondaryComparatorFactories.length) }, secondaryRecDesc);
-            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-
-            // Create the dataflow helper factory
-            ExternalRTreeDataflowHelperFactory dataflowHelperFactory = new ExternalRTreeDataflowHelperFactory(
-                    valueProviderFactories, RTreePolicyType.RTREE, primaryComparatorFactories, mergePolicyFactory,
-                    mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
-                    AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
-                    storageProperties.getBloomFilterFalsePositiveRate(), new int[] { numNestedSecondaryKeyFields },
-                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-            // Create secondary RTree bulk load op.
-            IOperatorDescriptor root;
-            AbstractTreeIndexOperatorDescriptor secondaryBulkLoadOp;
-            if (externalFiles != null) {
-                // Transaction load
-                secondaryBulkLoadOp = createExternalIndexBulkModifyOp(spec, numNestedSecondaryKeyFields,
-                        dataflowHelperFactory, GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
-                root = secondaryBulkLoadOp;
-            } else {
-                // Initial load
-                secondaryBulkLoadOp = createTreeIndexBulkLoadOp(spec, numNestedSecondaryKeyFields,
-                        dataflowHelperFactory, GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
-                AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
-                        new IPushRuntimeFactory[] { new SinkRuntimeFactory() },
-                        new RecordDescriptor[] { secondaryRecDesc });
-                spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
-                root = metaOp;
-            }
-
-            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
-                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
-            } else {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
-            }
-            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
-            spec.addRoot(root);
-            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        }
-        return spec;
-    }
-
-    @Override
-    public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        LSMTreeIndexCompactOperatorDescriptor compactOp;
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
-                    secondaryComparatorFactories, secondaryBloomFilterKeyFields, new LSMRTreeDataflowHelperFactory(
-                            valueProviderFactories, RTreePolicyType.RTREE, primaryComparatorFactories,
-                            new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), mergePolicyFactory,
-                            mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                                    dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
-                                    secondaryComparatorFactories.length),
-                            storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, primaryKeyFields,
-                            filterTypeTraits, filterCmpFactories, secondaryFilterFields, !temp),
-                    NoOpOperationCallbackFactory.INSTANCE);
-        } else {
-            // External dataset
-            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
-                    secondaryComparatorFactories, secondaryBloomFilterKeyFields,
-                    new ExternalRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
-                            primaryComparatorFactories, mergePolicyFactory, mergePolicyFactoryProperties,
-                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
-                                    secondaryComparatorFactories.length), storageProperties
-                                    .getBloomFilterFalsePositiveRate(), new int[] { numNestedSecondaryKeyFields },
-                            ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true),
-                    NoOpOperationCallbackFactory.INSTANCE);
-        }
-
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
-                secondaryPartitionConstraint);
-        spec.addRoot(compactOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-}
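
The job assembled above wires one linear chain in both branches, with two optional stages controlled by flags: a cast is inserted only when key types are enforced, a null-filtering select only when some secondary key is nullable (or enforcement is on), and both variants sort on a linearized key order before the RTree bulk load. The following is a rough, hypothetical sketch of that wiring in plain Java; the stage labels are descriptive only and are not the Hyracks operator classes used above.

    import java.util.ArrayList;
    import java.util.List;

    public class RTreeLoadPipelineSketch {

        // Returns the stage order for the initial-load case, mirroring the conditional wiring above.
        static List<String> pipeline(boolean internalDataset, boolean enforcingKeyTypes, boolean anyKeyNullable) {
            List<String> stages = new ArrayList<>();
            stages.add(internalDataset ? "dummy key provider + primary BTree scan" : "external indexing scan");
            if (enforcingKeyTypes) {
                stages.add("cast");                  // only when key types are enforced
            }
            stages.add("assign (extract secondary keys)");
            if (anyKeyNullable || enforcingKeyTypes) {
                stages.add("select (filter nulls)"); // only when a secondary key may be null
            }
            stages.add("sort (linearized key order)");
            stages.add("RTree bulk load");
            stages.add("sink");
            return stages;
        }

        public static void main(String[] args) {
            System.out.println(pipeline(true, false, true));
        }
    }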

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
deleted file mode 100644
index 8e633a9..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.util.List;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.api.IClusterEventsSubscriber;
-import edu.uci.ics.asterix.common.api.IClusterManagementWork;
-import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
-import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import edu.uci.ics.asterix.feeds.CentralFeedManager;
-import edu.uci.ics.asterix.file.ExternalIndexingOperations;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.ExternalFile;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.asterix.common.api.IClusterManagementWork.ClusterState;
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class AsterixGlobalRecoveryManager implements IClusterEventsSubscriber {
-
-    private static ClusterState state;
-    private static final Logger LOGGER = Logger.getLogger(AsterixGlobalRecoveryManager.class.getName());
-    private HyracksConnection hcc;
-    public static AsterixGlobalRecoveryManager INSTANCE;
-
-    public AsterixGlobalRecoveryManager(HyracksConnection hcc) throws Exception {
-        state = AsterixClusterProperties.INSTANCE.getState();
-        this.hcc = hcc;
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
-        state = AsterixClusterProperties.INSTANCE.getState();
-        AsterixClusterProperties.INSTANCE.setGlobalRecoveryCompleted(false);
-        return null;
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
-        // perform global recovery if state changed to active
-        final ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
-        boolean needToRecover = !newState.equals(state) && (newState == ClusterState.ACTIVE);
-        if (needToRecover) {
-            Thread recoveryThread = new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    LOGGER.info("Starting AsterixDB's Global Recovery");
-                    MetadataTransactionContext mdTxnCtx = null;
-                    try {
-                        Thread.sleep(4000);
-                        MetadataManager.INSTANCE.init();
-                        // Loop over datasets
-                        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                        List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
-                        for (Dataverse dataverse : dataverses) {
-                            if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
-                                AqlMetadataProvider metadataProvider = new AqlMetadataProvider(dataverse, CentralFeedManager.getInstance());
-                                List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
-                                        dataverse.getDataverseName());
-                                for (Dataset dataset : datasets) {
-                                    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-                                        // External dataset
-                                        // Get indexes
-                                        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
-                                                dataset.getDataverseName(), dataset.getDatasetName());
-                                        if (indexes.size() > 0) {
-                                            // Get the state of the dataset
-                                            ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset
-                                                    .getDatasetDetails();
-                                            ExternalDatasetTransactionState datasetState = dsd.getState();
-                                            if (datasetState == ExternalDatasetTransactionState.BEGIN) {
-                                                List<ExternalFile> files = MetadataManager.INSTANCE
-                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
-                                                // if presumed abort, roll backward
-                                                // 1. delete all pending files
-                                                for (ExternalFile file : files) {
-                                                    if (file.getPendingOp() != ExternalFilePendingOp.PENDING_NO_OP) {
-                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
-                                                    }
-                                                }
-                                                // 2. clean artifacts in NCs
-                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                JobSpecification jobSpec = ExternalIndexingOperations.buildAbortOp(
-                                                        dataset, indexes, metadataProvider);
-                                                executeHyracksJob(jobSpec);
-                                                // 3. correct the dataset state
-                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
-                                                        .setState(ExternalDatasetTransactionState.COMMIT);
-                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
-                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                                            } else if (datasetState == ExternalDatasetTransactionState.READY_TO_COMMIT) {
-                                                List<ExternalFile> files = MetadataManager.INSTANCE
-                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
-                                                // if ready to commit, roll forward
-                                                // 1. commit indexes in NCs
-                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                JobSpecification jobSpec = ExternalIndexingOperations.buildRecoverOp(
-                                                        dataset, indexes, metadataProvider);
-                                                executeHyracksJob(jobSpec);
-                                                // 2. add pending files in metadata
-                                                for (ExternalFile file : files) {
-                                                    if (file.getPendingOp() == ExternalFilePendingOp.PENDING_ADD_OP) {
-                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
-                                                        file.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
-                                                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
-                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
-                                                        // find original file
-                                                        for (ExternalFile originalFile : files) {
-                                                            if (originalFile.getFileName().equals(file.getFileName())) {
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        file);
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                                break;
-                                                            }
-                                                        }
-                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
-                                                        // find original file
-                                                        for (ExternalFile originalFile : files) {
-                                                            if (originalFile.getFileName().equals(file.getFileName())) {
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        file);
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                                originalFile.setSize(file.getSize());
-                                                                MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                            }
-                                                        }
-                                                    }
-                                                }
-                                                // 3. correct the dataset state
-                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
-                                                        .setState(ExternalDatasetTransactionState.COMMIT);
-                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
-                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                                            }
-                                        }
-                                    }
-                                }
-                            }
-                        }
-                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    } catch (Exception e) {
-                        // This needs to be fixed <-- Needs to shut down the system -->
-                        /*
-                         * Note: Throwing this illegal state exception will terminate this thread
-                         * and feeds listeners will not be notified.
-                         */
-                        LOGGER.severe("Global recovery was not completed successfully" + e);
-                        try {
-                            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-                        } catch (Exception e1) {
-                            if (LOGGER.isLoggable(Level.SEVERE)) {
-                                LOGGER.severe("Exception in aborting" + e.getMessage());
-                            }
-                            throw new IllegalStateException(e1);
-                        }
-                    }
-                    AsterixClusterProperties.INSTANCE.setGlobalRecoveryCompleted(true);
-                    LOGGER.info("Global Recovery Completed");
-                }
-            });
-            state = newState;
-            recoveryThread.start();
-        }
-        return null;
-    }
-
-    private void executeHyracksJob(JobSpecification spec) throws Exception {
-        spec.setMaxReattempts(0);
-        JobId jobId = hcc.startJob(spec);
-        hcc.waitForCompletion(jobId);
-    }
-
-    @Override
-    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
-        // Do nothing
-    }
-
-    @Override
-    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
-        // Do nothing?
-    }
-}
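
The recovery thread above applies a presumed-abort rule to external datasets: a transaction found in BEGIN is rolled backward (pending files dropped, NC artifacts cleaned), one found in READY_TO_COMMIT is rolled forward (indexes committed on the NCs, pending ADD/DROP/APPEND file operations applied), and both paths finish by resetting the dataset state to COMMIT. Below is a minimal, self-contained sketch of that decision only; the enum and method names are hypothetical stand-ins, not the AsterixDB metadata API.

    import java.util.Arrays;
    import java.util.List;

    public class RecoveryDecisionSketch {

        // Hypothetical stand-in for ExternalDatasetTransactionState.
        enum TxnState { BEGIN, READY_TO_COMMIT, COMMIT }

        // Maps the state found in metadata to the recovery actions taken above.
        static List<String> recoveryPlan(TxnState state) {
            switch (state) {
                case BEGIN:            // presumed abort: roll backward
                    return Arrays.asList("drop pending files", "clean NC artifacts", "set state to COMMIT");
                case READY_TO_COMMIT:  // roll forward
                    return Arrays.asList("commit indexes on NCs", "apply pending file ops", "set state to COMMIT");
                default:               // already committed: nothing to do
                    return Arrays.asList("no action");
            }
        }

        public static void main(String[] args) {
            for (TxnState s : TxnState.values()) {
                System.out.println(s + " -> " + recoveryPlan(s));
            }
        }
    }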

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
deleted file mode 100644
index ac760bd..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-
-import edu.uci.ics.hyracks.api.application.IStateDumpHandler;
-import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
-
-public class AsterixStateDumpHandler implements IStateDumpHandler {
-    private final String nodeId;
-    private final Path dumpPath;
-    private final ILifeCycleComponentManager lccm;
-
-    public AsterixStateDumpHandler(String nodeId, String dumpPath, ILifeCycleComponentManager lccm) {
-        this.nodeId = nodeId;
-        this.dumpPath = Paths.get(dumpPath);
-        this.lccm = lccm;
-    }
-
-    @Override
-    public void dumpState(OutputStream os) throws IOException {
-        dumpPath.toFile().mkdirs();
-        File df = dumpPath.resolve(nodeId + "-" + System.currentTimeMillis() + ".dump").toFile();
-        try (FileOutputStream fos = new FileOutputStream(df)) {
-            lccm.dumpState(fos);
-        }
-        os.write(df.getAbsolutePath().getBytes("UTF-8"));
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
deleted file mode 100644
index 99d883c..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.eclipse.jetty.util.component.AbstractLifeCycle;
-
-import edu.uci.ics.asterix.api.http.servlet.APIServlet;
-import edu.uci.ics.asterix.api.http.servlet.AQLAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.ConnectorAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.DDLAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.FeedServlet;
-import edu.uci.ics.asterix.api.http.servlet.QueryAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.QueryResultAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.QueryStatusAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.ShutdownAPIServlet;
-import edu.uci.ics.asterix.api.http.servlet.UpdateAPIServlet;
-import edu.uci.ics.asterix.common.api.AsterixThreadFactory;
-import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
-import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
-import edu.uci.ics.asterix.common.feeds.api.ICentralFeedManager;
-import edu.uci.ics.asterix.feeds.CentralFeedManager;
-import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
-import edu.uci.ics.asterix.metadata.bootstrap.AsterixStateProxy;
-import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
-import edu.uci.ics.hyracks.api.application.ICCApplicationEntryPoint;
-import edu.uci.ics.hyracks.api.client.HyracksConnection;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
-
-public class CCApplicationEntryPoint implements ICCApplicationEntryPoint {
-    private static final Logger LOGGER = Logger.getLogger(CCApplicationEntryPoint.class.getName());
-
-    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
-
-    private Server webServer;
-    private Server jsonAPIServer;
-    private Server feedServer;
-    private ICentralFeedManager centralFeedManager;
-
-    private static IAsterixStateProxy proxy;
-    private ICCApplicationContext appCtx;
-
-    @Override
-    public void start(ICCApplicationContext ccAppCtx, String[] args) throws Exception {
-        this.appCtx = ccAppCtx;
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting Asterix cluster controller");
-        }
-
-        appCtx.setThreadFactory(new AsterixThreadFactory(new LifeCycleComponentManager()));
-        AsterixAppContextInfo.initialize(appCtx, getNewHyracksClientConnection());
-
-        proxy = AsterixStateProxy.registerRemoteObject();
-        appCtx.setDistributedState(proxy);
-
-        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
-        MetadataManager.INSTANCE = new MetadataManager(proxy, metadataProperties);
-
-        AsterixAppContextInfo.getInstance().getCCApplicationContext()
-        .addJobLifecycleListener(FeedLifecycleListener.INSTANCE);
-
-        AsterixExternalProperties externalProperties = AsterixAppContextInfo.getInstance().getExternalProperties();
-        setupWebServer(externalProperties);
-        webServer.start();
-
-        setupJSONAPIServer(externalProperties);
-        jsonAPIServer.start();
-        ExternalLibraryBootstrap.setUpExternaLibraries(false);
-
-        setupFeedServer(externalProperties);
-        feedServer.start();
-        centralFeedManager = CentralFeedManager.getInstance(); 
-        centralFeedManager.start();
-
-        waitUntilServerStart(webServer);
-        waitUntilServerStart(jsonAPIServer);
-        waitUntilServerStart(feedServer);
-
-        AsterixGlobalRecoveryManager.INSTANCE = new AsterixGlobalRecoveryManager(
-                (HyracksConnection) getNewHyracksClientConnection());
-        ClusterManager.INSTANCE.registerSubscriber(AsterixGlobalRecoveryManager.INSTANCE);
-
-        ccAppCtx.addClusterLifecycleListener(ClusterLifecycleListener.INSTANCE);
-    }
-
-    private void waitUntilServerStart(AbstractLifeCycle webServer) throws Exception {
-        while (!webServer.isStarted()) {
-            if (webServer.isFailed()) {
-                throw new Exception("Server failed to start");
-            }
-            Thread.sleep(1000); // poll; Object.wait() would require holding the monitor
-        }
-    }
-
-    @Override
-    public void stop() throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Stopping Asterix cluster controller");
-        }
-        AsterixStateProxy.unregisterRemoteObject();
-
-        webServer.stop();
-        jsonAPIServer.stop();
-        feedServer.stop();
-    }
-
-    private IHyracksClientConnection getNewHyracksClientConnection() throws Exception {
-        String strIP = appCtx.getCCContext().getClusterControllerInfo().getClientNetAddress();
-        int port = appCtx.getCCContext().getClusterControllerInfo().getClientNetPort();
-        return new HyracksConnection(strIP, port);
-    }
-
-    private void setupWebServer(AsterixExternalProperties externalProperties) throws Exception {
-
-        webServer = new Server(externalProperties.getWebInterfacePort());
-
-        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
-        context.setContextPath("/");
-
-        IHyracksClientConnection hcc = getNewHyracksClientConnection();
-        context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
-
-        webServer.setHandler(context);
-        context.addServlet(new ServletHolder(new APIServlet()), "/*");
-    }
-
-    private void setupJSONAPIServer(AsterixExternalProperties externalProperties) throws Exception {
-        jsonAPIServer = new Server(externalProperties.getAPIServerPort());
-
-        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
-        context.setContextPath("/");
-
-        IHyracksClientConnection hcc = getNewHyracksClientConnection();
-        context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
-
-        jsonAPIServer.setHandler(context);
-        context.addServlet(new ServletHolder(new QueryAPIServlet()), "/query");
-        context.addServlet(new ServletHolder(new QueryStatusAPIServlet()), "/query/status");
-        context.addServlet(new ServletHolder(new QueryResultAPIServlet()), "/query/result");
-        context.addServlet(new ServletHolder(new UpdateAPIServlet()), "/update");
-        context.addServlet(new ServletHolder(new DDLAPIServlet()), "/ddl");
-        context.addServlet(new ServletHolder(new AQLAPIServlet()), "/aql");
-        context.addServlet(new ServletHolder(new ConnectorAPIServlet()), "/connector");
-        context.addServlet(new ServletHolder(new ShutdownAPIServlet()), "/admin/shutdown");
-    }
-
-    private void setupFeedServer(AsterixExternalProperties externalProperties) throws Exception {
-        feedServer = new Server(externalProperties.getFeedServerPort());
-
-        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
-        context.setContextPath("/");
-
-        IHyracksClientConnection hcc = getNewHyracksClientConnection();
-        context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
-
-        feedServer.setHandler(context);
-        context.addServlet(new ServletHolder(new FeedServlet()), "/");
-   
-        // add paths here
-    }
-}
\ No newline at end of file
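
waitUntilServerStart above simply polls each embedded Jetty server until it reports started, failing fast if it reports failure. A small, self-contained sketch of that polling pattern, written against a hypothetical lifecycle interface rather than Jetty's AbstractLifeCycle:

    public class StartupPollSketch {

        // Hypothetical stand-in for the lifecycle object being polled.
        interface LifeCycle {
            boolean isStarted();
            boolean isFailed();
        }

        // Poll once a second until the component starts; fail fast if it reports failure.
        static void waitUntilStarted(LifeCycle component) throws Exception {
            while (!component.isStarted()) {
                if (component.isFailed()) {
                    throw new Exception("Server failed to start");
                }
                Thread.sleep(1000);
            }
        }

        public static void main(String[] args) throws Exception {
            long readyAt = System.currentTimeMillis() + 3000;
            LifeCycle demo = new LifeCycle() {
                public boolean isStarted() { return System.currentTimeMillis() >= readyAt; }
                public boolean isFailed() { return false; }
            };
            waitUntilStarted(demo);
            System.out.println("started");
        }
    }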

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
deleted file mode 100644
index c7821b2..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.api.IClusterEventsSubscriber;
-import edu.uci.ics.asterix.common.api.IClusterManagementWork;
-import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
-import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse.Status;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
-import edu.uci.ics.asterix.metadata.cluster.AddNodeWorkResponse;
-import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
-import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWork;
-import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWorkResponse;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.hyracks.api.application.IClusterLifecycleListener;
-
-public class ClusterLifecycleListener implements IClusterLifecycleListener {
-
-    private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName());
-
-    private static final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<Set<IClusterManagementWork>>();
-
-    private static ClusterWorkExecutor eventHandler = new ClusterWorkExecutor(workRequestQueue);
-
-    private static List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<IClusterManagementWorkResponse>();
-
-    public static ClusterLifecycleListener INSTANCE = new ClusterLifecycleListener();
-
-    private ClusterLifecycleListener() {
-        Thread t = new Thread(eventHandler);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting cluster event handler");
-        }
-        t.start();
-    }
-
-    public enum ClusterEventType {
-        NODE_JOIN,
-        NODE_FAILURE
-    }
-
-    @Override
-    public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("NC: " + nodeId + " joined");
-        }
-        AsterixClusterProperties.INSTANCE.addNCConfiguration(nodeId, ncConfiguration);
-        Set<String> nodeAddition = new HashSet<String>();
-        nodeAddition.add(nodeId);
-        updateProgress(ClusterEventType.NODE_JOIN, nodeAddition);
-        Set<IClusterEventsSubscriber> subscribers = ClusterManager.INSTANCE.getRegisteredClusterEventSubscribers();
-        Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
-        for (IClusterEventsSubscriber sub : subscribers) {
-            Set<IClusterManagementWork> workRequest = sub.notifyNodeJoin(nodeId);
-            if (workRequest != null && !workRequest.isEmpty()) {
-                work.addAll(workRequest);
-            }
-        }
-        if (!work.isEmpty()) {
-            executeWorkSet(work);
-        }
-
-    }
-
-    public void notifyNodeFailure(Set<String> deadNodeIds) {
-        for (String deadNode : deadNodeIds) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("NC: " + deadNode + " left");
-            }
-            AsterixClusterProperties.INSTANCE.removeNCConfiguration(deadNode);
-        }
-        updateProgress(ClusterEventType.NODE_FAILURE, deadNodeIds);
-        Set<IClusterEventsSubscriber> subscribers = ClusterManager.INSTANCE.getRegisteredClusterEventSubscribers();
-        Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
-        for (IClusterEventsSubscriber sub : subscribers) {
-            Set<IClusterManagementWork> workRequest = sub.notifyNodeFailure(deadNodeIds);
-            if (workRequest != null && !workRequest.isEmpty()) {
-                work.addAll(workRequest);
-            }
-        }
-        if (!work.isEmpty()) {
-            executeWorkSet(work);
-        }
-    }
-
-    private void updateProgress(ClusterEventType eventType, Set<String> nodeIds) {
-        List<IClusterManagementWorkResponse> completedResponses = new ArrayList<IClusterManagementWorkResponse>();
-        boolean isComplete = false;
-        for (IClusterManagementWorkResponse resp : pendingWorkResponses) {
-            switch (eventType) {
-                case NODE_FAILURE:
-                    isComplete = ((RemoveNodeWorkResponse) resp).updateProgress(nodeIds);
-                    if (isComplete) {
-                        resp.setStatus(Status.SUCCESS);
-                        resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp);
-                        completedResponses.add(resp);
-                    }
-                    break;
-
-                case NODE_JOIN:
-                    isComplete = ((AddNodeWorkResponse) resp).updateProgress(nodeIds.iterator().next());
-                    if (isComplete) {
-                        resp.setStatus(Status.SUCCESS);
-                        resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp);
-                        completedResponses.add(resp);
-                    }
-                    break;
-            }
-        }
-        pendingWorkResponses.removeAll(completedResponses);
-    }
-
-    private void executeWorkSet(Set<IClusterManagementWork> workSet) {
-        int nodesToAdd = 0;
-        Set<String> nodesToRemove = new HashSet<String>();
-        Set<AddNodeWork> nodeAdditionRequests = new HashSet<AddNodeWork>();
-        Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
-        for (IClusterManagementWork w : workSet) {
-            switch (w.getClusterManagementWorkType()) {
-                case ADD_NODE:
-                    if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodesRequested()) {
-                        nodesToAdd = ((AddNodeWork) w).getNumberOfNodesRequested();
-                    }
-                    nodeAdditionRequests.add((AddNodeWork) w);
-                    break;
-                case REMOVE_NODE:
-                    nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
-                    nodeRemovalRequests.add(w);
-                    RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
-                    pendingWorkResponses.add(response);
-                    break;
-            }
-        }
-
-        List<String> addedNodes = new ArrayList<String>();
-        String asterixInstanceName = AsterixClusterProperties.INSTANCE.getCluster().getInstanceName();
-        for (int i = 0; i < nodesToAdd; i++) {
-            Node node = AsterixClusterProperties.INSTANCE.getAvailableSubstitutionNode();
-            if (node != null) {
-                try {
-                    ClusterManager.INSTANCE.addNode(node);
-                    addedNodes.add(asterixInstanceName + "_" + node.getId());
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Added NC at:" + node.getId());
-                    }
-                } catch (AsterixException e) {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Unable to add NC at:" + node.getId());
-                    }
-                    e.printStackTrace();
-                }
-            } else {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unable to add NC: no more available nodes");
-                }
-
-            }
-        }
-
-        for (AddNodeWork w : nodeAdditionRequests) {
-            int n = w.getNumberOfNodesRequested();
-            List<String> nodesToBeAddedForWork = new ArrayList<String>();
-            for (int i = 0; i < n && i < addedNodes.size(); i++) {
-                nodesToBeAddedForWork.add(addedNodes.get(i));
-            }
-            if (nodesToBeAddedForWork.isEmpty()) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Unable to satisfy request by " + w);
-                }
-                AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
-                response.setStatus(Status.FAILURE);
-                w.getSourceSubscriber().notifyRequestCompletion(response);
-
-            } else {
-                AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
-                pendingWorkResponses.add(response);
-            }
-        }
-
-    }
-}
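
executeWorkSet above coalesces concurrent ADD_NODE requests by adding only the maximum number of nodes requested, then hands each request a prefix of the added-node list; a request that receives no nodes is marked as failed. A compact sketch of that allocation policy with hypothetical types (the real code works with AddNodeWork and AddNodeWorkResponse objects):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class NodeAllocationSketch {

        // requestedCounts: nodes asked for by each ADD_NODE request; availableNodes: substitution nodes.
        static List<List<String>> allocate(List<Integer> requestedCounts, List<String> availableNodes) {
            int nodesToAdd = 0;
            for (int n : requestedCounts) {
                nodesToAdd = Math.max(nodesToAdd, n); // coalesce: add only the maximum requested
            }
            List<String> added = availableNodes.subList(0, Math.min(nodesToAdd, availableNodes.size()));

            List<List<String>> perRequest = new ArrayList<>();
            for (int n : requestedCounts) {
                // Each request gets at most the first n added nodes; an empty list means the request failed.
                perRequest.add(new ArrayList<>(added.subList(0, Math.min(n, added.size()))));
            }
            return perRequest;
        }

        public static void main(String[] args) {
            System.out.println(allocate(Arrays.asList(2, 3), Arrays.asList("nc1", "nc2", "nc3", "nc4")));
            // prints [[nc1, nc2], [nc1, nc2, nc3]]
        }
    }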

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterWorkExecutor.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
deleted file mode 100644
index a4faea7..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.api.IClusterManagementWork;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
-import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
-import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWork;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-
-public class ClusterWorkExecutor implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(ClusterWorkExecutor.class.getName());
-
-    private final LinkedBlockingQueue<Set<IClusterManagementWork>> inbox;
-
-    public ClusterWorkExecutor(LinkedBlockingQueue<Set<IClusterManagementWork>> inbox) {
-        this.inbox = inbox;
-    }
-
-    @Override
-    public void run() {
-        while (true) {
-            try {
-                Set<IClusterManagementWork> workSet = inbox.take();
-                int nodesToAdd = 0;
-                Set<String> nodesToRemove = new HashSet<String>();
-                Set<IClusterManagementWork> nodeAdditionRequests = new HashSet<IClusterManagementWork>();
-                Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
-                for (IClusterManagementWork w : workSet) {
-                    switch (w.getClusterManagementWorkType()) {
-                        case ADD_NODE:
-                            if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodesRequested()) {
-                                nodesToAdd = ((AddNodeWork) w).getNumberOfNodesRequested();
-                            }
-                            nodeAdditionRequests.add(w);
-                            break;
-                        case REMOVE_NODE:
-                            nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
-                            nodeRemovalRequests.add(w);
-                            break;
-                    }
-                }
-
-                Set<Node> addedNodes = new HashSet<Node>();
-                for (int i = 0; i < nodesToAdd; i++) {
-                    Node node = AsterixClusterProperties.INSTANCE.getAvailableSubstitutionNode();
-                    if (node != null) {
-                        try {
-                            ClusterManager.INSTANCE.addNode(node);
-                            addedNodes.add(node);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Added NC at:" + node.getId());
-                            }
-                        } catch (AsterixException e) {
-                            if (LOGGER.isLoggable(Level.WARNING)) {
-                                LOGGER.warning("Unable to add NC at:" + node.getId());
-                            }
-                            e.printStackTrace();
-                        }
-                    } else {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to add NC: no more available nodes");
-                        }
-                    }
-                }
-
-            } catch (InterruptedException e) {
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("interruped" + e.getMessage());
-                }
-                throw new IllegalStateException(e);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("Unexpected exception in handling cluster event" + e.getMessage());
-                }
-            }
-
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
deleted file mode 100755
index 31aca3d..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.rmi.RemoteException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.Unmarshaller;
-
-import edu.uci.ics.asterix.common.exceptions.ACIDException;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.external.library.ExternalLibrary;
-import edu.uci.ics.asterix.external.library.LibraryAdapter;
-import edu.uci.ics.asterix.external.library.LibraryFunction;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
-import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
-import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.metadata.feeds.AdapterIdentifier;
-import edu.uci.ics.asterix.metadata.functions.ExternalLibraryManager;
-import edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat;
-
-public class ExternalLibraryBootstrap {
-
-    private static Logger LOGGER = Logger.getLogger(ExternalLibraryBootstrap.class.getName());
-
-    public static void setUpExternaLibraries(boolean isMetadataNode) throws Exception {
-
-        Map<String, List<String>> uninstalledLibs = null;
-        if (isMetadataNode) {
-            uninstalledLibs = uninstallLibraries();
-        }
-
-        File installLibDir = getLibraryInstallDir();
-        if (installLibDir.exists()) {
-            for (String dataverse : installLibDir.list()) {
-                File dataverseDir = new File(installLibDir, dataverse);
-                String[] libraries = dataverseDir.list();
-                for (String library : libraries) {
-                    registerLibrary(dataverse, library, isMetadataNode, installLibDir);
-                    if (isMetadataNode) {
-                        File libraryDir = new File(installLibDir.getAbsolutePath() + File.separator + dataverse
-                                + File.separator + library);
-                        installLibraryIfNeeded(dataverse, libraryDir, uninstalledLibs);
-                    }
-                }
-            }
-        }
-    }
-
-    private static Map<String, List<String>> uninstallLibraries() throws Exception {
-        Map<String, List<String>> uninstalledLibs = new HashMap<String, List<String>>();
-        File uninstallLibDir = getLibraryUninstallDir();
-        String[] uninstallLibNames;
-        if (uninstallLibDir.exists()) {
-            uninstallLibNames = uninstallLibDir.list();
-            for (String uninstallLibName : uninstallLibNames) {
-                String[] components = uninstallLibName.split("\\.");
-                String dataverse = components[0];
-                String libName = components[1];
-                uninstallLibrary(dataverse, libName);
-                new File(uninstallLibDir, uninstallLibName).delete();
-                List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
-                if (uninstalledLibsInDv == null) {
-                    uninstalledLibsInDv = new ArrayList<String>();
-                    uninstalledLibs.put(dataverse, uninstalledLibsInDv);
-                }
-                uninstalledLibsInDv.add(libName);
-            }
-        }
-        return uninstalledLibs;
-    }
-
-    private static boolean uninstallLibrary(String dataverse, String libraryName) throws AsterixException,
-            RemoteException, ACIDException {
-        MetadataTransactionContext mdTxnCtx = null;
-        try {
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
-            if (dv == null) {
-                return false;
-            }
-
-            edu.uci.ics.asterix.metadata.entities.Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx,
-                    dataverse, libraryName);
-            if (library == null) {
-                return false;
-            }
-
-            List<edu.uci.ics.asterix.metadata.entities.Function> functions = MetadataManager.INSTANCE
-                    .getDataverseFunctions(mdTxnCtx, dataverse);
-            for (edu.uci.ics.asterix.metadata.entities.Function function : functions) {
-                if (function.getName().startsWith(libraryName + "#")) {
-                    MetadataManager.INSTANCE.dropFunction(mdTxnCtx, new FunctionSignature(dataverse,
-                            function.getName(), function.getArity()));
-                }
-            }
-
-            List<edu.uci.ics.asterix.metadata.entities.DatasourceAdapter> adapters = MetadataManager.INSTANCE
-                    .getDataverseAdapters(mdTxnCtx, dataverse);
-            for (edu.uci.ics.asterix.metadata.entities.DatasourceAdapter adapter : adapters) {
-                if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
-                    MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
-                }
-            }
-
-            MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-            throw new AsterixException(e);
-        }
-        return true;
-    }
-
-    // Each element of a library is installed as part of a transaction. Any
-    // failure in installing an element does not affect the installation of other
-    // libraries.
-    private static void installLibraryIfNeeded(String dataverse, final File libraryDir,
-            Map<String, List<String>> uninstalledLibs) throws Exception {
-
-        String libraryName = libraryDir.getName().trim();
-        List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
-        boolean wasUninstalled = uninstalledLibsInDv != null && uninstalledLibsInDv.contains(libraryName);
-
-        MetadataTransactionContext mdTxnCtx = null;
-        MetadataManager.INSTANCE.acquireWriteLatch();
-        try {
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            edu.uci.ics.asterix.metadata.entities.Library libraryInMetadata = MetadataManager.INSTANCE.getLibrary(
-                    mdTxnCtx, dataverse, libraryName);
-            if (libraryInMetadata != null && !wasUninstalled) {
-                return;
-            }
-
-            String[] libraryDescriptors = libraryDir.list(new FilenameFilter() {
-                @Override
-                public boolean accept(File dir, String name) {
-                    return name.endsWith(".xml");
-                }
-            });
-
-            if (libraryDescriptors.length == 0) {
-                throw new Exception("No library descriptor defined");
-            } else if (libraryDescriptors.length > 1) {
-                throw new Exception("More than one library descriptor defined");
-            }
-
-            ExternalLibrary library = getLibrary(new File(libraryDir + File.separator + libraryDescriptors[0]));
-
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
-            if (dv == null) {
-                MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverse,
-                        NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, IMetadataEntity.PENDING_NO_OP));
-            }
-            if (library.getLibraryFunctions() != null) {
-                for (LibraryFunction function : library.getLibraryFunctions().getLibraryFunction()) {
-                    String[] fargs = function.getArguments().trim().split(",");
-                    List<String> args = new ArrayList<String>();
-                    for (String arg : fargs) {
-                        args.add(arg);
-                    }
-                    edu.uci.ics.asterix.metadata.entities.Function f = new edu.uci.ics.asterix.metadata.entities.Function(
-                            dataverse, libraryName + "#" + function.getName().trim(), args.size(), args, function
-                                    .getReturnType().trim(), function.getDefinition().trim(), library.getLanguage()
-                                    .trim(), function.getFunctionType().trim());
-                    MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Installed function: " + libraryName + "#" + function.getName().trim());
-                    }
-                }
-            }
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Installed functions contained in library: " + libraryName);
-            }
-
-            if (library.getLibraryAdapters() != null) {
-                for (LibraryAdapter adapter : library.getLibraryAdapters().getLibraryAdapter()) {
-                    String adapterFactoryClass = adapter.getFactoryClass().trim();
-                    String adapterName = libraryName + "#" + adapter.getName().trim();
-                    AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
-                    DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass, AdapterType.EXTERNAL);
-                    MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Installed adapter: " + adapterName);
-                    }
-                }
-            }
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Installed adapters contained in library: " + libraryName);
-            }
-
-            MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new edu.uci.ics.asterix.metadata.entities.Library(dataverse,
-                    libraryName));
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Added library " + libraryName + " to Metadata");
-            }
-
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.severe("Exception in installing library " + libraryName);
-            }
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-        } finally {
-            MetadataManager.INSTANCE.releaseWriteLatch();
-        }
-    }
-
-    private static void registerLibrary(String dataverse, String libraryName, boolean isMetadataNode, File installLibDir)
-            throws Exception {
-        ClassLoader classLoader = getLibraryClassLoader(dataverse, libraryName);
-        ExternalLibraryManager.registerLibraryClassLoader(dataverse, libraryName, classLoader);
-    }
-
-    private static ExternalLibrary getLibrary(File libraryXMLPath) throws Exception {
-        JAXBContext configCtx = JAXBContext.newInstance(ExternalLibrary.class);
-        Unmarshaller unmarshaller = configCtx.createUnmarshaller();
-        ExternalLibrary library = (ExternalLibrary) unmarshaller.unmarshal(libraryXMLPath);
-        return library;
-    }
-
-    private static ClassLoader getLibraryClassLoader(String dataverse, String libraryName) throws Exception {
-
-        File installDir = getLibraryInstallDir();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Installing library " + libraryName + " in dataverse " + dataverse + "."
-                    + " Install Directory: " + installDir.getAbsolutePath());
-        }
-
-        File libDir = new File(installDir.getAbsolutePath() + File.separator + dataverse + File.separator + libraryName);
-        FilenameFilter jarFileFilter = new FilenameFilter() {
-            public boolean accept(File dir, String name) {
-                return name.endsWith(".jar");
-            }
-        };
-
-        String[] jarsInLibDir = libDir.list(jarFileFilter);
-        if (jarsInLibDir.length > 1) {
-            throw new Exception("Incorrect library structure: found multiple library jars");
-        }
-        if (jarsInLibDir.length == 0) {
-            throw new Exception("Incorrect library structure: could not find library jar");
-        }
-
-        File libJar = new File(libDir, jarsInLibDir[0]);
-        File libDependencyDir = new File(libDir.getAbsolutePath() + File.separator + "lib");
-        int numDependencies = 1;
-        String[] libraryDependencies = null;
-        if (libDependencyDir.exists()) {
-            libraryDependencies = libDependencyDir.list(jarFileFilter);
-            numDependencies += libraryDependencies.length;
-        }
-
-        ClassLoader parentClassLoader = ExternalLibraryBootstrap.class.getClassLoader();
-        URL[] urls = new URL[numDependencies];
-        int count = 0;
-        urls[count++] = libJar.toURL();
-
-        if (libraryDependencies != null && libraryDependencies.length > 0) {
-            for (String dependency : libraryDependencies) {
-                File file = new File(libDependencyDir + File.separator + dependency);
-                urls[count++] = file.toURL();
-            }
-        }
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            StringBuilder logMesg = new StringBuilder("Classpath for library " + libraryName + "\n");
-            for (URL url : urls) {
-                logMesg.append(url.getFile() + "\n");
-            }
-            LOGGER.info(logMesg.toString());
-        }
-
-        ClassLoader classLoader = new URLClassLoader(urls, parentClassLoader);
-        return classLoader;
-    }
-
-    private static File getLibraryInstallDir() {
-        String workingDir = System.getProperty("user.dir");
-        return new File(workingDir + File.separator + "library");
-    }
-
-    private static File getLibraryUninstallDir() {
-        String workingDir = System.getProperty("user.dir");
-        return new File(workingDir + File.separator + "uninstall");
-    }
-
-}
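
For context on the class-loading logic removed above (getLibraryClassLoader): a minimal, standalone
sketch of building a per-library class loader from a single library jar plus an optional lib/ directory
of dependency jars might look like the following. The class name and the toURI()-based URL conversion
are illustrative assumptions, not part of the original code.

    import java.io.File;
    import java.io.FilenameFilter;
    import java.net.URL;
    import java.net.URLClassLoader;
    import java.util.ArrayList;
    import java.util.List;

    // Illustrative sketch: builds a URLClassLoader over <libDir>/<library>.jar plus <libDir>/lib/*.jar,
    // mirroring the directory structure expected by the removed getLibraryClassLoader(...).
    public class LibraryClassLoaderSketch {

        public static ClassLoader buildClassLoader(File libDir) throws Exception {
            FilenameFilter jarFileFilter = new FilenameFilter() {
                public boolean accept(File dir, String name) {
                    return name.endsWith(".jar");
                }
            };

            String[] jarsInLibDir = libDir.list(jarFileFilter);
            if (jarsInLibDir == null || jarsInLibDir.length == 0) {
                throw new Exception("Incorrect library structure: could not find library jar");
            }
            if (jarsInLibDir.length > 1) {
                throw new Exception("Incorrect library structure: found multiple library jars");
            }

            List<URL> urls = new ArrayList<URL>();
            urls.add(new File(libDir, jarsInLibDir[0]).toURI().toURL());

            // Optional dependency jars under <libDir>/lib
            File libDependencyDir = new File(libDir, "lib");
            if (libDependencyDir.isDirectory()) {
                for (String dependency : libDependencyDir.list(jarFileFilter)) {
                    urls.add(new File(libDependencyDir, dependency).toURI().toURL());
                }
            }

            return new URLClassLoader(urls.toArray(new URL[urls.size()]),
                    LibraryClassLoaderSketch.class.getClassLoader());
        }
    }

Resolving the jar paths through toURI().toURL() avoids the deprecated File.toURL() call and handles
paths containing spaces; otherwise the structure follows the removed method above.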

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedBootstrap.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedBootstrap.java
deleted file mode 100644
index 7d22091..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedBootstrap.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import edu.uci.ics.asterix.feeds.CentralFeedManager;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-
-public class FeedBootstrap {
-
-    public final static String FEEDS_METADATA_DV = "feeds_metadata";
-    public final static String FAILED_TUPLE_DATASET = "failed_tuple";
-    public final static String FAILED_TUPLE_DATASET_TYPE = "FailedTupleType";
-    public final static String FAILED_TUPLE_DATASET_KEY = "id";
-
-    public static void setUpInitialArtifacts() throws Exception {
-
-        StringBuilder builder = new StringBuilder();
-        try {
-            builder.append("create dataverse " + FEEDS_METADATA_DV + ";" + "\n");
-            builder.append("use dataverse " + FEEDS_METADATA_DV + ";" + "\n");
-
-            builder.append("create type " + FAILED_TUPLE_DATASET_TYPE + " as open { ");
-
-            String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple",
-                    "message", "timestamp" };
-            IAType[] fieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                    BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
-
-            for (int i = 0; i < fieldNames.length; i++) {
-                if (i > 0) {
-                    builder.append(",");
-                }
-                builder.append(fieldNames[i] + ":");
-                builder.append(fieldTypes[i].getTypeName());
-            }
-            builder.append("}" + ";" + "\n");
-
-            builder.append("create dataset " + FAILED_TUPLE_DATASET + " " + "(" + FAILED_TUPLE_DATASET_TYPE + ")" + " "
-                    + "primary key " + FAILED_TUPLE_DATASET_KEY + " on  " + MetadataConstants.METADATA_NODEGROUP_NAME
-                    + ";");
-
-            CentralFeedManager.AQLExecutor.executeAQL(builder.toString());
-        } catch (Exception e) {
-            e.printStackTrace();
-            System.out.println("Error: " + builder.toString());
-            throw e;
-        }
-    }
-
-}
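
For reference, the AQL assembled by setUpInitialArtifacts() above comes out roughly as follows; the
nodegroup name is whatever MetadataConstants.METADATA_NODEGROUP_NAME resolves to (shown here as a
placeholder), and each BuiltinType.ASTRING field renders as its type name, string:

    create dataverse feeds_metadata;
    use dataverse feeds_metadata;
    create type FailedTupleType as open { id:string,dataverseName:string,feedName:string,targetDataset:string,tuple:string,message:string,timestamp:string};
    create dataset failed_tuple (FailedTupleType) primary key id on <METADATA_NODEGROUP_NAME>;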


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
deleted file mode 100644
index 93917cc..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ /dev/null
@@ -1,3069 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.translator;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.rmi.RemoteException;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.Random;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.StringUtils;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.api.common.APIFramework;
-import edu.uci.ics.asterix.api.common.Job;
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.CompactStatement;
-import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFeedPolicyStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
-import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
-import edu.uci.ics.asterix.aql.expression.CreatePrimaryFeedStatement;
-import edu.uci.ics.asterix.aql.expression.CreateSecondaryFeedStatement;
-import edu.uci.ics.asterix.aql.expression.DatasetDecl;
-import edu.uci.ics.asterix.aql.expression.DataverseDecl;
-import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
-import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.DropStatement;
-import edu.uci.ics.asterix.aql.expression.ExternalDetailsDecl;
-import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
-import edu.uci.ics.asterix.aql.expression.FeedPolicyDropStatement;
-import edu.uci.ics.asterix.aql.expression.FunctionDecl;
-import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
-import edu.uci.ics.asterix.aql.expression.IDatasetDetailsDecl;
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
-import edu.uci.ics.asterix.aql.expression.InsertStatement;
-import edu.uci.ics.asterix.aql.expression.InternalDetailsDecl;
-import edu.uci.ics.asterix.aql.expression.LoadStatement;
-import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
-import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
-import edu.uci.ics.asterix.aql.expression.Query;
-import edu.uci.ics.asterix.aql.expression.RefreshExternalDatasetStatement;
-import edu.uci.ics.asterix.aql.expression.RunStatement;
-import edu.uci.ics.asterix.aql.expression.SetStatement;
-import edu.uci.ics.asterix.aql.expression.SubscribeFeedStatement;
-import edu.uci.ics.asterix.aql.expression.TypeDecl;
-import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
-import edu.uci.ics.asterix.aql.expression.TypeExpression;
-import edu.uci.ics.asterix.aql.expression.WriteStatement;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
-import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.common.exceptions.ACIDException;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.feeds.FeedActivity.FeedActivityDetails;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
-import edu.uci.ics.asterix.common.feeds.FeedId;
-import edu.uci.ics.asterix.common.feeds.FeedJointKey;
-import edu.uci.ics.asterix.common.feeds.FeedPolicyAccessor;
-import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
-import edu.uci.ics.asterix.common.feeds.api.IFeedJoint.FeedJointType;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.asterix.feeds.CentralFeedManager;
-import edu.uci.ics.asterix.feeds.FeedJoint;
-import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
-import edu.uci.ics.asterix.file.DatasetOperations;
-import edu.uci.ics.asterix.file.DataverseOperations;
-import edu.uci.ics.asterix.file.ExternalIndexingOperations;
-import edu.uci.ics.asterix.file.FeedOperations;
-import edu.uci.ics.asterix.file.IndexOperations;
-import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
-import edu.uci.ics.asterix.metadata.IDatasetDetails;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints;
-import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.CompactionPolicy;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Datatype;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.ExternalFile;
-import edu.uci.ics.asterix.metadata.entities.Feed;
-import edu.uci.ics.asterix.metadata.entities.Feed.FeedType;
-import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
-import edu.uci.ics.asterix.metadata.entities.Function;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.NodeGroup;
-import edu.uci.ics.asterix.metadata.entities.PrimaryFeed;
-import edu.uci.ics.asterix.metadata.entities.SecondaryFeed;
-import edu.uci.ics.asterix.metadata.feeds.FeedLifecycleEventSubscriber;
-import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
-import edu.uci.ics.asterix.metadata.feeds.IFeedAdapterFactory;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
-import edu.uci.ics.asterix.metadata.utils.MetadataLockManager;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.types.TypeSignature;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
-import edu.uci.ics.asterix.result.ResultReader;
-import edu.uci.ics.asterix.result.ResultUtils;
-import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
-import edu.uci.ics.asterix.runtime.operators.std.FlushDatasetOperatorDescriptor;
-import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetIdFactory;
-import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
-import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledConnectFeedStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDeleteStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexCompactStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledInsertStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
-import edu.uci.ics.asterix.translator.TypeTranslator;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
-import edu.uci.ics.hyracks.algebricks.data.IAWriterFactory;
-import edu.uci.ics.hyracks.algebricks.data.IResultSerializerFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-
-/*
- * Provides functionality for executing a batch of AQL statements (queries included)
- * sequentially.
- */
-public class AqlTranslator extends AbstractAqlTranslator {
-
-    private static Logger LOGGER = Logger.getLogger(AqlTranslator.class.getName());
-
-    private enum ProgressState {
-        NO_PROGRESS,
-        ADDED_PENDINGOP_RECORD_TO_METADATA
-    }
-
-    public static enum ResultDelivery {
-        SYNC,
-        ASYNC,
-        ASYNC_DEFERRED
-    }
-
-    public static final boolean IS_DEBUG_MODE = false;//true
-    private final List<Statement> aqlStatements;
-    private final SessionConfig sessionConfig;
-    private Dataverse activeDefaultDataverse;
-    private final List<FunctionDecl> declaredFunctions;
-
-    public AqlTranslator(List<Statement> aqlStatements, SessionConfig conf) throws MetadataException, AsterixException {
-        this.aqlStatements = aqlStatements;
-        this.sessionConfig = conf;
-        declaredFunctions = getDeclaredFunctions(aqlStatements);
-    }
-
-    private List<FunctionDecl> getDeclaredFunctions(List<Statement> statements) {
-        List<FunctionDecl> functionDecls = new ArrayList<FunctionDecl>();
-        for (Statement st : statements) {
-            if (st.getKind().equals(Statement.Kind.FUNCTION_DECL)) {
-                functionDecls.add((FunctionDecl) st);
-            }
-        }
-        return functionDecls;
-    }
-
-    /**
-     * Compiles and submits for execution a list of AQL statements.
-     * 
-     * @param hcc
-     *            A Hyracks client connection that is used to submit a jobspec to Hyracks.
-     * @param hdc
-     *            A Hyracks dataset client object that is used to read the results.
-     * @param resultDelivery
-     *            The result delivery mode: SYNC reads the results synchronously, while ASYNC and ASYNC_DEFERRED
-     *            return them asynchronously.
-     * @throws Exception
-     */
-    public void compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery)
-            throws Exception {
-        int resultSetIdCounter = 0;
-        FileSplit outputFile = null;
-        IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
-        IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
-        Map<String, String> config = new HashMap<String, String>();
-
-        for (Statement stmt : aqlStatements) {
-            validateOperation(activeDefaultDataverse, stmt);
-            AqlMetadataProvider metadataProvider = new AqlMetadataProvider(activeDefaultDataverse,
-                    CentralFeedManager.getInstance());
-            metadataProvider.setWriterFactory(writerFactory);
-            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
-            metadataProvider.setOutputFile(outputFile);
-            metadataProvider.setConfig(config);
-            switch (stmt.getKind()) {
-                case SET: {
-                    handleSetStatement(metadataProvider, stmt, config);
-                    break;
-                }
-                case DATAVERSE_DECL: {
-                    activeDefaultDataverse = handleUseDataverseStatement(metadataProvider, stmt);
-                    break;
-                }
-                case CREATE_DATAVERSE: {
-                    handleCreateDataverseStatement(metadataProvider, stmt);
-                    break;
-                }
-                case DATASET_DECL: {
-                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-                case CREATE_INDEX: {
-                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-                case TYPE_DECL: {
-                    handleCreateTypeStatement(metadataProvider, stmt);
-                    break;
-                }
-                case NODEGROUP_DECL: {
-                    handleCreateNodeGroupStatement(metadataProvider, stmt);
-                    break;
-                }
-                case DATAVERSE_DROP: {
-                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-                case DATASET_DROP: {
-                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-                case INDEX_DROP: {
-                    handleIndexDropStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-                case TYPE_DROP: {
-                    handleTypeDropStatement(metadataProvider, stmt);
-                    break;
-                }
-                case NODEGROUP_DROP: {
-                    handleNodegroupDropStatement(metadataProvider, stmt);
-                    break;
-                }
-
-                case CREATE_FUNCTION: {
-                    handleCreateFunctionStatement(metadataProvider, stmt);
-                    break;
-                }
-
-                case FUNCTION_DROP: {
-                    handleFunctionDropStatement(metadataProvider, stmt);
-                    break;
-                }
-
-                case LOAD: {
-                    handleLoadStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-                case INSERT: {
-                    handleInsertStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-                case DELETE: {
-                    handleDeleteStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case CREATE_PRIMARY_FEED:
-                case CREATE_SECONDARY_FEED: {
-                    handleCreateFeedStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case DROP_FEED: {
-                    handleDropFeedStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case DROP_FEED_POLICY: {
-                    handleDropFeedPolicyStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case CONNECT_FEED: {
-                    handleConnectFeedStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case DISCONNECT_FEED: {
-                    handleDisconnectFeedStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case SUBSCRIBE_FEED: {
-                    handleSubscribeFeedStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case CREATE_FEED_POLICY: {
-                    handleCreateFeedPolicyStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case QUERY: {
-                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
-                    metadataProvider.setResultAsyncMode(resultDelivery == ResultDelivery.ASYNC
-                            || resultDelivery == ResultDelivery.ASYNC_DEFERRED);
-                    handleQuery(metadataProvider, (Query) stmt, hcc, hdc, resultDelivery);
-                    break;
-                }
-
-                case COMPACT: {
-                    handleCompactStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case EXTERNAL_DATASET_REFRESH: {
-                    handleExternalDatasetRefreshStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-
-                case WRITE: {
-                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(metadataProvider, stmt);
-                    if (result.first != null) {
-                        writerFactory = result.first;
-                    }
-                    outputFile = result.second;
-                    break;
-                }
-
-                case RUN: {
-                    handleRunStatement(metadataProvider, stmt, hcc);
-                    break;
-                }
-            }
-        }
-    }
-
-    private void handleSetStatement(AqlMetadataProvider metadataProvider, Statement stmt, Map<String, String> config)
-            throws RemoteException, ACIDException {
-        SetStatement ss = (SetStatement) stmt;
-        String pname = ss.getPropName();
-        String pvalue = ss.getPropValue();
-        config.put(pname, pvalue);
-    }
-
-    private Pair<IAWriterFactory, FileSplit> handleWriteStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws InstantiationException, IllegalAccessException, ClassNotFoundException {
-        WriteStatement ws = (WriteStatement) stmt;
-        File f = new File(ws.getFileName());
-        FileSplit outputFile = new FileSplit(ws.getNcName().getValue(), new FileReference(f));
-        IAWriterFactory writerFactory = null;
-        if (ws.getWriterClassName() != null) {
-            writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
-        }
-        return new Pair<IAWriterFactory, FileSplit>(writerFactory, outputFile);
-    }
-
-    private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
-            throws Exception {
-        DataverseDecl dvd = (DataverseDecl) stmt;
-        String dvName = dvd.getDataverseName().getValue();
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
-        try {
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
-            if (dv == null) {
-                throw new MetadataException("Unknown dataverse " + dvName);
-            }
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            return dv;
-        } catch (Exception e) {
-            abort(e, e, mdTxnCtx);
-            throw new MetadataException(e);
-        } finally {
-            MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
-        }
-    }
-
-    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
-
-        CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
-        String dvName = stmtCreateDataverse.getDataverseName().getValue();
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
-        try {
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
-            if (dv != null) {
-                if (stmtCreateDataverse.getIfNotExists()) {
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    return;
-                } else {
-                    throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
-                }
-            }
-            MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName,
-                    stmtCreateDataverse.getFormat(), IMetadataEntity.PENDING_NO_OP));
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            abort(e, e, mdTxnCtx);
-            throw e;
-        } finally {
-            MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
-        }
-    }
-
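Each of the handlers in this class follows the same metadata-transaction discipline: begin a metadata
transaction, acquire the relevant lock, do the handler-specific work, commit, and abort before
rethrowing on any failure. A stripped-down sketch of that recurring pattern is shown below; the lock
name and the work placeholder are illustrative, and the handlers above additionally route aborts
through the abort(...) helper:

    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);   // lock scope depends on the statement
    try {
        // ... handler-specific metadata reads and writes go here ...
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);        // make the metadata changes durable
    } catch (Exception e) {
        MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);         // roll back the metadata transaction
        throw e;
    } finally {
        MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
    }
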
-    private void validateCompactionPolicy(String compactionPolicy, Map<String, String> compactionPolicyProperties,
-            MetadataTransactionContext mdTxnCtx, boolean isExternalDataset) throws AsterixException, Exception {
-        CompactionPolicy compactionPolicyEntity = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx,
-                MetadataConstants.METADATA_DATAVERSE_NAME, compactionPolicy);
-        if (compactionPolicyEntity == null) {
-            throw new AsterixException("Unknown compaction policy: " + compactionPolicy);
-        }
-        String compactionPolicyFactoryClassName = compactionPolicyEntity.getClassName();
-        ILSMMergePolicyFactory mergePolicyFactory = (ILSMMergePolicyFactory) Class.forName(
-                compactionPolicyFactoryClassName).newInstance();
-        if (isExternalDataset && mergePolicyFactory.getName().compareTo("correlated-prefix") == 0) {
-            throw new AsterixException("The correlated-prefix merge policy cannot be used with external datasets.");
-        }
-        if (compactionPolicyProperties == null) {
-            if (mergePolicyFactory.getName().compareTo("no-merge") != 0) {
-                throw new AsterixException("Compaction policy properties are missing.");
-            }
-        } else {
-            for (Map.Entry<String, String> entry : compactionPolicyProperties.entrySet()) {
-                if (!mergePolicyFactory.getPropertiesNames().contains(entry.getKey())) {
-                    throw new AsterixException("Invalid compaction policy property: " + entry.getKey());
-                }
-            }
-            for (String p : mergePolicyFactory.getPropertiesNames()) {
-                if (!compactionPolicyProperties.containsKey(p)) {
-                    throw new AsterixException("Missing compaction policy property: " + p);
-                }
-            }
-        }
-    }
-
-    private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws AsterixException, Exception {
-
-        ProgressState progress = ProgressState.NO_PROGRESS;
-        DatasetDecl dd = (DatasetDecl) stmt;
-        String dataverseName = getActiveDataverse(dd.getDataverse());
-        String datasetName = dd.getName().getValue();
-        DatasetType dsType = dd.getDatasetType();
-        String itemTypeName = dd.getItemTypeName().getValue();
-        Identifier ngNameId = dd.getNodegroupName();
-        String nodegroupName = getNodeGroupName(ngNameId, dd, dataverseName);
-        String compactionPolicy = dd.getCompactionPolicy();
-        Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
-        boolean defaultCompactionPolicy = (compactionPolicy == null);
-        boolean temp = dd.getDatasetDetailsDecl().isTemp();
-
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        boolean bActiveTxn = true;
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, dataverseName + "." + itemTypeName,
-                nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
-        Dataset dataset = null;
-        try {
-
-            IDatasetDetails datasetDetails = null;
-            Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    datasetName);
-            if (ds != null) {
-                if (dd.getIfNotExists()) {
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    return;
-                } else {
-                    throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
-                }
-            }
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    itemTypeName);
-            if (dt == null) {
-                throw new AlgebricksException("Type " + itemTypeName + " could not be found.");
-            }
-            String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(dd, dataverseName,
-                    mdTxnCtx);
-
-            if (compactionPolicy == null) {
-                compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
-                compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
-            } else {
-                validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
-            }
-            switch (dd.getDatasetType()) {
-                case INTERNAL: {
-                    IAType itemType = dt.getDatatype();
-                    if (itemType.getTypeTag() != ATypeTag.RECORD) {
-                        throw new AlgebricksException("Can only partition datasets of ARecord type.");
-                    }
-                    List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
-                            .getPartitioningExprs();
-                    boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
-                    ARecordType aRecordType = (ARecordType) itemType;
-                    List<IAType> partitioningTypes = aRecordType.validatePartitioningExpressions(partitioningExprs,
-                            autogenerated);
-
-                    List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
-                    if (filterField != null) {
-                        aRecordType.validateFilterField(filterField);
-                    }
-                    if (compactionPolicy == null) {
-                        if (filterField != null) {
-                            // If the dataset has a filter and the user didn't specify a merge policy, then we will pick the
-                            // correlated-prefix as the default merge policy.
-                            compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
-                            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
-                        }
-                    }
-                    datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
-                            InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
-                            partitioningTypes, autogenerated, filterField, temp);
-                    break;
-                }
-                case EXTERNAL: {
-                    String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
-                    Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
-
-                    datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(),
-                            ExternalDatasetTransactionState.COMMIT);
-                    break;
-                }
-
-            }
-
-            //#. initialize DatasetIdFactory if it is not initialized.
-            if (!DatasetIdFactory.isInitialized()) {
-                DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
-            }
-
-            //#. add a new dataset with PendingAddOp
-            dataset = new Dataset(dataverseName, datasetName, itemTypeName, ngName, compactionPolicy,
-                    compactionPolicyProperties, datasetDetails, dd.getHints(), dsType,
-                    DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
-            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
-
-            if (dd.getDatasetType() == DatasetType.INTERNAL) {
-                Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
-                        dataverseName);
-                JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
-                        metadataProvider);
-
-                //#. make metadataTxn commit before calling runJob.
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
-                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
-
-                //#. runJob
-                runJob(hcc, jobSpec, true);
-
-                //#. begin new metadataTxn
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            }
-
-            //#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
-            MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
-            dataset.setPendingOp(IMetadataEntity.PENDING_NO_OP);
-            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            if (bActiveTxn) {
-                abort(e, e, mdTxnCtx);
-            }
-
-            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
-
-                //#. execute compensation operations
-                //   remove the index in NC
-                //   [Notice]
-                //   As long as we updated(and committed) metadata, we should remove any effect of the job
-                //   because an exception occurs during runJob.
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
-                try {
-                    JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    bActiveTxn = false;
-
-                    runJob(hcc, jobSpec, true);
-                } catch (Exception e2) {
-                    e.addSuppressed(e2);
-                    if (bActiveTxn) {
-                        abort(e, e2, mdTxnCtx);
-                    }
-                }
-
-                //   remove the record from the metadata.
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                try {
-                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                            datasetName);
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                } catch (Exception e2) {
-                    e.addSuppressed(e2);
-                    abort(e, e2, mdTxnCtx);
-                    throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
-                            + "." + datasetName + ") couldn't be removed from the metadata", e);
-                }
-            }
-
-            throw e;
-        } finally {
-            MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, dataverseName + "." + itemTypeName,
-                    nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
-        }
-    }
-
-    private void validateIfResourceIsActiveInFeed(String dataverseName, String datasetName) throws AsterixException {
-        List<FeedConnectionId> activeFeedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
-        boolean resourceInUse = false;
-        StringBuilder builder = new StringBuilder();
-
-        if (activeFeedConnections != null && !activeFeedConnections.isEmpty()) {
-            for (FeedConnectionId connId : activeFeedConnections) {
-                if (connId.getDatasetName().equals(datasetName)) {
-                    resourceInUse = true;
-                    builder.append(connId + "\n");
-                }
-            }
-        }
-
-        if (resourceInUse) {
-            throw new AsterixException("Dataset " + datasetName + " is currently being "
-                    + "fed into by the following feed(s).\n" + builder.toString() + "\n" + "Operation not supported");
-        }
-
-    }
-
-    private String getNodeGroupName(Identifier ngNameId, DatasetDecl dd, String dataverse) {
-        if (ngNameId != null) {
-            return ngNameId.getValue();
-        }
-        String hintValue = dd.getHints().get(DatasetNodegroupCardinalityHint.NAME);
-        if (hintValue == null) {
-            return MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
-        } else {
-            return (dataverse + ":" + dd.getName().getValue());
-        }
-    }
-
-    private String configureNodegroupForDataset(DatasetDecl dd, String dataverse, MetadataTransactionContext mdTxnCtx)
-            throws AsterixException {
-        int nodegroupCardinality = -1;
-        String nodegroupName;
-        String hintValue = dd.getHints().get(DatasetNodegroupCardinalityHint.NAME);
-        if (hintValue == null) {
-            nodegroupName = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
-            return nodegroupName;
-        } else {
-            int numChosen = 0;
-            boolean valid = DatasetHints.validate(DatasetNodegroupCardinalityHint.NAME,
-                    dd.getHints().get(DatasetNodegroupCardinalityHint.NAME)).first;
-            if (!valid) {
-                throw new AsterixException("Incorrect use of hint:" + DatasetNodegroupCardinalityHint.NAME);
-            } else {
-                nodegroupCardinality = Integer.parseInt(dd.getHints().get(DatasetNodegroupCardinalityHint.NAME));
-            }
-            Set<String> nodeNames = AsterixAppContextInfo.getInstance().getMetadataProperties().getNodeNames();
-            Set<String> nodeNamesClone = new HashSet<String>();
-            for (String node : nodeNames) {
-                nodeNamesClone.add(node);
-            }
-            String metadataNodeName = AsterixAppContextInfo.getInstance().getMetadataProperties().getMetadataNodeName();
-            List<String> selectedNodes = new ArrayList<String>();
-            selectedNodes.add(metadataNodeName);
-            numChosen++;
-            nodeNamesClone.remove(metadataNodeName);
-
-            if (numChosen < nodegroupCardinality) {
-                Random random = new Random();
-                String[] nodes = nodeNamesClone.toArray(new String[] {});
-                int[] b = new int[nodeNamesClone.size()];
-                for (int i = 0; i < b.length; i++) {
-                    b[i] = i;
-                }
-
-                for (int i = 0; i < nodegroupCardinality - numChosen; i++) {
-                    int selected = i + random.nextInt(nodeNamesClone.size() - i);
-                    int selNodeIndex = b[selected];
-                    selectedNodes.add(nodes[selNodeIndex]);
-                    // move the chosen index out of the selectable range so it cannot be picked again
-                    int temp = b[i];
-                    b[i] = b[selected];
-                    b[selected] = temp;
-                }
-            }
-            nodegroupName = dataverse + ":" + dd.getName().getValue();
-            MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(nodegroupName, selectedNodes));
-            return nodegroupName;
-        }
-
-    }
-
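The hint-driven branch of configureNodegroupForDataset(...) above amounts to always including the
metadata node and then choosing (cardinality - 1) additional nodes uniformly at random without
repetition. A compact, equivalent sketch of that selection using Collections.shuffle follows; the
class and method names are illustrative, not part of the original code:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.Set;

    public final class NodegroupSelectionSketch {

        // Picks the metadata node plus up to (cardinality - 1) distinct nodes chosen at random.
        public static List<String> selectNodes(Set<String> allNodes, String metadataNodeName, int cardinality) {
            List<String> candidates = new ArrayList<String>(allNodes);
            candidates.remove(metadataNodeName);
            Collections.shuffle(candidates);                              // random order over the remaining nodes
            List<String> selectedNodes = new ArrayList<String>();
            selectedNodes.add(metadataNodeName);                          // the metadata node is always selected
            int extra = Math.max(0, Math.min(cardinality - 1, candidates.size()));
            selectedNodes.addAll(candidates.subList(0, extra));           // first (cardinality - 1) shuffled nodes
            return selectedNodes;
        }
    }

The original code achieves the same effect with a partial Fisher-Yates pass over an index array, which
avoids shuffling the entire node list when the requested cardinality is small.
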
-    @SuppressWarnings("unchecked")
-    private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
-        ProgressState progress = ProgressState.NO_PROGRESS;
-        CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
-        String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
-        String datasetName = stmtCreateIndex.getDatasetName().getValue();
-
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        boolean bActiveTxn = true;
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.createIndexBegin(dataverseName, dataverseName + "." + datasetName);
-
-        String indexName = null;
-        JobSpecification spec = null;
-        Dataset ds = null;
-        // For external datasets
-        ArrayList<ExternalFile> externalFilesSnapshot = null;
-        boolean firstExternalDatasetIndex = false;
-        boolean filesIndexReplicated = false;
-        Index filesIndex = null;
-        boolean datasetLocked = false;
-        try {
-            ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    datasetName);
-            if (ds == null) {
-                throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
-                        + dataverseName);
-            }
-
-            indexName = stmtCreateIndex.getIndexName().getValue();
-            Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    datasetName, indexName);
-
-            String itemTypeName = ds.getItemTypeName();
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    itemTypeName);
-            IAType itemType = dt.getDatatype();
-            ARecordType aRecordType = (ARecordType) itemType;
-
-            List<List<String>> indexFields = new ArrayList<List<String>>();
-            List<IAType> indexFieldTypes = new ArrayList<IAType>();
-            for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
-                IAType fieldType = null;
-                boolean isOpen = aRecordType.isOpen();
-                ARecordType subType = aRecordType;
-                int i = 0;
-                if (fieldExpr.first.size() > 1 && !isOpen) {
-                    for (; i < fieldExpr.first.size() - 1;) {
-                        subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
-                        i++;
-                        if (subType.isOpen()) {
-                            isOpen = true;
-                            break;
-                        }
-                    }
-                }
-                if (fieldExpr.second == null) {
-                    fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
-                } else {
-                    if (!stmtCreateIndex.isEnforced())
-                        throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first
-                                + "\" field without enforcing its type");
-                    if (!isOpen)
-                        throw new AlgebricksException("Typed index on \"" + fieldExpr.first
-                                + "\" field can only be created for an open datatype");
-                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second,
-                            indexName, dataverseName);
-                    TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
-                    fieldType = typeMap.get(typeSignature);
-                }
-                if (fieldType == null)
-                    throw new AlgebricksException("Unknown type " + fieldExpr.second);
-
-                indexFields.add(fieldExpr.first);
-                indexFieldTypes.add(fieldType);
-            }
-
-            aRecordType.validateKeyFields(indexFields, indexFieldTypes, stmtCreateIndex.getIndexType());
-
-            if (idx != null) {
-                if (stmtCreateIndex.getIfNotExists()) {
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    return;
-                } else {
-                    throw new AlgebricksException("An index with this name " + indexName + " already exists.");
-                }
-            }
-
-            // Checks whether a user is trying to create an inverted secondary index on a dataset with a variable-length primary key.
-            // Currently, we do not support this. Therefore, as a temporary solution, we print an error message and stop.
-            if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
-                    || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
-                    || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
-                    || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
-                List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(ds);
-                for (List<String> partitioningKey : partitioningKeys) {
-                    IAType keyType = aRecordType.getSubFieldType(partitioningKey);
-                    ITypeTraits typeTrait = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
-
-                    // If it is not a fixed length
-                    if (typeTrait.getFixedLength() < 0) {
-                        throw new AlgebricksException("The keyword or ngram index " + indexName
-                                + " cannot be created on the dataset " + datasetName
-                                + " due to its variable-length primary key field " + partitioningKey);
-                    }
-
-                }
-            }
-
-            if (ds.getDatasetType() == DatasetType.INTERNAL) {
-                validateIfResourceIsActiveInFeed(dataverseName, datasetName);
-            } else {
-                // External dataset
-                // Check if the dataset is indexible
-                if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
-                    throw new AlgebricksException("Dataset using the "
-                            + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter()
-                            + " adapter cannot be indexed");
-                }
-                // check if the name of the index is valid
-                if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
-                    throw new AlgebricksException("external dataset index name is invalid");
-                }
-
-                // Check if the files index exist
-                filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
-                firstExternalDatasetIndex = (filesIndex == null);
-                // lock external dataset
-                ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
-                datasetLocked = true;
-                if (firstExternalDatasetIndex) {
-                    // verify that no one has created an index before we acquire the lock
-                    filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
-                            dataverseName, datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
-                    if (filesIndex != null) {
-                        ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
-                        firstExternalDatasetIndex = false;
-                        ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
-                    }
-                }
-                if (firstExternalDatasetIndex) {
-                    // Get snapshot from External File System
-                    externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
-                    // Add an entry for the files index
-                    filesIndex = new Index(dataverseName, datasetName,
-                            ExternalIndexingOperations.getFilesIndexName(datasetName), IndexType.BTREE,
-                            ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES,
-                            ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false,
-                            IMetadataEntity.PENDING_ADD_OP);
-                    MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
-                    // Add files to the external files index
-                    for (ExternalFile file : externalFilesSnapshot) {
-                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
-                    }
-                    // This is the first index for the external dataset, replicate the files index
-                    spec = ExternalIndexingOperations.buildFilesIndexReplicationJobSpec(ds, externalFilesSnapshot,
-                            metadataProvider, true);
-                    if (spec == null) {
-                        throw new AsterixException(
-                                "Failed to create job spec for replicating Files Index For external dataset");
-                    }
-                    filesIndexReplicated = true;
-                    runJob(hcc, spec, true);
-                }
-            }
-
-            //check whether there exists another enforced index on the same field
-            if (stmtCreateIndex.isEnforced()) {
-                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(
-                        metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
-                for (Index index : indexes) {
-                    if (index.getKeyFieldNames().equals(indexFields)
-                            && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds())
-                        throw new AsterixException("Cannot create index " + indexName + ", enforced index "
-                                + index.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',')
-                                + "\" already exists");
-                }
-            }
-
-            //#. add a new index with PendingAddOp
-            Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(), indexFields,
-                    indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(), false,
-                    IMetadataEntity.PENDING_ADD_OP);
-            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
-
-            ARecordType enforcedType = null;
-            if (stmtCreateIndex.isEnforced()) {
-                enforcedType = IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType(aRecordType, index);
-            }
-
-            //#. prepare to create the index artifact in NC.
-            CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
-                    index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
-                    index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
-            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, aRecordType, enforcedType, metadataProvider);
-            if (spec == null) {
-                throw new AsterixException("Failed to create job spec for creating index '"
-                        + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
-            }
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            bActiveTxn = false;
-
-            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
-
-            //#. create the index artifact in NC.
-            runJob(hcc, spec, true);
-
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            bActiveTxn = true;
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-            //#. load data into the index in NC.
-            cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
-                    index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
-                    index.getGramLength(), index.getIndexType());
-            spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, aRecordType, enforcedType, metadataProvider);
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            bActiveTxn = false;
-
-            runJob(hcc, spec, true);
-
-            //#. begin new metadataTxn
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            bActiveTxn = true;
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-            //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
-            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
-                    indexName);
-            index.setPendingOp(IMetadataEntity.PENDING_NO_OP);
-            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
-            // add another new files index with PendingNoOp after deleting the index with PendingAddOp
-            if (firstExternalDatasetIndex) {
-                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                        datasetName, filesIndex.getIndexName());
-                filesIndex.setPendingOp(IMetadataEntity.PENDING_NO_OP);
-                MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
-                // update transaction timestamp
-                ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
-                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
-            }
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-
-        } catch (Exception e) {
-            if (bActiveTxn) {
-                abort(e, e, mdTxnCtx);
-            }
-            // If files index was replicated for external dataset, it should be cleaned up on NC side
-            if (filesIndexReplicated) {
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                        ExternalIndexingOperations.getFilesIndexName(datasetName));
-                try {
-                    JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
-                            metadataProvider, ds);
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    bActiveTxn = false;
-                    runJob(hcc, jobSpec, true);
-                } catch (Exception e2) {
-                    e.addSuppressed(e2);
-                    if (bActiveTxn) {
-                        abort(e, e2, mdTxnCtx);
-                    }
-                }
-            }
-
-            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
-                //#. execute compensation operations
-                //   remove the index in NC
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
-                try {
-                    JobSpecification jobSpec = IndexOperations
-                            .buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds);
-
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    bActiveTxn = false;
-                    runJob(hcc, jobSpec, true);
-                } catch (Exception e2) {
-                    e.addSuppressed(e2);
-                    if (bActiveTxn) {
-                        abort(e, e2, mdTxnCtx);
-                    }
-                }
-
-                if (firstExternalDatasetIndex) {
-                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                    try {
-                        // Drop External Files from metadata
-                        MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
-                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    } catch (Exception e2) {
-                        e.addSuppressed(e2);
-                        abort(e, e2, mdTxnCtx);
-                        throw new IllegalStateException("System is in an inconsistent state: pending files for ("
-                                + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
-                    }
-                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                    try {
-                        // Drop the files index from metadata
-                        MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                                datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
-                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    } catch (Exception e2) {
-                        e.addSuppressed(e2);
-                        abort(e, e2, mdTxnCtx);
-                        throw new IllegalStateException("System is in an inconsistent state: pending index (" + dataverseName
-                                + "." + datasetName + "." + ExternalIndexingOperations.getFilesIndexName(datasetName)
-                                + ") couldn't be removed from the metadata", e);
-                    }
-                }
-                // remove the record from the metadata.
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                try {
-                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
-                            datasetName, indexName);
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                } catch (Exception e2) {
-                    e.addSuppressed(e2);
-                    abort(e, e2, mdTxnCtx);
-                    throw new IllegalStateException("System is in an inconsistent state: pending index (" + dataverseName
-                            + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
-                }
-            }
-            throw e;
-        } finally {
-            MetadataLockManager.INSTANCE.createIndexEnd(dataverseName, dataverseName + "." + datasetName);
-            if (datasetLocked) {
-                ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
-            }
-        }
-    }
-
-    private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
-        TypeDecl stmtCreateType = (TypeDecl) stmt;
-        String dataverseName = getActiveDataverse(stmtCreateType.getDataverseName());
-        String typeName = stmtCreateType.getIdent().getValue();
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.createTypeBegin(dataverseName, dataverseName + "." + typeName);
-        try {
-
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
-            if (dv == null) {
-                throw new AlgebricksException("Unknown dataverse " + dataverseName);
-            }
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
-            if (dt != null) {
-                if (!stmtCreateType.getIfNotExists()) {
-                    throw new AlgebricksException("A datatype with the name " + typeName + " already exists.");
-                }
-            } else {
-                if (builtinTypeMap.get(typeName) != null) {
-                    throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
-                } else {
-                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx,
-                            stmtCreateType.getTypeDef(), stmtCreateType.getIdent().getValue(), dataverseName);
-                    TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
-                    IAType type = typeMap.get(typeSignature);
-                    MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
-                }
-            }
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            abort(e, e, mdTxnCtx);
-            throw e;
-        } finally {
-            MetadataLockManager.INSTANCE.createTypeEnd(dataverseName, dataverseName + "." + typeName);
-        }
-    }
-
-    private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
-        DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
-        String dataverseName = stmtDelete.getDataverseName().getValue();
-
-        ProgressState progress = ProgressState.NO_PROGRESS;
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        boolean bActiveTxn = true;
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-        MetadataLockManager.INSTANCE.acquireDataverseWriteLock(dataverseName);
-        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
-        try {
-
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
-            if (dv == null) {
-                if (stmtDelete.getIfExists()) {
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    return;
-                } else {
-                    throw new AlgebricksException("There is no dataverse with the name " + dataverseName + ".");
-                }
-            }
-
-            //# disconnect all feeds from any datasets in the dataverse.
-            List<FeedConnectionId> activeFeedConnections = FeedLifecycleListener.INSTANCE
-                    .getActiveFeedConnections(null);
-            DisconnectFeedStatement disStmt = null;
-            Identifier dvId = new Identifier(dataverseName);
-            for (FeedConnectionId connection : activeFeedConnections) {
-                FeedId feedId = connection.getFeedId();
-                if (feedId.getDataverse().equals(dataverseName)) {
-                    disStmt = new DisconnectFeedStatement(dvId, new Identifier(feedId.getFeedName()), new Identifier(
-                            connection.getDatasetName()));
-                    try {
-                        handleDisconnectFeedStatement(metadataProvider, disStmt, hcc);
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Disconnected feed " + feedId.getFeedName() + " from dataset "
-                                    + connection.getDatasetName());
-                        }
-                    } catch (Exception exception) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to disconnect feed " + feedId.getFeedName() + " from dataset "
-                                    + connection.getDatasetName() + ". Encountered exception " + exception);
-                        }
-                    }
-                }
-            }
-
-            //#. prepare jobs which will drop corresponding datasets with indexes.
-            List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
-            for (int j = 0; j < datasets.size(); j++) {
-                String datasetName = datasets.get(j).getDatasetName();
-                DatasetType dsType = datasets.get(j).getDatasetType();
-                if (dsType == DatasetType.INTERNAL) {
-                    List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
-                            datasetName);
-                    for (int k = 0; k < indexes.size(); k++) {
-                        if (indexes.get(k).isSecondaryIndex()) {
-                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                                    indexes.get(k).getIndexName());
-                            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
-                                    datasets.get(j)));
-                        }
-                    }
-
-                    CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
-                    jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
-                } else {
-                    // External dataset
-                    List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
-                            datasetName);
-                    for (int k = 0; k < indexes.size(); k++) {
-                        if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
-                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                                    indexes.get(k).getIndexName());
-                            jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
-                                    metadataProvider, datasets.get(j)));
-                        } else {
-                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                                    indexes.get(k).getIndexName());
-                            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
-                                    datasets.get(j)));
-                        }
-                    }
-                    ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(datasets.get(j));
-                }
-            }
-            jobsToExecute.add(DataverseOperations.createDropDataverseJobSpec(dv, metadataProvider));
-
-            //#. mark PendingDropOp on the dataverse record by
-            //   first, deleting the dataverse record from the DATAVERSE_DATASET
-            //   second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
-            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
-            MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(),
-                    IMetadataEntity.PENDING_DROP_OP));
-
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-            bActiveTxn = false;
-            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
-
-            for (JobSpecification jobSpec : jobsToExecute) {
-                runJob(hcc, jobSpec, true);
-            }
-
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            bActiveTxn = true;
-            metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-            //#. finally, delete the dataverse.
-            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
-            if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
-                activeDefaultDataverse = null;
-            }
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            if (bActiveTxn) {
-                abort(e, e, mdTxnCtx);
-            }
-
-            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
-                if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
-                    activeDefaultDataverse = null;
-                }
-
-                //#. execute compensation operations
-                //   remove the all indexes in NC
-                try {
-                    for (JobSpecification jobSpec : jobsToExecute) {
-                        runJob(hcc, jobSpec, true);
-                    }
-                } catch (Exception e2) {
-                    //do not throw the exception since the metadata still needs to be compensated.
-                    e.addSuppressed(e2);
-                }
-
-                //   remove the record from the metadata.
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                try {
-                    MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                } catch (Exception e2) {
-                    e.addSuppressed(e2);
-                    abort(e, e2, mdTxnCtx);
-                    throw new IllegalStateException("System is in an inconsistent state: pending dataverse (" + dataverseName
-                            + ") couldn't be removed from the metadata", e);
-                }
-            }
-
-            throw e;
-        } finally {
-            MetadataLockManager.INSTANCE.releaseDataverseWriteLock(dataverseName);
-        }
-    }
-
-    private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
-        DropStatement stmtDelete = (DropStatement) stmt;
-        String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
-        String datasetName = stmtDelete.getDatasetName().getValue();
-
-        ProgressState progress = ProgressState.NO_PROGRESS;
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        boolean bActiveTxn = true;
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.dropDatasetBegin(dataverseName, dataverseName + "." + datasetName);
-        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
-        try {
-
-            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
-            if (ds == null) {
-                if (stmtDelete.getIfExists()) {
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    return;
-                } else {
-                    throw new AlgebricksException("There is no dataset with the name " + datasetName
-                            + " in dataverse " + dataverseName + ".");
-                }
-            }
-
-            Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<FeedConnectionId, Pair<JobSpecification, Boolean>>();
-            if (ds.getDatasetType() == DatasetType.INTERNAL) {
-                // prepare job spec(s) that would disconnect any active feeds involving the dataset.
-                List<FeedConnectionId> feedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
-                if (feedConnections != null && !feedConnections.isEmpty()) {
-                    for (FeedConnectionId connection : feedConnections) {
-                        Pair<JobSpecification, Boolean> p = FeedOperations.buildDisconnectFeedJobSpec(metadataProvider,
-                                connection);
-                        disconnectJobList.put(connection, p);
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Disconnecting feed " + connection.getFeedId().getFeedName() + " from dataset "
-                                    + datasetName + " as dataset is being dropped");
-                        }
-                    }
-                }
-
-                //#. prepare jobs to drop the dataset and the indexes in NC
-                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
-                for (int j = 0; j < indexes.size(); j++) {
-                    if (indexes.get(j).isSecondaryIndex()) {
-                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                                indexes.get(j).getIndexName());
-                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
-                    }
-                }
-                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
-                jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
-
-                //#. mark the existing dataset as PendingDropOp
-                MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
-                MetadataManager.INSTANCE.addDataset(
-                        mdTxnCtx,
-                        new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getNodeGroupName(), ds
-                                .getCompactionPolicy(), ds.getCompactionPolicyProperties(), ds.getDatasetDetails(), ds
-                                .getHints(), ds.getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
-
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
-                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
-
-                //# disconnect the feeds
-                for (Pair<JobSpecification, Boolean> p : disconnectJobList.values()) {
-                    runJob(hcc, p.first, true);
-                }
-
-                //#. run the jobs
-                for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
-                }
-
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            } else {
-                // External dataset
-                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
-                //#. prepare jobs to drop the dataset and the indexes in NC
-                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
-                for (int j = 0; j < indexes.size(); j++) {
-                    if (ExternalIndexingOperations.isFileIndex(indexes.get(j))) {
-                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                                indexes.get(j).getIndexName());
-                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
-                    } else {
-                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                                indexes.get(j).getIndexName());
-                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds, metadataProvider,
-                                ds));
-                    }
-                }
-
-                //#. mark the existing dataset as PendingDropOp
-                MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
-                MetadataManager.INSTANCE.addDataset(
-                        mdTxnCtx,
-                        new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getNodeGroupName(), ds
-                                .getCompactionPolicy(), ds.getCompactionPolicyProperties(), ds.getDatasetDetails(), ds
-                                .getHints(), ds.getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
-
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
-                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
-
-                //#. run the jobs
-                for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
-                }
-                if (indexes.size() > 0) {
-                    ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
-                }
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-            }
-
-            //#. finally, delete the dataset.
-            MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
-            // Drop the associated nodegroup
-            String nodegroup = ds.getNodeGroupName();
-            if (!nodegroup.equalsIgnoreCase(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)) {
-                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, dataverseName + ":" + datasetName);
-            }
-
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            if (bActiveTxn) {
-                abort(e, e, mdTxnCtx);
-            }
-
-            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
-                //#. execute compensation operations
-                //   remove the all indexes in NC
-                try {
-                    for (JobSpecification jobSpec : jobsToExecute) {
-                        runJob(hcc, jobSpec, true);
-                    }
-                } catch (Exception e2) {
-                    //do not throw the exception since the metadata still needs to be compensated.
-                    e.addSuppressed(e2);
-                }
-
-                //   remove the record from the metadata.
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                try {
-                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
-                            datasetName);
-                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                } catch (Exception e2) {
-                    e.addSuppressed(e2);
-                    abort(e, e2, mdTxnCtx);
-                    throw new IllegalStateException("System is in an inconsistent state: pending dataset (" + dataverseName
-                            + "." + datasetName + ") couldn't be removed from the metadata", e);
-                }
-            }
-
-            throw e;
-        } finally {
-            MetadataLockManager.INSTANCE.dropDatasetEnd(dataverseName, dataverseName + "." + datasetName);
-        }
-    }
-
-    private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
-            IHyracksClientConnection hcc) throws Exception {
-
-        IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
-        String datasetName = stmtIndexDrop.getDatasetName().getValue();
-        String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
-        ProgressState progress = ProgressState.NO_PROGRESS;
-        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-        boolean bActiveTxn = true;
-        metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-        MetadataLockManager.INSTANCE.dropIndexBegin(dataverseName, dataverseName + "." + datasetName);
-
-        String indexName = null;
-        // For external index
-        boolean dropFilesIndex = false;
-        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
-        try {
-
-            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
-            if (ds == null) {
-                throw new AlgebricksException("There is no dataset with the name " + datasetName + " in dataverse "
-                        + dataverseName + ".");
-            }
-
-            List<FeedConnectionId> feedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
-            boolean resourceInUse = false;
-            if (feedConnections != null && !feedConnections.isEmpty()) {
-                StringBuilder builder = new StringBuilder();
-                for (FeedConnectionId connection : feedConnections) {
-                    if (connection.getDatasetName().equals(datasetName)) {
-                        resourceInUse = true;
-                        builder.append(connection + "\n");
-                    }
-                }
-                if (resourceInUse) {
-                    throw new AsterixException("Dataset " + datasetName
-                            + " is currently being fed into by the following feeds:\n" + builder.toString()
-                            + "Operation not supported.");
-                }
-            }
-
-            if (ds.getDatasetType() == DatasetType.INTERNAL) {
-                indexName = stmtIndexDrop.getIndexName().getValue();
-                Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                if (index == null) {
-                    if (stmtIndexDrop.getIfExists()) {
-                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                        return;
-                    } else {
-                        throw new AlgebricksException("There is no index with the name " + indexName + ".");
-                    }
-                }
-                //#. prepare a job to drop the index in NC.
-                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
-                jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
-
-                //#. mark PendingDropOp on the existing index
-                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                MetadataManager.INSTANCE.addIndex(
-                        mdTxnCtx,
-                        new Index(dataverseName, datasetName, indexName, index.getIndexType(),
-                                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index
-                                        .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
-
-                //#. commit the existing transaction before calling runJob.
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
-                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
-
-                for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
-                }
-
-                //#. begin a new transaction
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-                //#. finally, delete the existing index
-                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-            } else {
-                // External dataset
-                indexName = stmtIndexDrop.getIndexName().getValue();
-                Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                if (index == null) {
-                    if (stmtIndexDrop.getIfExists()) {
-                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                        return;
-                    } else {
-                        throw new AlgebricksException("There is no index with the name " + indexName + ".");
-                    }
-                } else if (ExternalIndexingOperations.isFileIndex(index)) {
-                    throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
-                }
-                //#. prepare a job to drop the index in NC.
-                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
-                jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
-                List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
-                        datasetName);
-                if (datasetIndexes.size() == 2) {
-                    dropFilesIndex = true;
-                    // only one secondary index plus the files index remain, so we need to delete both
-                    for (Index externalIndex : datasetIndexes) {
-                        if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
-                            cds = new CompiledIndexDropStatement(dataverseName, datasetName,
-                                    externalIndex.getIndexName());
-                            jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
-                                    metadataProvider, ds));
-                            //#. mark PendingDropOp on the existing files index
-                            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
-                                    externalIndex.getIndexName());
-                            MetadataManager.INSTANCE.addIndex(
-                                    mdTxnCtx,
-                                    new Index(dataverseName, datasetName, externalIndex.getIndexName(),
-                                            externalIndex.getIndexType(), externalIndex.getKeyFieldNames(),
-                                            externalIndex.getKeyFieldTypes(), externalIndex.isEnforcingKeyFileds(),
-                                            externalIndex.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
-                        }
-                    }
-                }
-
-                //#. mark PendingDropOp on the existing index
-                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                MetadataManager.INSTANCE.addIndex(
-                        mdTxnCtx,
-                        new Index(dataverseName, datasetName, indexName, index.getIndexType(),
-                                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index
-                                        .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
-
-                //#. commit the existing transaction before calling runJob.
-                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                bActiveTxn = false;
-                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
-
-                for (JobSpecification jobSpec : jobsToExecute) {
-                    runJob(hcc, jobSpec, true);
-                }
-
-                //#. begin a new transaction
-                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                bActiveTxn = true;
-                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
-                //#. finally, delete the existing index
-                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
-                if (dropFilesIndex) {
-                    // delete the files index too
-                    MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
-                            ExternalIndexingOperations.getFilesIndexName(datasetName));
-                    MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
-                    ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
-                }
-            }
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-
-        } catch (Exception e) {
-            if (bActiveTxn) {
-                abort(e, e, mdTxnCtx);
-            }
-
-            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
-                //#. execute compensation operations
-                //   remove the all indexes in NC
-                try {
-                    for (JobSpecification jobSpec : jobsToExecute) {
-                        runJob(hcc, jobSpec, true);
-                    }
-                } catch (Exception e2) {
-                    //do not throw the exception since the metadata still needs to be compensated.
-                    e.addSuppressed(e2);
-        

<TRUNCATED>
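
The DDL handlers above (create index, create type, drop dataverse, drop dataset, drop index) all follow the same two-phase metadata protocol: record the affected entity with a PENDING_*_OP flag and commit that metadata transaction before any Hyracks job runs, execute the physical job(s) on the NCs, then flip the entity to PENDING_NO_OP (or delete it) in a fresh transaction; if anything fails after the pending record was added, the catch block runs compensating jobs and removes the pending record. The following is a minimal, framework-free sketch of that control flow only; Catalog, MetadataTx, PendingOp, runPhysicalJob and compensate are hypothetical stand-ins for MetadataManager, MetadataTransactionContext and the runJob/compensation code in the handlers, not real AsterixDB APIs.

// Hedged sketch of the pending-op protocol used by the handlers above. Catalog,
// MetadataTx, PendingOp, runPhysicalJob and compensate are hypothetical stand-ins;
// only the control flow mirrors the real code.
public class PendingOpSketch {

    enum PendingOp { PENDING_ADD_OP, PENDING_NO_OP }

    interface MetadataTx { }

    interface Catalog {
        MetadataTx begin();
        void commit(MetadataTx tx);
        void abort(MetadataTx tx);
        void addEntity(MetadataTx tx, String name, PendingOp op);
        void dropEntity(MetadataTx tx, String name);
    }

    static void createEntity(Catalog catalog, String name, Runnable runPhysicalJob, Runnable compensate)
            throws Exception {
        MetadataTx tx = catalog.begin();
        boolean activeTxn = true;
        boolean pendingRecorded = false;
        try {
            // 1. record the entity with a pending flag and commit before touching the NCs
            catalog.addEntity(tx, name, PendingOp.PENDING_ADD_OP);
            catalog.commit(tx);
            activeTxn = false;
            pendingRecorded = true;

            // 2. run the physical job(s); no metadata transaction is open while they run
            runPhysicalJob.run();

            // 3. flip the pending flag to "no-op" in a fresh, short transaction
            tx = catalog.begin();
            activeTxn = true;
            catalog.dropEntity(tx, name);
            catalog.addEntity(tx, name, PendingOp.PENDING_NO_OP);
            catalog.commit(tx);
        } catch (Exception e) {
            if (activeTxn) {
                catalog.abort(tx);
            }
            if (pendingRecorded) {
                // compensation: undo physical artifacts, then remove the pending record
                compensate.run();
                MetadataTx cleanup = catalog.begin();
                catalog.dropEntity(cleanup, name);
                catalog.commit(cleanup);
            }
            throw e;
        }
    }
}

Note how no metadata transaction is held open while jobs run; each metadata update is its own short transaction.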


[47/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java
deleted file mode 100644
index 3a06168..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.optimizer.base.FuzzyUtils;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class FuzzyEqRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-
-        // current operator is INNERJOIN or LEFTOUTERJOIN or SELECT
-        Mutable<ILogicalExpression> expRef;
-        if (op.getOperatorTag() == LogicalOperatorTag.INNERJOIN
-                || op.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
-            AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
-            expRef = joinOp.getCondition();
-        } else if (op.getOperatorTag() == LogicalOperatorTag.SELECT) {
-            SelectOperator selectOp = (SelectOperator) op;
-            expRef = selectOp.getCondition();
-        } else {
-            return false;
-        }
-
-        AqlMetadataProvider metadataProvider = ((AqlMetadataProvider) context.getMetadataProvider());
-
-        IVariableTypeEnvironment env = context.getOutputTypeEnvironment(op);
-        if (expandFuzzyEq(expRef, context, env, metadataProvider)) {
-            context.computeAndSetTypeEnvironmentForOperator(op);
-            return true;
-        }
-        return false;
-    }
-
-    private boolean expandFuzzyEq(Mutable<ILogicalExpression> expRef, IOptimizationContext context,
-            IVariableTypeEnvironment env, AqlMetadataProvider metadataProvider) throws AlgebricksException {
-        ILogicalExpression exp = expRef.getValue();
-
-        if (exp.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-
-        boolean expanded = false;
-        AbstractFunctionCallExpression funcExp = (AbstractFunctionCallExpression) exp;
-        FunctionIdentifier fi = funcExp.getFunctionIdentifier();
-        if (fi.equals(AsterixBuiltinFunctions.FUZZY_EQ)) {
-            List<Mutable<ILogicalExpression>> inputExps = funcExp.getArguments();
-
-            String simFuncName = FuzzyUtils.getSimFunction(metadataProvider);
-            ArrayList<Mutable<ILogicalExpression>> similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            for (int i = 0; i < inputExps.size(); ++i) {
-                Mutable<ILogicalExpression> inputExpRef = inputExps.get(i);
-                similarityArgs.add(inputExpRef);
-            }
-
-            FunctionIdentifier simFunctionIdentifier = FuzzyUtils.getFunctionIdentifier(simFuncName);
-            ScalarFunctionCallExpression similarityExp = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(simFunctionIdentifier), similarityArgs);
-            // Add annotations from the original fuzzy-eq function.
-            similarityExp.getAnnotations().putAll(funcExp.getAnnotations());
-            ArrayList<Mutable<ILogicalExpression>> cmpArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            cmpArgs.add(new MutableObject<ILogicalExpression>(similarityExp));
-            IAObject simThreshold = FuzzyUtils.getSimThreshold(metadataProvider, simFuncName);
-            cmpArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                    simThreshold))));
-            ScalarFunctionCallExpression cmpExpr = FuzzyUtils.getComparisonExpr(simFuncName, cmpArgs);
-            expRef.setValue(cmpExpr);
-            return true;
-        } else if (fi.equals(AlgebricksBuiltinFunctions.AND) || fi.equals(AlgebricksBuiltinFunctions.OR)) {
-            for (int i = 0; i < 2; i++) {
-                if (expandFuzzyEq(funcExp.getArguments().get(i), context, env, metadataProvider)) {
-                    expanded = true;
-                }
-            }
-        }
-        return expanded;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-}
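
FuzzyEqRule, removed above, expands a fuzzy-equality (~=) predicate in a join or select condition into an explicit similarity function call compared against a threshold, both looked up through FuzzyUtils. A minimal, framework-free Java sketch of the predicate the rewritten plan ends up evaluating, assuming the configured similarity function resolves to Jaccard over token sets and using 0.8f purely as an illustrative threshold:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hedged sketch: the check that "a ~= b" boils down to after FuzzyEqRule expands it,
// assuming the configured similarity function is Jaccard and an illustrative 0.8f threshold.
public class FuzzyEqSketch {

    // Jaccard similarity of two token sets: |A intersect B| / |A union B|.
    static float jaccard(Set<String> a, Set<String> b) {
        Set<String> intersection = new HashSet<>(a);
        intersection.retainAll(b);
        Set<String> union = new HashSet<>(a);
        union.addAll(b);
        return union.isEmpty() ? 1.0f : (float) intersection.size() / union.size();
    }

    // Expanded form of the fuzzy-eq predicate: similarity(a, b) >= threshold.
    static boolean fuzzyEq(Set<String> a, Set<String> b, float threshold) {
        return jaccard(a, b) >= threshold;
    }

    public static void main(String[] args) {
        Set<String> left = new HashSet<>(Arrays.asList("fuzzy", "join", "rule"));
        Set<String> right = new HashSet<>(Arrays.asList("fuzzy", "join", "rules"));
        System.out.println(jaccard(left, right));        // prints 0.5
        System.out.println(fuzzyEq(left, right, 0.8f));  // prints false
    }
}

The actual rule builds this as Algebricks expressions (a ScalarFunctionCallExpression plus a comparison from FuzzyUtils.getComparisonExpr) rather than evaluating anything directly.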

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
deleted file mode 100644
index 9bcfafa..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
+++ /dev/null
@@ -1,404 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.io.StringReader;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.base.LogicalOperatorDeepCopyVisitor;
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aqlplus.parser.AQLPlusParser;
-import edu.uci.ics.asterix.aqlplus.parser.ParseException;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.types.TypeHelper;
-import edu.uci.ics.asterix.optimizer.base.FuzzyUtils;
-import edu.uci.ics.asterix.translator.AqlPlusExpressionToPlanTranslator;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class FuzzyJoinRule implements IAlgebraicRewriteRule {
-
-    private static HashSet<FunctionIdentifier> simFuncs = new HashSet<FunctionIdentifier>();
-    static {
-        simFuncs.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK);
-    }
-
-    private static final String AQLPLUS = ""
-            //
-            // -- - Stage 3 - --
-            //
-            + "((#RIGHT), "
-            + "  (join((#LEFT), "
-            //
-            // -- -- - Stage 2 - --
-            //
-            + "    ("
-            + "    join( "
-            + "      ( "
-            + "      #LEFT_1 "
-            + "      let $tokensUnrankedLeft := %s($$LEFT_1) "
-            + "      let $lenLeft := len($tokensUnrankedLeft) "
-            + "      let $tokensLeft := "
-            + "        for $token in $tokensUnrankedLeft "
-            + "        for $tokenRanked at $i in "
-            //
-            // -- -- -- - Stage 1 - --
-            //
-            // + "          #LEFT_2 "
-            // + "          let $id := $$LEFTPK_2 "
-            // + "          for $token in %s($$LEFT_2) "
-            + "          #RIGHT_2 "
-            + "          let $id := $$RIGHTPK_2 "
-            + "          for $token in %s($$RIGHT_2) "
-            + "          /*+ hash */ "
-            + "          group by $tokenGroupped := $token with $id "
-            + "          /*+ inmem 34 198608 */ "
-            + "          order by count($id), $tokenGroupped "
-            + "          return $tokenGroupped "
-            //
-            // -- -- -- -
-            //
-            + "        where $token = /*+ bcast */ $tokenRanked "
-            + "        order by $i "
-            + "        return $i "
-            + "      for $prefixTokenLeft in subset-collection($tokensLeft, 0, prefix-len-%s(len($tokensLeft), %ff)) "
-            + "      ),( "
-            + "      #RIGHT_1 "
-            + "      let $tokensUnrankedRight := %s($$RIGHT_1) "
-            + "      let $lenRight := len($tokensUnrankedRight) "
-            + "      let $tokensRight := "
-            + "        for $token in $tokensUnrankedRight "
-            + "        for $tokenRanked at $i in "
-            //
-            // -- -- -- - Stage 1 - --
-            //
-            // + "          #LEFT_3 "
-            // + "          let $id := $$LEFTPK_3 "
-            // + "          for $token in %s($$LEFT_3) "
-            + "          #RIGHT_3 "
-            + "          let $id := $$RIGHTPK_3 "
-            + "          for $token in %s($$RIGHT_3) "
-            + "          /*+ hash */ "
-            + "          group by $tokenGroupped := $token with $id "
-            + "          /*+ inmem 34 198608 */ "
-            + "          order by count($id), $tokenGroupped "
-            + "          return $tokenGroupped "
-            //
-            // -- -- -- -
-            //
-            + "        where $token = /*+ bcast */ $tokenRanked "
-            + "        order by $i "
-            + "        return $i "
-            + "      for $prefixTokenRight in subset-collection($tokensRight, 0, prefix-len-%s(len($tokensRight), %ff)) "
-            + "      ), $prefixTokenLeft = $prefixTokenRight) "
-            + "    let $sim := similarity-%s-prefix($lenLeft, $tokensLeft, $lenRight, $tokensRight, $prefixTokenLeft, %ff) "
-            + "    where $sim >= %ff " + "    /*+ hash*/ "
-            + "    group by $idLeft := $$LEFTPK_1, $idRight := $$RIGHTPK_1 with $sim "
-            //
-            // -- -- -
-            //
-            + "    ), $$LEFTPK = $idLeft)),  $$RIGHTPK = $idRight)";
-
-    private Collection<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        // the current operator must be a join
-        if (op.getOperatorTag() != LogicalOperatorTag.INNERJOIN
-                && op.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
-            return false;
-        }
-
-        // Find GET_ITEM function.
-        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
-        Mutable<ILogicalExpression> expRef = joinOp.getCondition();
-        Mutable<ILogicalExpression> getItemExprRef = getSimilarityExpression(expRef);
-        if (getItemExprRef == null) {
-            return false;
-        }
-        // Check whether GET_ITEM is applied to one of the supported similarity-check functions.
-        AbstractFunctionCallExpression getItemFuncExpr = (AbstractFunctionCallExpression) getItemExprRef.getValue();
-        Mutable<ILogicalExpression> argRef = getItemFuncExpr.getArguments().get(0);
-        AbstractFunctionCallExpression simFuncExpr = (AbstractFunctionCallExpression) argRef.getValue();
-        if (!simFuncs.contains(simFuncExpr.getFunctionIdentifier())) {
-            return false;
-        }
-        // Skip this rule based on annotations.
-        if (simFuncExpr.getAnnotations().containsKey(IndexedNLJoinExpressionAnnotation.INSTANCE)) {
-            return false;
-        }
-
-        List<Mutable<ILogicalOperator>> inputOps = joinOp.getInputs();
-        ILogicalOperator leftInputOp = inputOps.get(0).getValue();
-        ILogicalOperator rightInputOp = inputOps.get(1).getValue();
-
-        List<Mutable<ILogicalExpression>> inputExps = simFuncExpr.getArguments();
-
-        ILogicalExpression inputExp0 = inputExps.get(0).getValue();
-        ILogicalExpression inputExp1 = inputExps.get(1).getValue();
-
-        // left and right expressions are variables
-        if (inputExp0.getExpressionTag() != LogicalExpressionTag.VARIABLE
-                || inputExp1.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-
-        LogicalVariable inputVar0 = ((VariableReferenceExpression) inputExp0).getVariableReference();
-        LogicalVariable inputVar1 = ((VariableReferenceExpression) inputExp1).getVariableReference();
-
-        LogicalVariable leftInputVar;
-        LogicalVariable rightInputVar;
-
-        liveVars.clear();
-        VariableUtilities.getLiveVariables(leftInputOp, liveVars);
-        if (liveVars.contains(inputVar0)) {
-            leftInputVar = inputVar0;
-            rightInputVar = inputVar1;
-        } else {
-            leftInputVar = inputVar1;
-            rightInputVar = inputVar0;
-        }
-
-        List<LogicalVariable> leftInputPKs = context.findPrimaryKey(leftInputVar);
-        List<LogicalVariable> rightInputPKs = context.findPrimaryKey(rightInputVar);
-        // Bail if primary keys could not be inferred.
-        if (leftInputPKs == null || rightInputPKs == null) {
-            return false;
-        }
-        // primary key has only one variable
-        if (leftInputPKs.size() != 1 || rightInputPKs.size() != 1) {
-            return false;
-        }
-        IAType leftType = (IAType) context.getOutputTypeEnvironment(leftInputOp).getVarType(leftInputVar);
-        IAType rightType = (IAType) context.getOutputTypeEnvironment(rightInputOp).getVarType(rightInputVar);
-        // the left-hand side and the right-hand side of "~=" must have the same type
-        IAType left2 = TypeHelper.getNonOptionalType(leftType);
-        IAType right2 = TypeHelper.getNonOptionalType(rightType);
-        if (!left2.deepEqual(right2)) {
-            return false;
-        }
-        //
-        // -- - FIRE - --
-        //
-        AqlMetadataProvider metadataProvider = ((AqlMetadataProvider) context.getMetadataProvider());
-        FunctionIdentifier funcId = FuzzyUtils.getTokenizer(leftType.getTypeTag());
-        String tokenizer;
-        if (funcId == null) {
-            tokenizer = "";
-        } else {
-            tokenizer = funcId.getName();
-        }
-
-        float simThreshold = FuzzyUtils.getSimThreshold(metadataProvider);
-        String simFunction = FuzzyUtils.getSimFunction(metadataProvider);
-
-        // finalize AQL+ query
-        String prepareJoin;
-        switch (joinOp.getJoinKind()) {
-            case INNER: {
-                prepareJoin = "join" + AQLPLUS;
-                break;
-            }
-            case LEFT_OUTER: {
-                // TODO To make it work for Left Outer Joins, we should permute
-                // the #LEFT and #RIGHT at the top of the AQL+ query. But when
-                // doing this, fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
-                // (the one doing 3-way fuzzy joins) gives a different result.
-                // Even if we don't change the FuzzyJoinRule, permuting the for
-                // clauses in fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
-                // leads to different results, which suggests there is some
-                // other sort of bug.
-                return false;
-                // prepareJoin = "loj" + AQLPLUS;
-                // break;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-        String aqlPlus = String.format(Locale.US, prepareJoin, tokenizer, tokenizer, simFunction, simThreshold,
-                tokenizer, tokenizer, simFunction, simThreshold, simFunction, simThreshold, simThreshold);
-
-        LogicalVariable leftPKVar = leftInputPKs.get(0);
-        LogicalVariable rightPKVar = rightInputPKs.get(0);
-
-        Counter counter = new Counter(context.getVarCounter());
-
-        AQLPlusParser parser = new AQLPlusParser(new StringReader(aqlPlus));
-        parser.initScope();
-        parser.setVarCounter(counter);
-        List<Clause> clauses;
-        try {
-            clauses = parser.Clauses();
-        } catch (ParseException e) {
-            throw new AlgebricksException(e);
-        }
-        // The translator will compile metadata internally. Run this compilation
-        // under the same transaction id as the "outer" compilation.
-        AqlPlusExpressionToPlanTranslator translator = new AqlPlusExpressionToPlanTranslator(
-                metadataProvider.getJobId(), metadataProvider, counter, null, null);
-
-        LogicalOperatorDeepCopyVisitor deepCopyVisitor = new LogicalOperatorDeepCopyVisitor(counter);
-
-        translator.addOperatorToMetaScope(new Identifier("#LEFT"), leftInputOp);
-        translator.addVariableToMetaScope(new Identifier("$$LEFT"), leftInputVar);
-        translator.addVariableToMetaScope(new Identifier("$$LEFTPK"), leftPKVar);
-
-        translator.addOperatorToMetaScope(new Identifier("#RIGHT"), rightInputOp);
-        translator.addVariableToMetaScope(new Identifier("$$RIGHT"), rightInputVar);
-        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK"), rightPKVar);
-
-        translator.addOperatorToMetaScope(new Identifier("#LEFT_1"), deepCopyVisitor.deepCopy(leftInputOp, null));
-        translator.addVariableToMetaScope(new Identifier("$$LEFT_1"), deepCopyVisitor.varCopy(leftInputVar));
-        translator.addVariableToMetaScope(new Identifier("$$LEFTPK_1"), deepCopyVisitor.varCopy(leftPKVar));
-        deepCopyVisitor.updatePrimaryKeys(context);
-        deepCopyVisitor.reset();
-
-        // translator.addOperatorToMetaScope(new Identifier("#LEFT_2"),
-        // deepCopyVisitor.deepCopy(leftInputOp, null));
-        // translator.addVariableToMetaScope(new Identifier("$$LEFT_2"),
-        // deepCopyVisitor.varCopy(leftInputVar));
-        // translator.addVariableToMetaScope(new Identifier("$$LEFTPK_2"),
-        // deepCopyVisitor.varCopy(leftPKVar));
-        // deepCopyVisitor.updatePrimaryKeys(context);
-        // deepCopyVisitor.reset();
-        //
-        // translator.addOperatorToMetaScope(new Identifier("#LEFT_3"),
-        // deepCopyVisitor.deepCopy(leftInputOp, null));
-        // translator.addVariableToMetaScope(new Identifier("$$LEFT_3"),
-        // deepCopyVisitor.varCopy(leftInputVar));
-        // translator.addVariableToMetaScope(new Identifier("$$LEFTPK_3"),
-        // deepCopyVisitor.varCopy(leftPKVar));
-        // deepCopyVisitor.updatePrimaryKeys(context);
-        // deepCopyVisitor.reset();
-
-        translator.addOperatorToMetaScope(new Identifier("#RIGHT_1"), deepCopyVisitor.deepCopy(rightInputOp, null));
-        translator.addVariableToMetaScope(new Identifier("$$RIGHT_1"), deepCopyVisitor.varCopy(rightInputVar));
-        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK_1"), deepCopyVisitor.varCopy(rightPKVar));
-        deepCopyVisitor.updatePrimaryKeys(context);
-        deepCopyVisitor.reset();
-
-        // TODO pick side to run Stage 1, currently always picks RIGHT side
-        translator.addOperatorToMetaScope(new Identifier("#RIGHT_2"), deepCopyVisitor.deepCopy(rightInputOp, null));
-        translator.addVariableToMetaScope(new Identifier("$$RIGHT_2"), deepCopyVisitor.varCopy(rightInputVar));
-        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK_2"), deepCopyVisitor.varCopy(rightPKVar));
-        deepCopyVisitor.updatePrimaryKeys(context);
-        deepCopyVisitor.reset();
-
-        translator.addOperatorToMetaScope(new Identifier("#RIGHT_3"), deepCopyVisitor.deepCopy(rightInputOp, null));
-        translator.addVariableToMetaScope(new Identifier("$$RIGHT_3"), deepCopyVisitor.varCopy(rightInputVar));
-        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK_3"), deepCopyVisitor.varCopy(rightPKVar));
-        deepCopyVisitor.updatePrimaryKeys(context);
-        deepCopyVisitor.reset();
-
-        ILogicalPlan plan;
-        try {
-            plan = translator.translate(clauses);
-        } catch (AsterixException e) {
-            throw new AlgebricksException(e);
-        }
-        context.setVarCounter(counter.get());
-
-        ILogicalOperator outputOp = plan.getRoots().get(0).getValue();
-
-        SelectOperator extraSelect = null;
-        if (getItemExprRef != expRef) {
-            // more than one join condition
-            getItemExprRef.setValue(ConstantExpression.TRUE);
-            switch (joinOp.getJoinKind()) {
-                case INNER: {
-                    extraSelect = new SelectOperator(expRef, false, null);
-                    extraSelect.getInputs().add(new MutableObject<ILogicalOperator>(outputOp));
-                    outputOp = extraSelect;
-                    break;
-                }
-                case LEFT_OUTER: {
-                    if (((AbstractLogicalOperator) outputOp).getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
-                        throw new IllegalStateException();
-                    }
-                    LeftOuterJoinOperator topJoin = (LeftOuterJoinOperator) outputOp;
-                    topJoin.getCondition().setValue(expRef.getValue());
-                    break;
-                }
-                default: {
-                    throw new IllegalStateException();
-                }
-            }
-        }
-        opRef.setValue(outputOp);
-        OperatorPropertiesUtil.typeOpRec(opRef, context);
-        return true;
-    }
-
-    /**
-     * Look for GET_ITEM function call.
-     */
-    private Mutable<ILogicalExpression> getSimilarityExpression(Mutable<ILogicalExpression> expRef) {
-        ILogicalExpression exp = expRef.getValue();
-        if (exp.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) exp;
-            if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.GET_ITEM)) {
-                return expRef;
-            }
-            if (funcExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.AND)) {
-                for (int i = 0; i < 2; i++) {
-                    Mutable<ILogicalExpression> expRefRet = getSimilarityExpression(funcExpr.getArguments().get(i));
-                    if (expRefRet != null) {
-                        return expRefRet;
-                    }
-                }
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-}
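
For reference, the rewrite above compiles a record-similarity join into the classic
three-stage prefix-filtered plan that the AQL+ template spells out: rank tokens by
global frequency, join the two sides on a short prefix of each ranked token list, and
verify the similarity of the surviving candidate pairs. The stand-alone sketch below is
not AsterixDB code; the class and helper names are invented for illustration, and the
only formula assumed is the usual Jaccard prefix length len - ceil(threshold * len) + 1.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class PrefixFilteredJoinSketch {

        // A pair can only reach threshold t if the two sides agree on at least one of
        // the first (len - ceil(t * len) + 1) globally-ranked tokens.
        static int prefixLen(int len, float t) {
            return len - (int) Math.ceil(t * len) + 1;
        }

        static float jaccard(Set<String> a, Set<String> b) {
            Set<String> inter = new HashSet<String>(a);
            inter.retainAll(b);
            Set<String> union = new HashSet<String>(a);
            union.addAll(b);
            return union.isEmpty() ? 0f : (float) inter.size() / union.size();
        }

        public static void main(String[] args) {
            final float threshold = 0.5f;
            Map<Integer, List<String>> records = new LinkedHashMap<Integer, List<String>>();
            records.put(1, Arrays.asList("fuzzy", "join", "rule"));
            records.put(2, Arrays.asList("fuzzy", "join", "rewrite"));
            records.put(3, Arrays.asList("dynamic", "type", "cast"));

            // Stage 1: rank tokens by global frequency, rarest first (the
            // group-by/order-by block of the AQL+ template).
            final Map<String, Integer> freq = new HashMap<String, Integer>();
            for (List<String> tokens : records.values()) {
                for (String t : tokens) {
                    freq.put(t, freq.containsKey(t) ? freq.get(t) + 1 : 1);
                }
            }

            // Stage 2: index every record under its prefix tokens only.
            Map<String, List<Integer>> prefixIndex = new HashMap<String, List<Integer>>();
            for (Map.Entry<Integer, List<String>> e : records.entrySet()) {
                List<String> ranked = new ArrayList<String>(e.getValue());
                Collections.sort(ranked, new Comparator<String>() {
                    public int compare(String x, String y) {
                        int c = Integer.compare(freq.get(x), freq.get(y));
                        return c != 0 ? c : x.compareTo(y);
                    }
                });
                for (String t : ranked.subList(0, prefixLen(ranked.size(), threshold))) {
                    if (!prefixIndex.containsKey(t)) {
                        prefixIndex.put(t, new ArrayList<Integer>());
                    }
                    prefixIndex.get(t).add(e.getKey());
                }
            }

            // Stage 3: verify only the candidate pairs that share a prefix token.
            Set<String> verified = new HashSet<String>();
            for (List<Integer> ids : prefixIndex.values()) {
                for (int i = 0; i < ids.size(); i++) {
                    for (int j = i + 1; j < ids.size(); j++) {
                        if (!verified.add(ids.get(i) + "-" + ids.get(j))) {
                            continue; // already checked via another shared prefix token
                        }
                        float sim = jaccard(new HashSet<String>(records.get(ids.get(i))),
                                new HashSet<String>(records.get(ids.get(j))));
                        if (sim >= threshold) {
                            System.out.println(ids.get(i) + " ~ " + ids.get(j) + " (jaccard = " + sim + ")");
                        }
                    }
                }
            }
        }
    }

Running it prints the single qualifying pair "1 ~ 2 (jaccard = 0.5)"; everything else is
pruned before any similarity is computed, which is the work the staged plan pushes into
the prefix join.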

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java
deleted file mode 100644
index cd18517..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IfElseToSwitchCaseFunctionRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-            return false;
-
-        AssignOperator assignOp = (AssignOperator) op1;
-        List<Mutable<ILogicalExpression>> assignExprs = assignOp.getExpressions();
-        if (assignExprs.size() > 1)
-            return false;
-        ILogicalExpression expr = assignExprs.get(0).getValue();
-        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-            if (!funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.CONCAT_NON_NULL))
-                return false;
-        }
-
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.SUBPLAN)
-            return false;
-
-        SubplanOperator subplan = (SubplanOperator) op2;
-        List<ILogicalPlan> subPlans = subplan.getNestedPlans();
-        List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
-        for (ILogicalPlan plan : subPlans) {
-            List<Mutable<ILogicalOperator>> roots = plan.getRoots();
-
-            AbstractLogicalOperator nestedRoot = (AbstractLogicalOperator) roots.get(0).getValue();
-            if (nestedRoot.getOperatorTag() != LogicalOperatorTag.SELECT)
-                return false;
-            SelectOperator selectOp = (SelectOperator) nestedRoot;
-
-            AbstractLogicalOperator nestedNextOp = (AbstractLogicalOperator) nestedRoot.getInputs().get(0).getValue();
-            if (nestedNextOp.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-                return false;
-            AssignOperator assignRoot = (AssignOperator) nestedNextOp;
-            Mutable<ILogicalExpression> actionExprRef = assignRoot.getExpressions().get(0);
-
-            arguments.add(selectOp.getCondition());
-            arguments.add(actionExprRef);
-            AbstractLogicalOperator nestedBottomOp = (AbstractLogicalOperator) assignRoot.getInputs().get(0).getValue();
-
-            if (nestedBottomOp.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE)
-                return false;
-        }
-
-        AbstractLogicalOperator op3 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
-        if (op3.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-            return false;
-
-        AssignOperator bottomAssign = (AssignOperator) op3;
-        LogicalVariable conditionVar = bottomAssign.getVariables().get(0);
-        Mutable<ILogicalExpression> switchCondition = new MutableObject<ILogicalExpression>(
-                new VariableReferenceExpression(conditionVar));
-        List<Mutable<ILogicalExpression>> argumentRefs = new ArrayList<Mutable<ILogicalExpression>>();
-        argumentRefs.add(switchCondition);
-        argumentRefs.addAll(arguments);
-
-        /** replace the branch conditions */
-        for (int i = 0; i < arguments.size(); i += 2) {
-            if (arguments.get(i).getValue().equals(switchCondition.getValue())) {
-                arguments.get(i).setValue(ConstantExpression.TRUE);
-            } else {
-                arguments.get(i).setValue(ConstantExpression.FALSE);
-            }
-        }
-
-        ILogicalExpression callExpr = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SWITCH_CASE), argumentRefs);
-
-        assignOp.getInputs().get(0).setValue(op3);
-        assignOp.getExpressions().get(0).setValue(callExpr);
-        context.computeAndSetTypeEnvironmentForOperator(assignOp);
-        return true;
-    }
-}
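
As a rough illustration of what the rewritten assign evaluates: the rule above flattens
the two if/else subplan branches into a single switch-case call whose arguments are the
switch input followed by alternating case/result values (TRUE paired with the
then-branch, FALSE with the else-branch). The toy evaluator below only mirrors that
argument layout; the names are invented and this is not the runtime implementation of
switch-case.

    public class SwitchCaseSketch {

        // switch-case(input, case1, value1, case2, value2, ...): return the value of
        // the first case that equals the input.
        static Object switchCase(Object input, Object... casesAndValues) {
            for (int i = 0; i < casesAndValues.length; i += 2) {
                if (input.equals(casesAndValues[i])) {
                    return casesAndValues[i + 1];
                }
            }
            return null; // no case matched
        }

        public static void main(String[] args) {
            // "if $c then 'yes' else 'no'" becomes switch-case($c, true, 'yes', false, 'no').
            System.out.println(switchCase(Boolean.TRUE, true, "yes", false, "no"));  // yes
            System.out.println(switchCase(Boolean.FALSE, true, "yes", false, "no")); // no
        }
    }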

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/InlineUnnestFunctionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/InlineUnnestFunctionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/InlineUnnestFunctionRule.java
deleted file mode 100644
index 418e114..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/InlineUnnestFunctionRule.java
+++ /dev/null
@@ -1,174 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This rule inlines unnest functions that are held by variables.
- * It fixes issue 201.
- */
-public class InlineUnnestFunctionRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op1))
-            return false;
-        context.addToDontApplySet(this, op1);
-        if (op1.getOperatorTag() != LogicalOperatorTag.UNNEST)
-            return false;
-        UnnestOperator unnestOperator = (UnnestOperator) op1;
-        AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOperator.getExpressionRef()
-                .getValue();
-        //we only inline for the scan-collection function
-        if (expr.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION)
-            return false;
-
-        // inline all variables from an unnesting function call
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
-        for (int i = 0; i < args.size(); i++) {
-            ILogicalExpression argExpr = args.get(i).getValue();
-            if (argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                VariableReferenceExpression varExpr = (VariableReferenceExpression) argExpr;
-                inlineVariable(varExpr.getVariableReference(), unnestOperator);
-            }
-        }
-        return true;
-    }
-
-    /**
-     * This method inlines one variable
-     * 
-     * @param usedVar
-     *            A variable that is used by the scan-collection function in the unnest operator
-     * @param unnestOp
-     *            The unnest operator.
-     * @throws AlgebricksException
-     */
-    private void inlineVariable(LogicalVariable usedVar, UnnestOperator unnestOp) throws AlgebricksException {
-        AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOp.getExpressionRef().getValue();
-        List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList = new ArrayList<Pair<AbstractFunctionCallExpression, Integer>>();
-        getParentFunctionExpression(usedVar, expr, parentAndIndexList);
-        ILogicalExpression usedVarOrginExpr = findUsedVarOrigin(usedVar, unnestOp, (AbstractLogicalOperator) unnestOp
-                .getInputs().get(0).getValue());
-        if (usedVarOrginExpr != null) {
-            for (Pair<AbstractFunctionCallExpression, Integer> parentAndIndex : parentAndIndexList) {
-                //we only rewrite the top scan-collection function
-                if (parentAndIndex.first.getFunctionIdentifier() == AsterixBuiltinFunctions.SCAN_COLLECTION
-                        && parentAndIndex.first == expr) {
-                    unnestOp.getExpressionRef().setValue(usedVarOrginExpr);
-                }
-            }
-        }
-    }
-
-    private void getParentFunctionExpression(LogicalVariable usedVar, ILogicalExpression expr,
-            List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList) {
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
-        for (int i = 0; i < args.size(); i++) {
-            ILogicalExpression argExpr = args.get(i).getValue();
-            if (argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                VariableReferenceExpression varExpr = (VariableReferenceExpression) argExpr;
-                if (varExpr.getVariableReference().equals(usedVar))
-                    parentAndIndexList.add(new Pair<AbstractFunctionCallExpression, Integer>(funcExpr, i));
-            }
-            if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                getParentFunctionExpression(usedVar, argExpr, parentAndIndexList);
-            }
-        }
-    }
-
-    private ILogicalExpression findUsedVarOrigin(LogicalVariable usedVar, AbstractLogicalOperator parentOp,
-            AbstractLogicalOperator currentOp) throws AlgebricksException {
-        ILogicalExpression ret = null;
-        if (currentOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
-            VariableUtilities.getProducedVariables(currentOp, producedVars);
-            if (producedVars.contains(usedVar)) {
-                AssignOperator assignOp = (AssignOperator) currentOp;
-                int index = assignOp.getVariables().indexOf(usedVar);
-                ILogicalExpression returnedExpr = assignOp.getExpressions().get(index).getValue();
-                if (returnedExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) returnedExpr;
-                    if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(funcExpr.getFunctionIdentifier())) {
-                        // we only inline for unnest functions
-                        removeUnecessaryAssign(parentOp, currentOp, assignOp, index);
-                        ret = returnedExpr;
-                    }
-                } else if (returnedExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                    // recursively inline
-                    VariableReferenceExpression varExpr = (VariableReferenceExpression) returnedExpr;
-                    LogicalVariable var = varExpr.getVariableReference();
-                    ILogicalExpression finalExpr = findUsedVarOrigin(var, currentOp,
-                            (AbstractLogicalOperator) currentOp.getInputs().get(0).getValue());
-                    if (finalExpr != null) {
-                        removeUnecessaryAssign(parentOp, currentOp, assignOp, index);
-                        ret = finalExpr;
-                    }
-                }
-            }
-        } else {
-            for (Mutable<ILogicalOperator> child : currentOp.getInputs()) {
-                ILogicalExpression expr = findUsedVarOrigin(usedVar, currentOp,
-                        (AbstractLogicalOperator) child.getValue());
-                if (expr != null) {
-                    ret = expr;
-                }
-            }
-        }
-        return ret;
-    }
-
-    private void removeUnecessaryAssign(AbstractLogicalOperator parentOp, AbstractLogicalOperator currentOp,
-            AssignOperator assignOp, int index) {
-        assignOp.getVariables().remove(index);
-        assignOp.getExpressions().remove(index);
-        if (assignOp.getVariables().size() == 0) {
-            int opIndex = parentOp.getInputs().indexOf(new MutableObject<ILogicalOperator>(currentOp));
-            parentOp.getInputs().get(opIndex).setValue(assignOp.getInputs().get(0).getValue());
-        }
-    }
-}
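
The variable chasing in findUsedVarOrigin above boils down to the following: follow a
variable through the assign operators underneath the unnest until a function-call
definition is found, and let the unnest consume that call directly so the intermediate
assign can be dropped. The framework-free toy below illustrates just that chase; all
names and the string encoding of expressions are invented for illustration.

    import java.util.HashMap;
    import java.util.Map;

    public class InlineUnnestSketch {

        // Follow a variable through assign bindings until a non-variable
        // (function-call) definition is found.
        static String resolve(String var, Map<String, String> bindings) {
            String def = bindings.get(var);
            if (def == null) {
                return var;                    // not bound here; keep the variable
            }
            if (def.startsWith("$")) {
                return resolve(def, bindings); // aliased to another variable; recurse
            }
            return def;                        // a function-call definition; inline it
        }

        public static void main(String[] args) {
            Map<String, String> bindings = new HashMap<String, String>();
            bindings.put("$y", "range(1, 3)");
            bindings.put("$x", "$y");
            // unnest $i in scan-collection($x)  ==>  unnest $i in range(1, 3)
            System.out.println("unnest $i in " + resolve("$x", bindings));
        }
    }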

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
deleted file mode 100644
index b64c0e7..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IntroduceAutogenerateIDRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        // match:   [insert to internal dataset with autogenerated id] - assign - project
-        // produce: insert - assign - assign* - project
-        // or
-        // match:   [insert to internal dataset with autogenerated id] - assign - [datasource scan]
-        // produce: insert - assign - assign* - [datasource scan]
-
-        AbstractLogicalOperator currentOp = (AbstractLogicalOperator) opRef.getValue();
-        if (currentOp.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
-            return false;
-        }
-
-        InsertDeleteOperator insertOp = (InsertDeleteOperator) currentOp;
-        if (insertOp.getOperation() != Kind.INSERT) {
-            return false;
-        }
-
-        DatasetDataSource dds = (DatasetDataSource) insertOp.getDataSource();
-        boolean autogenerated = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).isAutogenerated();
-        if (!autogenerated) {
-            return false;
-        }
-
-        if (((AqlDataSource) insertOp.getDataSource()).getDatasourceType() != AqlDataSourceType.INTERNAL_DATASET) {
-            return false;
-        }
-
-        AbstractLogicalOperator parentOp = (AbstractLogicalOperator) currentOp.getInputs().get(0).getValue();
-        if (parentOp.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        AssignOperator assignOp = (AssignOperator) parentOp;
-        LogicalVariable inputRecord;
-
-        // Bug: this will not work for internal datasets with filters, since the pattern becomes [project-assign-assign-insert]. This should be fixed.
-        AbstractLogicalOperator grandparentOp = (AbstractLogicalOperator) parentOp.getInputs().get(0).getValue();
-        if (grandparentOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
-            ProjectOperator projectOp = (ProjectOperator) grandparentOp;
-            inputRecord = projectOp.getVariables().get(0);
-        } else if (grandparentOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-            DataSourceScanOperator dssOp = (DataSourceScanOperator) grandparentOp;
-            inputRecord = dssOp.getVariables().get(0);
-        } else {
-            return false;
-        }
-
-        List<String> pkFieldName = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).getPrimaryKey().get(
-                0);
-        ILogicalExpression rec0 = new VariableReferenceExpression(inputRecord);
-        ILogicalExpression rec1 = createPrimaryKeyRecordExpression(pkFieldName);
-        ILogicalExpression mergedRec = createRecordMergeFunction(rec0, rec1);
-        ILogicalExpression nonNullMergedRec = createNotNullFunction(mergedRec);
-
-        LogicalVariable v = context.newVar();
-        AssignOperator newAssign = new AssignOperator(v, new MutableObject<ILogicalExpression>(nonNullMergedRec));
-        newAssign.getInputs().add(new MutableObject<ILogicalOperator>(grandparentOp));
-        assignOp.getInputs().set(0, new MutableObject<ILogicalOperator>(newAssign));
-        VariableUtilities.substituteVariables(assignOp, inputRecord, v, context);
-        VariableUtilities.substituteVariables(insertOp, inputRecord, v, context);
-        context.computeAndSetTypeEnvironmentForOperator(newAssign);
-        context.computeAndSetTypeEnvironmentForOperator(assignOp);
-        context.computeAndSetTypeEnvironmentForOperator(insertOp);
-        return true;
-    }
-
-    private ILogicalExpression createNotNullFunction(ILogicalExpression mergedRec) {
-        List<Mutable<ILogicalExpression>> args = new ArrayList<>();
-        args.add(new MutableObject<ILogicalExpression>(mergedRec));
-        AbstractFunctionCallExpression notNullFn = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL), args);
-        return notNullFn;
-    }
-
-    private AbstractFunctionCallExpression createPrimaryKeyRecordExpression(List<String> pkFieldName) {
-        // Create the innermost level of the nested record that holds the generated uuid
-        AbstractFunctionCallExpression uuidFn = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_UUID));
-        List<Mutable<ILogicalExpression>> openRecordConsArgs = new ArrayList<>();
-        Mutable<ILogicalExpression> pkFieldNameExpression = new MutableObject<ILogicalExpression>(
-                new ConstantExpression(new AsterixConstantValue(new AString(pkFieldName.get(pkFieldName.size() - 1)))));
-        openRecordConsArgs.add(pkFieldNameExpression);
-        Mutable<ILogicalExpression> pkFieldValueExpression = new MutableObject<ILogicalExpression>(uuidFn);
-        openRecordConsArgs.add(pkFieldValueExpression);
-        AbstractFunctionCallExpression openRecFn = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), openRecordConsArgs);
-
-        //Create higher levels
-        for (int i = pkFieldName.size() - 2; i > -1; i--) {
-            AString fieldName = new AString(pkFieldName.get(i));
-            openRecordConsArgs = new ArrayList<>();
-            openRecordConsArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
-                    new AsterixConstantValue(fieldName))));
-            openRecordConsArgs.add(new MutableObject<ILogicalExpression>(openRecFn));
-            openRecFn = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), openRecordConsArgs);
-        }
-
-        return openRecFn;
-    }
-
-    private AbstractFunctionCallExpression createRecordMergeFunction(ILogicalExpression rec0, ILogicalExpression rec1) {
-        List<Mutable<ILogicalExpression>> recordMergeFnArgs = new ArrayList<>();
-        recordMergeFnArgs.add(new MutableObject<>(rec0));
-        recordMergeFnArgs.add(new MutableObject<>(rec1));
-        AbstractFunctionCallExpression recordMergeFn = new ScalarFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.RECORD_MERGE), recordMergeFnArgs);
-        return recordMergeFn;
-    }
-}
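
createPrimaryKeyRecordExpression above builds the generated-id record from the innermost
primary-key field outward, and the result is then record-merged with the incoming record.
The framework-free sketch below mimics that shape with plain maps; the names are
invented, and putAll is only a shallow stand-in for the real record-merge function.

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.UUID;

    public class AutogenerateIdSketch {

        // Build { f1: { f2: ... { fn: <uuid> } ... } } for the primary-key path f1.f2...fn.
        static Map<String, Object> pkRecord(List<String> pkPath) {
            Object value = UUID.randomUUID().toString();    // stand-in for create-uuid()
            for (int i = pkPath.size() - 1; i >= 0; i--) {  // innermost field first
                Map<String, Object> wrapper = new LinkedHashMap<String, Object>();
                wrapper.put(pkPath.get(i), value);
                value = wrapper;
            }
            @SuppressWarnings("unchecked")
            Map<String, Object> result = (Map<String, Object>) value;
            return result;
        }

        public static void main(String[] args) {
            Map<String, Object> input = new LinkedHashMap<String, Object>();
            input.put("name", "Person Three");
            Map<String, Object> merged = new LinkedHashMap<String, Object>(input);
            merged.putAll(pkRecord(Arrays.asList("nested", "id"))); // shallow "record-merge"
            System.out.println(merged); // {name=Person Three, nested={id=<some uuid>}}
        }
    }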

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
deleted file mode 100644
index 545a336..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.functions.AsterixExternalScalarFunctionInfo;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This rule provides the same type-casting handling as IntroduceDynamicTypeCastRule.
- * The only difference is that this rule is intended for external functions (user-defined functions).
- * Refer to IntroduceDynamicTypeCastRule for the details.
- */
-public class IntroduceDynamicTypeCastForExternalFunctionRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        /**
-         * pattern match: distribute_result - project - assign (external function call) - assign (open_record_constructor)
-         * resulting plan: distribute_result - project - assign (external function call) - assign (cast-record) - assign(open_record_constructor)
-         */
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.DISTRIBUTE_RESULT)
-            return false;
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.PROJECT)
-            return false;
-        AbstractLogicalOperator op3 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
-        if (op3.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-            return false;
-        AbstractLogicalOperator op4 = (AbstractLogicalOperator) op3.getInputs().get(0).getValue();
-        if (op4.getOperatorTag() != LogicalOperatorTag.ASSIGN)
-            return false;
-
-        // Op1 : assign (external function call), Op2 : assign (open_record_constructor)
-        AssignOperator assignOp1 = (AssignOperator) op3;
-        AssignOperator assignOp2 = (AssignOperator) op4;
-
-        // Checks whether open-record-constructor is called to create a record in the first assign operator - assignOp2
-        FunctionIdentifier fid = null;
-        ILogicalExpression assignExpr = assignOp2.getExpressions().get(0).getValue();
-        if (assignExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) assignOp2.getExpressions().get(0)
-                    .getValue();
-            fid = funcExpr.getFunctionIdentifier();
-
-            if (fid != AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR) {
-                return false;
-            }
-        } else {
-            return false;
-        }
-
-        // Checks whether an external function is called in the second assign operator - assignOp1
-        assignExpr = assignOp1.getExpressions().get(0).getValue();
-        ScalarFunctionCallExpression funcExpr = null;
-        if (assignExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            funcExpr = (ScalarFunctionCallExpression) assignOp1.getExpressions().get(0).getValue();
-            fid = funcExpr.getFunctionIdentifier();
-
-            // If this is an internal (built-in) function call, return false.
-            if (AsterixBuiltinFunctions.getBuiltinFunctionIdentifier(fid) != null) {
-                return false;
-            }
-
-        } else {
-            return false;
-        }
-
-        AsterixExternalScalarFunctionInfo finfo = (AsterixExternalScalarFunctionInfo) funcExpr.getFunctionInfo();
-        ARecordType requiredRecordType = (ARecordType) finfo.getArgumenTypes().get(0);
-
-        List<LogicalVariable> recordVar = new ArrayList<LogicalVariable>();
-        recordVar.addAll(assignOp2.getVariables());
-
-        IVariableTypeEnvironment env = assignOp2.computeOutputTypeEnvironment(context);
-        IAType inputRecordType = (IAType) env.getVarType(recordVar.get(0));
-
-        /** the input record type can be a union type -- for the case when it comes from a subplan or left-outer join */
-        boolean checkNull = false;
-        while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
-            /** while-loop for the case where there is a nested multi-level union */
-            inputRecordType = ((AUnionType) inputRecordType).getNullableType();
-            checkNull = true;
-        }
-
-        /** see whether the input record type needs to be cast */
-        boolean cast = !IntroduceDynamicTypeCastRule.compatible(requiredRecordType, inputRecordType);
-
-        if (checkNull) {
-            recordVar.set(0, IntroduceDynamicTypeCastRule.addWrapperFunction(requiredRecordType, recordVar.get(0),
-                    assignOp1, context, AsterixBuiltinFunctions.NOT_NULL));
-        }
-        if (cast) {
-            IntroduceDynamicTypeCastRule.addWrapperFunction(requiredRecordType, recordVar.get(0), assignOp1, context,
-                    AsterixBuiltinFunctions.CAST_RECORD);
-        }
-        return cast || checkNull;
-    }
-
-}
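
Both this rule and IntroduceDynamicTypeCastRule below end up injecting a cast-record
call whose runtime job is to reorder an open record's fields into the required closed
layout, fill a missing optional field with null, and keep unmatched fields in the open
part. The framework-free sketch below only illustrates that behavior on plain maps,
reusing the "Person Three" example from the rule documentation; the names are invented
and this is not the cast-record implementation.

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class CastRecordSketch {

        // Reorder the record into the required field order; absent required fields come
        // out as null, and any remaining fields are kept as the open part.
        static Map<String, Object> castRecord(Map<String, Object> in, List<String> requiredFields) {
            Map<String, Object> out = new LinkedHashMap<String, Object>();
            for (String f : requiredFields) {
                out.put(f, in.get(f));
            }
            for (Map.Entry<String, Object> e : in.entrySet()) {
                if (!requiredFields.contains(e.getKey())) {
                    out.put(e.getKey(), e.getValue());
                }
            }
            return out;
        }

        public static void main(String[] args) {
            // Required closed type: (id: string, name: string, hobby: {{string}}?)
            List<String> required = Arrays.asList("id", "name", "hobby");

            Map<String, Object> open = new LinkedHashMap<String, Object>();
            open.put("hobby", Arrays.asList("music", "coding"));
            open.put("id", "001");
            open.put("name", "Person Three");
            System.out.println(castRecord(open, required)); // fields reordered to id, name, hobby

            Map<String, Object> missingHobby = new LinkedHashMap<String, Object>();
            missingHobby.put("id", "002");
            missingHobby.put("name", "Person Four");
            System.out.println(castRecord(missingHobby, required)); // hobby filled in as null
        }
    }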

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
deleted file mode 100644
index 64c34ed..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Dynamically cast a variable from its type to a specified required type, in a
- * recursive way. It enables: 1. bag-based fields in a record, 2. bidirectional
- * casting between an open field and a matched closed field, and 3. filling in
- * null fields when necessary.
- * Here is an example: A record { "hobby": {{"music", "coding"}}, "id": "001",
- * "name": "Person Three"} which conforms to closed type ( id: string, name:
- * string, hobby: {{string}}? ) can be cast to an open type (id: string ), or
- * vice versa.
- * However, if the input record is a variable, then we don't know its exact
- * field layout at compile time. For example, records conforming to the same
- * type can have different field orderings and different open parts. That's why
- * we need dynamic type casting.
- * Note that as we can see in the example, the ordering of fields of a record is
- * not required. Since the open and closed parts of a record have completely
- * different underlying memory/storage layouts, a cast-record function will
- * change the layout as specified at runtime.
- * Implementation wise, this rule checks the target dataset type and the input
- * record type, and if the types are different, then it plugs in an assign with
- * a cast-record function, and projects away the original (uncast) field.
- */
-public class IntroduceDynamicTypeCastRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        // Depending on the operator type, we need to extract the following pieces of information.
-        AbstractLogicalOperator op;
-        ARecordType requiredRecordType;
-        LogicalVariable recordVar;
-
-        // We identify INSERT and DISTRIBUTE_RESULT operators.
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        switch (op1.getOperatorTag()) {
-            case SINK: {
-                /**
-                 * pattern match: sink insert assign
-                 * resulting plan: sink-insert-project-assign
-                 */
-
-                AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-                if (op2.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
-                    InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
-                    if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
-                        return false;
-
-                    // Remember this is the operator we need to modify
-                    op = insertDeleteOp;
-
-                    // Derive the required ARecordType based on the schema of the AqlDataSource
-                    InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) op2;
-                    AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
-                    IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
-                    requiredRecordType = (ARecordType) schemaTypes[schemaTypes.length - 1];
-
-                    // Derive the Variable which we will potentially wrap with cast/null functions
-                    ILogicalExpression expr = insertDeleteOperator.getPayloadExpression().getValue();
-                    List<LogicalVariable> payloadVars = new ArrayList<LogicalVariable>();
-                    expr.getUsedVariables(payloadVars);
-                    recordVar = payloadVars.get(0);
-                } else {
-                    return false;
-                }
-
-                break;
-            }
-            case DISTRIBUTE_RESULT: {
-                // First, see if there was an output-record-type specified
-                requiredRecordType = (ARecordType) op1.getAnnotations().get("output-record-type");
-                if (requiredRecordType == null) {
-                    return false;
-                }
-
-                // Remember this is the operator we need to modify
-                op = op1;
-
-                // The Variable we want is the (hopefully singular, hopefully record-typed) live variable
-                // of the singular input operator of the DISTRIBUTE_RESULT
-                if (op.getInputs().size() > 1) {
-                    // Hopefully not possible?
-                    throw new AlgebricksException(
-                            "output-record-type defined for expression with multiple input operators");
-                }
-                AbstractLogicalOperator input = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-                List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
-                VariableUtilities.getLiveVariables(input, liveVars);
-                if (liveVars.size() > 1) {
-                    throw new AlgebricksException(
-                            "Expression with multiple fields cannot be cast to output-record-type!");
-                }
-                recordVar = liveVars.get(0);
-
-                break;
-            }
-            default: {
-                return false;
-            }
-        }
-
-        // Derive the statically-computed type of the record
-        IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
-        IAType inputRecordType = (IAType) env.getVarType(recordVar);
-
-        /** the input record type can be a union type -- for the case when it comes from a subplan or left-outer join */
-        boolean checkNull = false;
-        while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
-            /** while-loop for the case there is a nested multi-level union */
-            inputRecordType = ((AUnionType) inputRecordType).getNullableType();
-            checkNull = true;
-        }
-
-        /** see whether the input record type needs to be cast */
-        boolean cast = !compatible(requiredRecordType, inputRecordType);
-
-        if (checkNull) {
-            recordVar = addWrapperFunction(requiredRecordType, recordVar, op, context, AsterixBuiltinFunctions.NOT_NULL);
-        }
-        if (cast) {
-            addWrapperFunction(requiredRecordType, recordVar, op, context, AsterixBuiltinFunctions.CAST_RECORD);
-        }
-        return cast || checkNull;
-    }
-
-    /**
-     * Inject a function to wrap a variable when necessary
-     *
-     * @param requiredRecordType
-     *            the required record type
-     * @param recordVar
-     *            the record variable
-     * @param parent
-     *            the current parent operator to be rewritten
-     * @param context
-     *            the optimization context
-     * @param fd
-     *            the function to be injected
-     * @return the variable bound to the injected wrapper function, or null if recordVar was not found.
-     * @throws AlgebricksException
-     */
-    public static LogicalVariable addWrapperFunction(ARecordType requiredRecordType, LogicalVariable recordVar,
-            ILogicalOperator parent, IOptimizationContext context, FunctionIdentifier fd) throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> opRefs = parent.getInputs();
-        for (int index = 0; index < opRefs.size(); index++) {
-            Mutable<ILogicalOperator> opRef = opRefs.get(index);
-            ILogicalOperator op = opRef.getValue();
-
-            /** get produced vars */
-            List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
-            VariableUtilities.getProducedVariables(op, producedVars);
-            IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
-            for (int i = 0; i < producedVars.size(); i++) {
-                LogicalVariable var = producedVars.get(i);
-                if (var.equals(recordVar)) {
-                    /** insert an assign operator to call the function on-top-of the variable */
-                    IAType actualType = (IAType) env.getVarType(var);
-                    AbstractFunctionCallExpression cast = new ScalarFunctionCallExpression(
-                            FunctionUtils.getFunctionInfo(fd));
-                    cast.getArguments()
-                            .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
-                    /** enforce the required record type */
-                    TypeComputerUtilities.setRequiredAndInputTypes(cast, requiredRecordType, actualType);
-                    LogicalVariable newAssignVar = context.newVar();
-                    AssignOperator newAssignOperator = new AssignOperator(newAssignVar,
-                            new MutableObject<ILogicalExpression>(cast));
-                    newAssignOperator.getInputs().add(new MutableObject<ILogicalOperator>(op));
-                    opRef.setValue(newAssignOperator);
-                    context.computeAndSetTypeEnvironmentForOperator(newAssignOperator);
-                    newAssignOperator.computeOutputTypeEnvironment(context);
-                    VariableUtilities.substituteVariables(parent, recordVar, newAssignVar, context);
-                    return newAssignVar;
-                }
-            }
-            /** recursive descend to the operator who produced the recordVar */
-            LogicalVariable replacedVar = addWrapperFunction(requiredRecordType, recordVar, op, context, fd);
-            if (replacedVar != null) {
-                /** substitute the recordVar by the replacedVar for operators who uses recordVar */
-                VariableUtilities.substituteVariables(parent, recordVar, replacedVar, context);
-                return replacedVar;
-            }
-        }
-        return null;
-    }
-
-    /**
-     * Check whether the required record type and the input type is compatible
-     *
-     * @param reqType
-     * @param inputType
-     * @return true if compatible; false otherwise
-     * @throws AlgebricksException
-     */
-    public static boolean compatible(ARecordType reqType, IAType inputType) throws AlgebricksException {
-        if (inputType.getTypeTag() == ATypeTag.ANY) {
-            return false;
-        }
-        if (inputType.getTypeTag() != ATypeTag.RECORD) {
-            throw new AlgebricksException("The input type " + inputType + " is not a valid record type!");
-        }
-        ARecordType inputRecType = (ARecordType) inputType;
-        if (reqType.isOpen() != inputRecType.isOpen()) {
-            return false;
-        }
-
-        IAType[] reqTypes = reqType.getFieldTypes();
-        String[] reqFieldNames = reqType.getFieldNames();
-        IAType[] inputTypes = inputRecType.getFieldTypes();
-        String[] inputFieldNames = ((ARecordType) inputType).getFieldNames();
-
-        if (reqTypes.length != inputTypes.length) {
-            return false;
-        }
-        for (int i = 0; i < reqTypes.length; i++) {
-            if (!reqFieldNames[i].equals(inputFieldNames[i])) {
-                return false;
-            }
-            IAType reqTypeInside = reqTypes[i];
-            if (NonTaggedFormatUtil.isOptional(reqTypes[i])) {
-                reqTypeInside = ((AUnionType) reqTypes[i]).getNullableType();
-            }
-            IAType inputTypeInside = inputTypes[i];
-            if (NonTaggedFormatUtil.isOptional(inputTypes[i])) {
-                if (!NonTaggedFormatUtil.isOptional(reqTypes[i])) {
-                    /** if the required type is not optional, the two types are incompatible */
-                    return false;
-                }
-                inputTypeInside = ((AUnionType) inputTypes[i]).getNullableType();
-            }
-            if (inputTypeInside.getTypeTag() != ATypeTag.NULL && !reqTypeInside.equals(inputTypeInside)) {
-                return false;
-            }
-        }
-        return true;
-    }
-}

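A note on the dynamic cast described in the removed rule above: the following standalone sketch (plain java.util only; the class and method names are hypothetical and this is not the Asterix ARecordType/cast-record API) models a record as an ordered map and shows why a cast between an open and a closed layout has to rebuild the record: the closed form keeps exactly the declared fields, in schema order, with absent optional fields ending up as null.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Toy model of the closed-vs-open record cast; not the Asterix runtime.
public class RecordCastSketch {

    /** "Casts" an open record to a closed schema: declared fields only, in schema order. */
    static Map<String, Object> castToClosed(Map<String, Object> openRecord, List<String> closedFields) {
        Map<String, Object> closed = new LinkedHashMap<>();
        for (String field : closedFields) {
            // A missing optional field ends up as null here (the "putting in null fields" case).
            closed.put(field, openRecord.get(field));
        }
        return closed;
    }

    public static void main(String[] args) {
        // Same example record as in the rule's Javadoc, but with fields in a different order.
        Map<String, Object> open = new LinkedHashMap<>();
        open.put("name", "Person Three");
        open.put("hobby", Arrays.asList("music", "coding"));
        open.put("id", "001");

        // Closed target type ( id: string, name: string, hobby: {{string}}? )
        System.out.println(castToClosed(open, Arrays.asList("id", "name", "hobby")));
        // => {id=001, name=Person Three, hobby=[music, coding]}
    }
}
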
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
deleted file mode 100644
index 543a132..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.optimizer.rules.typecast.StaticTypeCastUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This class enforces types for function expressions that contain list constructor function calls.
- * The list constructor is special because a nested list is of type List<ANY>.
- * However, the bottom-up type inference (InferTypeRule in Algebricks) does not infer that, so the type must be enforced explicitly.
- * To preserve the generality of Algebricks, the enforcement is done in this ASTERIX rule: {@link IntroduceEnforcedListTypeRule}.
- */
-public class IntroduceEnforcedListTypeRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        if (context.checkIfInDontApplySet(this, opRef.getValue()))
-            return false;
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        context.addToDontApplySet(this, opRef.getValue());
-
-        /**
-         * rewrite list constructor types for list constructor functions
-         */
-        List<Mutable<ILogicalExpression>> expressions;
-        switch (op.getOperatorTag()) {
-            case ASSIGN:
-                AbstractAssignOperator assignOp = (AbstractAssignOperator) op;
-                expressions = assignOp.getExpressions();
-                break;
-            case UNNEST:
-                AbstractUnnestOperator unnestOp = (AbstractUnnestOperator) op;
-                expressions = Collections.singletonList(unnestOp.getExpressionRef());
-                break;
-            default:
-                return false;
-        }
-        IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
-        return rewriteExpressions(expressions, env);
-    }
-
-    private boolean rewriteExpressions(List<Mutable<ILogicalExpression>> expressions, IVariableTypeEnvironment env)
-            throws AlgebricksException {
-        boolean changed = false;
-        for (Mutable<ILogicalExpression> exprRef : expressions) {
-            ILogicalExpression expr = exprRef.getValue();
-            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) expr;
-                IAType exprType = (IAType) env.getType(argFuncExpr);
-                if (StaticTypeCastUtil.rewriteListExpr(argFuncExpr, exprType, exprType, env)) {
-                    TypeComputerUtilities.resetRequiredAndInputTypes(argFuncExpr);
-                    changed = true;
-                }
-            }
-        }
-        return changed;
-    }
-
-}


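As the removed Javadoc above notes, a nested list constructor is initially typed List<ANY>, so the required element type must be pushed down from the enclosing expression. The toy check below (plain Java, hypothetical names, no Algebricks types) only illustrates the conformance question that the enforcement answers; it is not the StaticTypeCastUtil.rewriteListExpr logic.

import java.util.Arrays;
import java.util.List;

// Why List<ANY> needs a pushed-down required type: the inner lists alone carry no element type.
public class EnforcedListTypeSketch {

    /** Returns true if every (possibly nested) element conforms to the required leaf type. */
    static boolean conformsTo(Object value, Class<?> requiredLeafType) {
        if (value instanceof List<?>) {
            for (Object element : (List<?>) value) {
                if (!conformsTo(element, requiredLeafType)) {
                    return false;
                }
            }
            return true;
        }
        return requiredLeafType.isInstance(value);
    }

    public static void main(String[] args) {
        List<Object> nested = Arrays.asList(Arrays.asList("music", "coding"), Arrays.asList("reading"));
        System.out.println(conformsTo(nested, String.class));   // true
        System.out.println(conformsTo(nested, Integer.class));  // false
    }
}
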
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLoadManager.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLoadManager.java
deleted file mode 100644
index b45368a..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedLoadManager.java
+++ /dev/null
@@ -1,298 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.feeds.FeedActivity;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedJobInfo.FeedJobState;
-import edu.uci.ics.asterix.common.feeds.FeedRuntimeId;
-import edu.uci.ics.asterix.common.feeds.NodeLoadReport;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
-import edu.uci.ics.asterix.common.feeds.message.FeedCongestionMessage;
-import edu.uci.ics.asterix.common.feeds.message.FeedReportMessage;
-import edu.uci.ics.asterix.common.feeds.message.ScaleInReportMessage;
-import edu.uci.ics.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-import edu.uci.ics.asterix.file.FeedOperations;
-import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
-import edu.uci.ics.asterix.metadata.feeds.PrepareStallMessage;
-import edu.uci.ics.asterix.metadata.feeds.TerminateDataFlowMessage;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class FeedLoadManager implements IFeedLoadManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedLoadManager.class.getName());
-
-    private static final long MIN_MODIFICATION_INTERVAL = 180000; // 3 minutes
-    private final TreeSet<NodeLoadReport> nodeReports;
-    private final Map<FeedConnectionId, FeedActivity> feedActivities;
-    private final Map<String, Pair<Integer, Integer>> feedMetrics;
-
-    private FeedConnectionId lastModified;
-    private long lastModifiedTimestamp;
-
-    private static final int UNKNOWN = -1;
-
-    public FeedLoadManager() {
-        this.nodeReports = new TreeSet<NodeLoadReport>();
-        this.feedActivities = new HashMap<FeedConnectionId, FeedActivity>();
-        this.feedMetrics = new HashMap<String, Pair<Integer, Integer>>();
-    }
-
-    @Override
-    public void submitNodeLoadReport(NodeLoadReport report) {
-        nodeReports.remove(report);
-        nodeReports.add(report);
-    }
-
-    @Override
-    public void reportCongestion(FeedCongestionMessage message) throws AsterixException {
-        FeedRuntimeId runtimeId = message.getRuntimeId();
-        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
-        if (jobState == null
-                || (jobState.equals(FeedJobState.UNDER_RECOVERY))
-                || (message.getConnectionId().equals(lastModified) && System.currentTimeMillis()
-                        - lastModifiedTimestamp < MIN_MODIFICATION_INTERVAL)) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring congestion report from " + runtimeId);
-            }
-            return;
-        } else {
-            try {
-                FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
-                int inflowRate = message.getInflowRate();
-                int outflowRate = message.getOutflowRate();
-                List<String> currentComputeLocations = new ArrayList<String>();
-                currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message
-                        .getConnectionId().getFeedId()));
-                int computeCardinality = currentComputeLocations.size();
-                int requiredCardinality = (int) Math
-                        .ceil((double) ((computeCardinality * inflowRate) / (double) outflowRate)) + 5;
-                int additionalComputeNodes = requiredCardinality - computeCardinality;
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("INCREASING COMPUTE CARDINALITY from " + computeCardinality + " by "
-                            + additionalComputeNodes);
-                }
-
-                List<String> helperComputeNodes = getNodeForSubstitution(additionalComputeNodes);
-
-                // Step 1) Alter the original feed job to adjust the cardinality
-                JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
-                        .getConnectionId());
-                helperComputeNodes.addAll(currentComputeLocations);
-                List<String> newLocations = new ArrayList<String>();
-                newLocations.addAll(currentComputeLocations);
-                newLocations.addAll(helperComputeNodes);
-                FeedUtil.increaseCardinality(jobSpec, FeedRuntimeType.COMPUTE, requiredCardinality, newLocations);
-
-                // Step 2) send prepare to  stall message
-                gracefullyTerminateDataFlow(message.getConnectionId(), Integer.MAX_VALUE);
-
-                // Step 3) run the altered job specification 
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("New Job after adjusting to the workload " + jobSpec);
-                }
-
-                Thread.sleep(10000);
-                runJob(jobSpec, false);
-                lastModified = message.getConnectionId();
-                lastModifiedTimestamp = System.currentTimeMillis();
-
-            } catch (Exception e) {
-                e.printStackTrace();
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("Unable to form the required job for scaling in/out" + e.getMessage());
-                }
-                throw new AsterixException(e);
-            }
-        }
-    }
-
-    @Override
-    public void submitScaleInPossibleReport(ScaleInReportMessage message) throws Exception {
-        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
-        if (jobState == null || (jobState.equals(FeedJobState.UNDER_RECOVERY))) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("JobState information for job " + "[" + message.getConnectionId() + "]" + " not found ");
-            }
-            return;
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Processing scale-in message " + message);
-            }
-            FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
-            JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
-                    .getConnectionId());
-            int reducedCardinality = message.getReducedCardinaliy();
-            List<String> currentComputeLocations = new ArrayList<String>();
-            currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message.getConnectionId()
-                    .getFeedId()));
-            FeedUtil.decreaseComputeCardinality(jobSpec, FeedRuntimeType.COMPUTE, reducedCardinality,
-                    currentComputeLocations);
-
-            gracefullyTerminateDataFlow(message.getConnectionId(), reducedCardinality - 1);
-            Thread.sleep(3000);
-            JobId newJobId = runJob(jobSpec, false);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Launch modified job" + "[" + newJobId + "]" + "for scale-in \n" + jobSpec);
-            }
-
-        }
-    }
-
-    private void gracefullyTerminateDataFlow(FeedConnectionId connectionId, int computePartitionRetainLimit)
-            throws Exception {
-        // Step 1) send prepare to  stall message
-        PrepareStallMessage stallMessage = new PrepareStallMessage(connectionId, computePartitionRetainLimit);
-        List<String> intakeLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-        List<String> computeLocations = FeedLifecycleListener.INSTANCE.getComputeLocations(connectionId.getFeedId());
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-
-        Set<String> operatorLocations = new HashSet<String>();
-
-        operatorLocations.addAll(intakeLocations);
-        operatorLocations.addAll(computeLocations);
-        operatorLocations.addAll(storageLocations);
-
-        JobSpecification messageJobSpec = FeedOperations.buildPrepareStallMessageJob(stallMessage, operatorLocations);
-        runJob(messageJobSpec, true);
-
-        // Step 2)
-        TerminateDataFlowMessage terminateMesg = new TerminateDataFlowMessage(connectionId);
-        messageJobSpec = FeedOperations.buildTerminateFlowMessageJob(terminateMesg, intakeLocations);
-        runJob(messageJobSpec, true);
-    }
-
-    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobId jobId = hcc.startJob(spec);
-        if (waitForCompletion) {
-            hcc.waitForCompletion(jobId);
-        }
-        return jobId;
-    }
-
-    @Override
-    public void submitFeedRuntimeReport(FeedReportMessage report) {
-        String key = "" + report.getConnectionId() + ":" + report.getRuntimeId().getFeedRuntimeType();
-        Pair<Integer, Integer> value = feedMetrics.get(key);
-        if (value == null) {
-            value = new Pair<Integer, Integer>(report.getValue(), 1);
-            feedMetrics.put(key, value);
-        } else {
-            value.first = value.first + report.getValue();
-            value.second = value.second + 1;
-        }
-    }
-
-    @Override
-    public int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType) {
-        int rVal;
-        String key = "" + connectionId + ":" + runtimeType;
-        Pair<Integer, Integer> value = feedMetrics.get(key);
-        if (value == null) {
-            rVal = UNKNOWN;
-        } else {
-            rVal = value.first / value.second;
-        }
-        return rVal;
-    }
-
-    private List<String> getNodeForSubstitution(int nRequired) {
-        List<String> nodeIds = new ArrayList<String>();
-        Iterator<NodeLoadReport> it = null;
-        int nAdded = 0;
-        while (nAdded < nRequired) {
-            it = nodeReports.iterator();
-            while (it.hasNext()) {
-                nodeIds.add(it.next().getNodeId());
-                nAdded++;
-            }
-        }
-        return nodeIds;
-    }
-
-    @Override
-    public synchronized List<String> getNodes(int required) {
-        Iterator<NodeLoadReport> it;
-        List<String> allocated = new ArrayList<String>();
-        while (allocated.size() < required) {
-            it = nodeReports.iterator();
-            while (it.hasNext() && allocated.size() < required) {
-                allocated.add(it.next().getNodeId());
-            }
-        }
-        return allocated;
-    }
-
-    @Override
-    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception {
-        System.out.println("Throttling Enabled for " + mesg.getConnectionId() + " " + mesg.getFeedRuntimeId());
-        FeedConnectionId connectionId = mesg.getConnectionId();
-        List<String> destinationLocations = new ArrayList<String>();
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-
-        destinationLocations.addAll(storageLocations);
-        destinationLocations.addAll(collectLocations);
-        JobSpecification messageJobSpec = FeedOperations.buildNotifyThrottlingEnabledMessageJob(mesg,
-                destinationLocations);
-        runJob(messageJobSpec, true);
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Acking disabled for " + mesg.getConnectionId() + " in view of activated throttling");
-        }
-        IFeedTrackingManager trackingManager = CentralFeedManager.getInstance().getFeedTrackingManager();
-        trackingManager.disableAcking(connectionId);
-    }
-
-    @Override
-    public void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity) {
-        feedActivities.put(connectionId, activity);
-    }
-
-    @Override
-    public FeedActivity getFeedActivity(FeedConnectionId connectionId) {
-        return feedActivities.get(connectionId);
-    }
-
-    @Override
-    public Collection<FeedActivity> getFeedActivities() {
-        return feedActivities.values();
-    }
-
-    @Override
-    public void removeFeedActivity(FeedConnectionId connectionId) {
-        feedActivities.remove(connectionId);
-    }
-}

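The scale-out decision in reportCongestion above reduces to a single cardinality formula. The snippet below reruns that arithmetic with made-up inflow/outflow numbers; only the formula and the +5 headroom come from the deleted code, everything else is an assumed example.

// Worked example of the compute-cardinality adjustment used in reportCongestion.
public class ScaleOutMath {
    public static void main(String[] args) {
        int computeCardinality = 4;   // current number of compute locations (assumed)
        int inflowRate = 900;         // tuples/sec arriving at compute (assumed)
        int outflowRate = 300;        // tuples/sec leaving compute (assumed)

        // Same formula as the rule: ceil(cardinality * inflow / outflow) + 5 headroom nodes.
        int requiredCardinality =
                (int) Math.ceil((computeCardinality * inflowRate) / (double) outflowRate) + 5;
        int additionalComputeNodes = requiredCardinality - computeCardinality;

        System.out.println("required cardinality = " + requiredCardinality);     // 17
        System.out.println("additional compute nodes = " + additionalComputeNodes); // 13
    }
}
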
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedManager.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedManager.java
deleted file mode 100644
index e3020aa..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedManager.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.config.AsterixFeedProperties;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.feeds.FeedMemoryManager;
-import edu.uci.ics.asterix.common.feeds.FeedMessageService;
-import edu.uci.ics.asterix.common.feeds.FeedMetricCollector;
-import edu.uci.ics.asterix.common.feeds.NodeLoadReportService;
-import edu.uci.ics.asterix.common.feeds.api.IFeedConnectionManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedMemoryManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedMessageService;
-import edu.uci.ics.asterix.common.feeds.api.IFeedMetadataManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedMetricCollector;
-import edu.uci.ics.asterix.common.feeds.api.IFeedSubscriptionManager;
-import edu.uci.ics.asterix.metadata.feeds.FeedConnectionManager;
-import edu.uci.ics.asterix.metadata.feeds.FeedSubscriptionManager;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-/**
- * An implementation of the IFeedManager interface.
- * Provides the necessary central repository for registering/retrieving
- * artifacts/services associated with a feed.
- */
-public class FeedManager implements IFeedManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedManager.class.getName());
-
-    private final IFeedSubscriptionManager feedSubscriptionManager;
-
-    private final IFeedConnectionManager feedConnectionManager;
-
-    private final IFeedMemoryManager feedMemoryManager;
-
-    private final IFeedMetricCollector feedMetricCollector;
-
-    private final IFeedMetadataManager feedMetadataManager;
-
-    private final IFeedMessageService feedMessageService;
-
-    private final NodeLoadReportService nodeLoadReportService;
-
-    private final AsterixFeedProperties asterixFeedProperties;
-
-    private final String nodeId;
-
-    private final int frameSize;
-
-    public FeedManager(String nodeId, AsterixFeedProperties feedProperties, int frameSize) throws AsterixException, HyracksDataException {
-        this.nodeId = nodeId;
-        this.feedSubscriptionManager = new FeedSubscriptionManager(nodeId);
-        this.feedConnectionManager = new FeedConnectionManager(nodeId);
-        this.feedMetadataManager = new FeedMetadataManager(nodeId);
-        this.feedMemoryManager = new FeedMemoryManager(nodeId, feedProperties, frameSize);
-        String ccClusterIp = AsterixClusterProperties.INSTANCE.getCluster() != null ? AsterixClusterProperties.INSTANCE
-                .getCluster().getMasterNode().getClusterIp() : "localhost";
-        this.feedMessageService = new FeedMessageService(feedProperties, nodeId, ccClusterIp);
-        this.nodeLoadReportService = new NodeLoadReportService(nodeId, this);
-        try {
-            this.feedMessageService.start();
-            this.nodeLoadReportService.start();
-        } catch (Exception e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to start feed services " + e.getMessage());
-            }
-            e.printStackTrace();
-        }
-        this.feedMetricCollector = new FeedMetricCollector(nodeId);
-        this.frameSize = frameSize;
-        this.asterixFeedProperties = feedProperties;
-    }
-
-    @Override
-    public IFeedSubscriptionManager getFeedSubscriptionManager() {
-        return feedSubscriptionManager;
-    }
-
-    @Override
-    public IFeedConnectionManager getFeedConnectionManager() {
-        return feedConnectionManager;
-    }
-
-    @Override
-    public IFeedMemoryManager getFeedMemoryManager() {
-        return feedMemoryManager;
-    }
-
-    @Override
-    public IFeedMetricCollector getFeedMetricCollector() {
-        return feedMetricCollector;
-    }
-
-    public int getFrameSize() {
-        return frameSize;
-    }
-
-    @Override
-    public IFeedMetadataManager getFeedMetadataManager() {
-        return feedMetadataManager;
-    }
-
-    @Override
-    public IFeedMessageService getFeedMessageService() {
-        return feedMessageService;
-    }
-
-    @Override
-    public String getNodeId() {
-        return nodeId;
-    }
-
-    @Override
-    public String toString() {
-        return "FeedManager " + "[" + nodeId + "]";
-    }
-
-    @Override
-    public AsterixFeedProperties getAsterixFeedProperties() {
-        return asterixFeedProperties;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMessageReceiver.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMessageReceiver.java
deleted file mode 100644
index 8530370..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMessageReceiver.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.logging.Level;
-
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.common.feeds.FeedConstants;
-import edu.uci.ics.asterix.common.feeds.FeedTupleCommitAckMessage;
-import edu.uci.ics.asterix.common.feeds.MessageReceiver;
-import edu.uci.ics.asterix.common.feeds.NodeLoadReport;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedMessage.MessageType;
-import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
-import edu.uci.ics.asterix.common.feeds.message.FeedCongestionMessage;
-import edu.uci.ics.asterix.common.feeds.message.FeedReportMessage;
-import edu.uci.ics.asterix.common.feeds.message.ScaleInReportMessage;
-import edu.uci.ics.asterix.common.feeds.message.StorageReportFeedMessage;
-import edu.uci.ics.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-import edu.uci.ics.asterix.feeds.CentralFeedManager.AQLExecutor;
-import edu.uci.ics.asterix.hyracks.bootstrap.FeedBootstrap;
-
-public class FeedMessageReceiver extends MessageReceiver<String> {
-
-    private static boolean initialized;
-
-    private final IFeedLoadManager feedLoadManager;
-    private final IFeedTrackingManager feedTrackingManager;
-
-    public FeedMessageReceiver(CentralFeedManager centralFeedManager) {
-        this.feedLoadManager = centralFeedManager.getFeedLoadManager();
-        this.feedTrackingManager = centralFeedManager.getFeedTrackingManager();
-    }
-
-    @Override
-    public void processMessage(String message) throws Exception {
-        JSONObject obj = new JSONObject(message);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Received message " + obj);
-        }
-        MessageType messageType = MessageType.valueOf(obj.getString(FeedConstants.MessageConstants.MESSAGE_TYPE));
-        switch (messageType) {
-            case XAQL:
-                if (!initialized) {
-                    FeedBootstrap.setUpInitialArtifacts();
-                    initialized = true;
-                }
-                AQLExecutor.executeAQL(obj.getString(FeedConstants.MessageConstants.AQL));
-                break;
-            case CONGESTION:
-                feedLoadManager.reportCongestion(FeedCongestionMessage.read(obj));
-                break;
-            case FEED_REPORT:
-                feedLoadManager.submitFeedRuntimeReport(FeedReportMessage.read(obj));
-                break;
-            case NODE_REPORT:
-                feedLoadManager.submitNodeLoadReport(NodeLoadReport.read(obj));
-                break;
-            case SCALE_IN_REQUEST:
-                feedLoadManager.submitScaleInPossibleReport(ScaleInReportMessage.read(obj));
-                break;
-            case STORAGE_REPORT:
-                FeedLifecycleListener.INSTANCE.updateTrackingInformation(StorageReportFeedMessage.read(obj));
-                break;
-            case COMMIT_ACK:
-                feedTrackingManager.submitAckReport(FeedTupleCommitAckMessage.read(obj));
-                break;
-            case THROTTLING_ENABLED:
-                feedLoadManager.reportThrottlingEnabled(ThrottlingEnabledFeedMessage.read(obj));
-            default:
-                break;
-        }
-
-    }
-}

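FeedMessageReceiver above expects a JSON envelope whose message-type field selects a handler. The minimal dispatcher below mirrors that shape using plain org.json; the literal key names and payload fields are assumptions, since the real constants live in FeedConstants.MessageConstants and are not shown here.

import org.json.JSONObject;

// Sketch of the envelope-and-dispatch pattern; key names are assumed, not the Asterix constants.
public class MessageDispatchSketch {

    enum MessageType { NODE_REPORT, CONGESTION, FEED_REPORT }

    static void process(String message) {
        JSONObject obj = new JSONObject(message);
        MessageType type = MessageType.valueOf(obj.getString("message-type"));
        switch (type) {
            case NODE_REPORT:
                System.out.println("node report from " + obj.optString("node-id", "?"));
                break;
            case CONGESTION:
                System.out.println("congestion: inflow=" + obj.optInt("inflow-rate"));
                break;
            default:
                break;
        }
    }

    public static void main(String[] args) {
        process("{\"message-type\":\"NODE_REPORT\",\"node-id\":\"nc1\"}");
    }
}
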
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMetadataManager.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMetadataManager.java
deleted file mode 100644
index b927c90..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedMetadataManager.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.Date;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.api.IFeedManager;
-import edu.uci.ics.asterix.common.feeds.api.IFeedMetadataManager;
-import edu.uci.ics.asterix.hyracks.bootstrap.FeedBootstrap;
-import edu.uci.ics.asterix.metadata.feeds.XAQLFeedMessage;
-import edu.uci.ics.asterix.om.base.ARecord;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class FeedMetadataManager implements IFeedMetadataManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMetadataManager.class.getName());
-
-    private final String nodeId;
-    private ARecordType recordType;
-
-    public FeedMetadataManager(String nodeId) throws AsterixException, HyracksDataException {
-        this.nodeId = nodeId;
-        String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple", "message",
-                "timestamp" };
-        IAType[] fieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
-
-        recordType = new ARecordType(FeedBootstrap.FAILED_TUPLE_DATASET_TYPE, fieldNames, fieldTypes, true);
-    }
-
-    @Override
-    public void logTuple(FeedConnectionId connectionId, String tuple, String message, IFeedManager feedManager)
-            throws AsterixException {
-        try {
-            AString id = new AString("1");
-            AString dataverseValue = new AString(connectionId.getFeedId().getDataverse());
-            AString feedValue = new AString(connectionId.getFeedId().getFeedName());
-            AString targetDatasetValue = new AString(connectionId.getDatasetName());
-            AString tupleValue = new AString(tuple);
-            AString messageValue = new AString(message);
-            AString dateTime = new AString(new Date().toString());
-
-            IAObject[] fields = new IAObject[] { id, dataverseValue, feedValue, targetDatasetValue, tupleValue,
-                    messageValue, dateTime };
-            ARecord record = new ARecord(recordType, fields);
-            StringBuilder builder = new StringBuilder();
-            builder.append("use dataverse " + FeedBootstrap.FEEDS_METADATA_DV + ";" + "\n");
-            builder.append("insert into dataset " + FeedBootstrap.FAILED_TUPLE_DATASET + " ");
-            builder.append(" (" + recordToString(record) + ")");
-            builder.append(";");
-
-            XAQLFeedMessage xAqlMessage = new XAQLFeedMessage(connectionId, builder.toString());
-            feedManager.getFeedMessageService().sendMessage(xAqlMessage);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(" Sent " + xAqlMessage.toJSON());
-            }
-        } catch (Exception pe) {
-            throw new AsterixException(pe);
-        }
-    }
-
-    @Override
-    public String toString() {
-        return "FeedMetadataManager [" + nodeId + "]";
-    }
-
-    private String recordToString(ARecord record) {
-        String[] fieldNames = record.getType().getFieldNames();
-        StringBuilder sb = new StringBuilder();
-        sb.append("{ ");
-        for (int i = 0; i < fieldNames.length; i++) {
-            if (i > 0) {
-                sb.append(", ");
-            }
-            sb.append("\"" + fieldNames[i] + "\"");
-            sb.append(": ");
-            switch (record.getType().getFieldTypes()[i].getTypeTag()) {
-                case STRING:
-                    sb.append("\"" + ((AString) record.getValueByPos(i)).getStringValue() + "\"");
-                    break;
-            }
-        }
-        sb.append(" }");
-        return sb.toString();
-    }
-}

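logTuple above wraps a serialized record in a "use dataverse ...; insert into dataset ...;" statement and ships it as an XAQL message. The sketch below rebuilds that string with a plain map standing in for ARecord, so the produced statement shape is visible; the dataverse and dataset names are placeholders, not the FeedBootstrap constants.

import java.util.LinkedHashMap;
import java.util.Map;

// Sketch of the statement assembled by logTuple, with a plain map in place of ARecord.
public class FailedTupleStatementSketch {

    static String recordToAdm(Map<String, String> fields) {
        StringBuilder sb = new StringBuilder("{ ");
        boolean first = true;
        for (Map.Entry<String, String> e : fields.entrySet()) {
            if (!first) {
                sb.append(", ");
            }
            first = false;
            sb.append("\"").append(e.getKey()).append("\": \"").append(e.getValue()).append("\"");
        }
        return sb.append(" }").toString();
    }

    public static void main(String[] args) {
        Map<String, String> record = new LinkedHashMap<>();
        record.put("id", "1");
        record.put("feedName", "TwitterFeed");
        record.put("message", "parse error");

        // Placeholder dataverse/dataset names; the real ones come from FeedBootstrap.
        String statement = "use dataverse feeds_metadata;\n"
                + "insert into dataset failed_tuple (" + recordToAdm(record) + ");";
        System.out.println(statement);
    }
}
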
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedTrackingManager.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedTrackingManager.java
deleted file mode 100644
index 3e5a1a4..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedTrackingManager.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedTupleCommitAckMessage;
-import edu.uci.ics.asterix.common.feeds.FeedTupleCommitResponseMessage;
-import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
-import edu.uci.ics.asterix.file.FeedOperations;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class FeedTrackingManager implements IFeedTrackingManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedTrackingManager.class.getName());
-
-    private final BitSet allOnes;
-
-    private Map<FeedConnectionId, Map<AckId, BitSet>> ackHistory;
-    private Map<FeedConnectionId, Map<AckId, Integer>> maxBaseAcked;
-
-    public FeedTrackingManager() {
-        byte[] allOneBytes = new byte[128];
-        Arrays.fill(allOneBytes, (byte) 0xff);
-        allOnes = BitSet.valueOf(allOneBytes);
-        ackHistory = new HashMap<FeedConnectionId, Map<AckId, BitSet>>();
-        maxBaseAcked = new HashMap<FeedConnectionId, Map<AckId, Integer>>();
-    }
-
-    @Override
-    public synchronized void submitAckReport(FeedTupleCommitAckMessage ackMessage) {
-        AckId ackId = getAckId(ackMessage);
-        Map<AckId, BitSet> acksForConnection = ackHistory.get(ackMessage.getConnectionId());
-        if (acksForConnection == null) {
-            acksForConnection = new HashMap<AckId, BitSet>();
-            acksForConnection.put(ackId, BitSet.valueOf(ackMessage.getCommitAcks()));
-            ackHistory.put(ackMessage.getConnectionId(), acksForConnection);
-        }
-        BitSet currentAcks = acksForConnection.get(ackId);
-        if (currentAcks == null) {
-            currentAcks = BitSet.valueOf(ackMessage.getCommitAcks());
-            acksForConnection.put(ackId, currentAcks);
-        } else {
-            currentAcks.or(BitSet.valueOf(ackMessage.getCommitAcks()));
-        }
-        if (Arrays.equals(currentAcks.toByteArray(), allOnes.toByteArray())) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(ackMessage.getIntakePartition() + " (" + ackMessage.getBase() + ")" + " is covered");
-            }
-            Map<AckId, Integer> maxBaseAckedForConnection = maxBaseAcked.get(ackMessage.getConnectionId());
-            if (maxBaseAckedForConnection == null) {
-                maxBaseAckedForConnection = new HashMap<AckId, Integer>();
-                maxBaseAcked.put(ackMessage.getConnectionId(), maxBaseAckedForConnection);
-            }
-            Integer maxBaseAckedValue = maxBaseAckedForConnection.get(ackId);
-            if (maxBaseAckedValue == null) {
-                maxBaseAckedValue = ackMessage.getBase();
-                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
-                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
-                        ackMessage.getBase());
-            } else if (ackMessage.getBase() == maxBaseAckedValue + 1) {
-                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
-                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
-                        ackMessage.getBase());
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Ignoring discontinuous acked base " + ackMessage.getBase() + " for " + ackId);
-                }
-            }
-
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("AckId " + ackId + " pending number of acks " + (128 * 8 - currentAcks.cardinality()));
-            }
-        }
-    }
-
-    public synchronized void disableTracking(FeedConnectionId connectionId) {
-        ackHistory.remove(connectionId);
-        maxBaseAcked.remove(connectionId);
-    }
-
-    private void sendCommitResponseMessage(FeedConnectionId connectionId, int partition, int base) {
-        FeedTupleCommitResponseMessage response = new FeedTupleCommitResponseMessage(connectionId, partition, base);
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-        String collectLocation = collectLocations.get(partition);
-        Set<String> messageDestinations = new HashSet<String>();
-        messageDestinations.add(collectLocation);
-        messageDestinations.addAll(storageLocations);
-        try {
-            JobSpecification spec = FeedOperations.buildCommitAckResponseJob(response, messageDestinations);
-            CentralFeedManager.runJob(spec, false);
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to send commit response message " + response + " exception " + e.getMessage());
-            }
-        }
-    }
-
-    private static AckId getAckId(FeedTupleCommitAckMessage ackMessage) {
-        return new AckId(ackMessage.getConnectionId(), ackMessage.getIntakePartition(), ackMessage.getBase());
-    }
-
-    private static class AckId {
-        private FeedConnectionId connectionId;
-        private int intakePartition;
-        private int base;
-
-        public AckId(FeedConnectionId connectionId, int intakePartition, int base) {
-            this.connectionId = connectionId;
-            this.intakePartition = intakePartition;
-            this.base = base;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) {
-                return true;
-            }
-            if (!(o instanceof AckId)) {
-                return false;
-            }
-            AckId other = (AckId) o;
-            return other.getConnectionId().equals(connectionId) && other.getIntakePartition() == intakePartition
-                    && other.getBase() == base;
-        }
-
-        @Override
-        public String toString() {
-            return connectionId + "[" + intakePartition + "]" + "(" + base + ")";
-        }
-
-        @Override
-        public int hashCode() {
-            return toString().hashCode();
-        }
-
-        public FeedConnectionId getConnectionId() {
-            return connectionId;
-        }
-
-        public int getIntakePartition() {
-            return intakePartition;
-        }
-
-        public int getBase() {
-            return base;
-        }
-
-    }
-
-    @Override
-    public void disableAcking(FeedConnectionId connectionId) {
-        ackHistory.remove(connectionId);
-        maxBaseAcked.remove(connectionId);
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Acking disabled for " + connectionId);
-        }
-    }
-
-}
\ No newline at end of file

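The ack bookkeeping above ORs partial acknowledgement bitmaps into one BitSet per (connection, partition, base) and treats the base as covered once the result equals an all-ones mask of 128 bytes (1024 bits). The standalone snippet below reproduces that accumulation and completeness check with java.util.BitSet only; the half-and-half split of ack reports is an illustrative assumption.

import java.util.Arrays;
import java.util.BitSet;

// Reproduces the all-ones completeness check used by FeedTrackingManager.
public class AckBitSetSketch {
    public static void main(String[] args) {
        byte[] allOneBytes = new byte[128];
        Arrays.fill(allOneBytes, (byte) 0xff);
        BitSet allOnes = BitSet.valueOf(allOneBytes);   // 128 * 8 = 1024 tuple slots

        // Two partial ack reports covering complementary halves of the tuple range.
        BitSet firstHalf = new BitSet();
        firstHalf.set(0, 512);
        BitSet secondHalf = new BitSet();
        secondHalf.set(512, 1024);

        BitSet current = new BitSet();
        current.or(firstHalf);
        System.out.println("pending acks = " + (128 * 8 - current.cardinality())); // 512

        current.or(secondHalf);
        System.out.println("covered = "
                + Arrays.equals(current.toByteArray(), allOnes.toByteArray()));     // true
    }
}
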
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedWorkRequestResponseHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedWorkRequestResponseHandler.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedWorkRequestResponseHandler.java
deleted file mode 100644
index ba54406..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedWorkRequestResponseHandler.java
+++ /dev/null
@@ -1,263 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.api.IClusterManagementWork;
-import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
-import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
-import edu.uci.ics.asterix.common.feeds.FeedIntakeInfo;
-import edu.uci.ics.asterix.common.feeds.FeedJobInfo;
-import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
-import edu.uci.ics.asterix.metadata.cluster.AddNodeWorkResponse;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.constraints.Constraint;
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.constraints.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression;
-import edu.uci.ics.hyracks.api.constraints.expressions.ConstraintExpression.ExpressionTag;
-import edu.uci.ics.hyracks.api.constraints.expressions.LValueConstraintExpression;
-import edu.uci.ics.hyracks.api.constraints.expressions.PartitionCountExpression;
-import edu.uci.ics.hyracks.api.constraints.expressions.PartitionLocationExpression;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class FeedWorkRequestResponseHandler implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedWorkRequestResponseHandler.class.getName());
-
-    private final LinkedBlockingQueue<IClusterManagementWorkResponse> inbox;
-
-    private Map<Integer, Map<String, List<FeedJobInfo>>> feedsWaitingForResponse = new HashMap<Integer, Map<String, List<FeedJobInfo>>>();
-
-    public FeedWorkRequestResponseHandler(LinkedBlockingQueue<IClusterManagementWorkResponse> inbox) {
-        this.inbox = inbox;
-    }
-
-    @Override
-    public void run() {
-        while (true) {
-            IClusterManagementWorkResponse response = null;
-            try {
-                response = inbox.take();
-            } catch (InterruptedException e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Interrupted exception " + e.getMessage());
-                }
-            }
-            IClusterManagementWork submittedWork = response.getWork();
-            Map<String, String> nodeSubstitution = new HashMap<String, String>();
-            switch (submittedWork.getClusterManagementWorkType()) {
-                case ADD_NODE:
-                    AddNodeWork addNodeWork = (AddNodeWork) submittedWork;
-                    int workId = addNodeWork.getWorkId();
-                    Map<String, List<FeedJobInfo>> failureAnalysis = feedsWaitingForResponse.get(workId);
-                    AddNodeWorkResponse resp = (AddNodeWorkResponse) response;
-                    List<String> nodesAdded = resp.getNodesAdded();
-                    List<String> unsubstitutedNodes = new ArrayList<String>();
-                    unsubstitutedNodes.addAll(addNodeWork.getDeadNodes());
-                    int nodeIndex = 0;
-
-                    // Form a mapping between each failed node and its substitute.
-                    if (nodesAdded != null && nodesAdded.size() > 0) {
-                        for (String failedNodeId : addNodeWork.getDeadNodes()) {
-                            String substitute = nodesAdded.get(nodeIndex);
-                            nodeSubstitution.put(failedNodeId, substitute);
-                            nodeIndex = (nodeIndex + 1) % nodesAdded.size();
-                            unsubstitutedNodes.remove(failedNodeId);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Node " + substitute + " chosen to substiute lost node " + failedNodeId);
-                            }
-                        }
-                    }
-                    if (unsubstitutedNodes.size() > 0) {
-                        String[] participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes().toArray(
-                                new String[] {});
-                        nodeIndex = 0;
-                        for (String unsubstitutedNode : unsubstitutedNodes) {
-                            nodeSubstitution.put(unsubstitutedNode, participantNodes[nodeIndex]);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Node " + participantNodes[nodeIndex] + " chosen to substiute lost node "
-                                        + unsubstitutedNode);
-                            }
-                            nodeIndex = (nodeIndex + 1) % participantNodes.length;
-                        }
-
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Request " + resp.getWork() + " completed using internal nodes");
-                        }
-                    }
-
-                    // Rewrite the specs of all impacted feed jobs (intake and connect) to use the substitute nodes.
-
-                    for (Entry<String, List<FeedJobInfo>> entry : failureAnalysis.entrySet()) {
-                        String failedNode = entry.getKey();
-                        List<FeedJobInfo> impactedJobInfos = entry.getValue();
-                        for (FeedJobInfo info : impactedJobInfos) {
-                            JobSpecification spec = info.getSpec();
-                            replaceNode(spec, failedNode, nodeSubstitution.get(failedNode));
-                            info.setSpec(spec);
-                        }
-                    }
-
-                    Set<FeedIntakeInfo> revisedIntakeJobs = new HashSet<FeedIntakeInfo>();
-                    Set<FeedConnectJobInfo> revisedConnectJobInfos = new HashSet<FeedConnectJobInfo>();
-
-                    for (List<FeedJobInfo> infos : failureAnalysis.values()) {
-                        for (FeedJobInfo info : infos) {
-                            switch (info.getJobType()) {
-                                case INTAKE:
-                                    revisedIntakeJobs.add((FeedIntakeInfo) info);
-                                    break;
-                                case FEED_CONNECT:
-                                    revisedConnectJobInfos.add((FeedConnectJobInfo) info);
-                                    break;
-                            }
-                        }
-                    }
-
-                    IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-                    try {
-                        for (FeedIntakeInfo info : revisedIntakeJobs) {
-                            hcc.startJob(info.getSpec());
-                        }
-                        Thread.sleep(2000);
-                        for (FeedConnectJobInfo info : revisedConnectJobInfos) {
-                            hcc.startJob(info.getSpec());
-                            Thread.sleep(2000);
-                        }
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to start revised job post failure");
-                        }
-                    }
-
-                    break;
-                case REMOVE_NODE:
-                    throw new IllegalStateException("Invalid work submitted");
-            }
-        }
-    }
-
-    private void replaceNode(JobSpecification jobSpec, String failedNodeId, String replacementNode) {
-        Set<Constraint> userConstraints = jobSpec.getUserConstraints();
-        List<Constraint> locationConstraintsToReplace = new ArrayList<Constraint>();
-        List<Constraint> countConstraintsToReplace = new ArrayList<Constraint>();
-        List<OperatorDescriptorId> modifiedOperators = new ArrayList<OperatorDescriptorId>();
-        Map<OperatorDescriptorId, List<Constraint>> candidateConstraints = new HashMap<OperatorDescriptorId, List<Constraint>>();
-        Map<OperatorDescriptorId, Map<Integer, String>> newConstraints = new HashMap<OperatorDescriptorId, Map<Integer, String>>();
-        OperatorDescriptorId opId = null;
-        for (Constraint constraint : userConstraints) {
-            LValueConstraintExpression lexpr = constraint.getLValue();
-            ConstraintExpression cexpr = constraint.getRValue();
-            switch (lexpr.getTag()) {
-                case PARTITION_COUNT:
-                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
-                    if (modifiedOperators.contains(opId)) {
-                        countConstraintsToReplace.add(constraint);
-                    } else {
-                        List<Constraint> clist = candidateConstraints.get(opId);
-                        if (clist == null) {
-                            clist = new ArrayList<Constraint>();
-                            candidateConstraints.put(opId, clist);
-                        }
-                        clist.add(constraint);
-                    }
-                    break;
-                case PARTITION_LOCATION:
-                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
-                    String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
-                    if (oldLocation.equals(failedNodeId)) {
-                        locationConstraintsToReplace.add(constraint);
-                        modifiedOperators.add(((PartitionLocationExpression) lexpr).getOperatorDescriptorId());
-                        Map<Integer, String> newLocs = newConstraints.get(opId);
-                        if (newLocs == null) {
-                            newLocs = new HashMap<Integer, String>();
-                            newConstraints.put(opId, newLocs);
-                        }
-                        int partition = ((PartitionLocationExpression) lexpr).getPartition();
-                        newLocs.put(partition, replacementNode);
-                    } else {
-                        if (modifiedOperators.contains(opId)) {
-                            locationConstraintsToReplace.add(constraint);
-                            Map<Integer, String> newLocs = newConstraints.get(opId);
-                            if (newLocs == null) {
-                                newLocs = new HashMap<Integer, String>();
-                                newConstraints.put(opId, newLocs);
-                            }
-                            int partition = ((PartitionLocationExpression) lexpr).getPartition();
-                            newLocs.put(partition, oldLocation);
-                        } else {
-                            List<Constraint> clist = candidateConstraints.get(opId);
-                            if (clist == null) {
-                                clist = new ArrayList<Constraint>();
-                                candidateConstraints.put(opId, clist);
-                            }
-                            clist.add(constraint);
-                        }
-                    }
-                    break;
-            }
-        }
-
-        jobSpec.getUserConstraints().removeAll(locationConstraintsToReplace);
-        jobSpec.getUserConstraints().removeAll(countConstraintsToReplace);
-
-        for (OperatorDescriptorId mopId : modifiedOperators) {
-            List<Constraint> clist = candidateConstraints.get(mopId);
-            if (clist != null && !clist.isEmpty()) {
-                jobSpec.getUserConstraints().removeAll(clist);
-
-                for (Constraint c : clist) {
-                    if (c.getLValue().getTag().equals(ExpressionTag.PARTITION_LOCATION)) {
-                        ConstraintExpression cexpr = c.getRValue();
-                        int partition = ((PartitionLocationExpression) c.getLValue()).getPartition();
-                        String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
-                        newConstraints.get(mopId).put(partition, oldLocation);
-                    }
-                }
-            }
-        }
-
-        for (Entry<OperatorDescriptorId, Map<Integer, String>> entry : newConstraints.entrySet()) {
-            OperatorDescriptorId nopId = entry.getKey();
-            Map<Integer, String> clist = entry.getValue();
-            IOperatorDescriptor op = jobSpec.getOperatorMap().get(nopId);
-            String[] locations = new String[clist.size()];
-            for (int i = 0; i < locations.length; i++) {
-                locations[i] = clist.get(i);
-            }
-            PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, op, locations);
-        }
-
-    }
-
-    public void registerFeedWork(int workId, Map<String, List<FeedJobInfo>> impactedJobs) {
-        feedsWaitingForResponse.put(workId, impactedJobs);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedsActivator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedsActivator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedsActivator.java
deleted file mode 100644
index 0e7ec5b..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/feeds/FeedsActivator.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feeds;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
-import edu.uci.ics.asterix.aql.expression.DataverseDecl;
-import edu.uci.ics.asterix.aql.expression.Identifier;
-import edu.uci.ics.asterix.aql.translator.AqlTranslator;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.api.job.JobId;
-
-public class FeedsActivator implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedsActivator.class.getName());
-
-    private List<FeedCollectInfo> feedsToRevive;
-    private Mode mode;
-
-    public enum Mode {
-        REVIVAL_POST_CLUSTER_REBOOT,
-        REVIVAL_POST_NODE_REJOIN
-    }
-
-    public FeedsActivator() {
-        this.mode = Mode.REVIVAL_POST_CLUSTER_REBOOT;
-    }
-
-    public FeedsActivator(List<FeedCollectInfo> feedsToRevive) {
-        this.feedsToRevive = feedsToRevive;
-        this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
-    }
-
-    @Override
-    public void run() {
-        switch (mode) {
-            case REVIVAL_POST_CLUSTER_REBOOT:
-                //revivePostClusterReboot();
-                break;
-            case REVIVAL_POST_NODE_REJOIN:
-                try {
-                    Thread.sleep(10000);
-                } catch (InterruptedException e1) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Attempt to resume feed interrupted");
-                    }
-                    throw new IllegalStateException(e1.getMessage());
-                }
-                for (FeedCollectInfo finfo : feedsToRevive) {
-                    try {
-                        JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
-                            LOGGER.info("Job:" + finfo.jobSpec);
-                        }
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
-                        }
-                    }
-                }
-        }
-    }
-
-    public void reviveFeed(String dataverse, String feedName, String dataset, String feedPolicy) {
-        PrintWriter writer = new PrintWriter(System.out, true);
-        SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-        try {
-            DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(dataverse));
-            ConnectFeedStatement stmt = new ConnectFeedStatement(new Identifier(dataverse), new Identifier(feedName),
-                    new Identifier(dataset), feedPolicy, 0);
-            stmt.setForceConnect(true);
-            List<Statement> statements = new ArrayList<Statement>();
-            statements.add(dataverseDecl);
-            statements.add(stmt);
-            AqlTranslator translator = new AqlTranslator(statements, pc);
-            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, AqlTranslator.ResultDelivery.SYNC);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Resumed feed: " + dataverse + ":" + dataset + " using policy " + feedPolicy);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Exception in resuming loser feed: " + dataverse + ":" + dataset + " using policy "
-                        + feedPolicy + " Exception " + e.getMessage());
-            }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
deleted file mode 100644
index d476eed..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.file;
-
-import java.io.File;
-import java.rmi.RemoteException;
-import java.util.Map;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.exceptions.ACIDException;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.formats.base.IDataFormat;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.resource.LSMBTreeLocalResourceMetadata;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.storage.common.file.LocalResource;
-
-public class DatasetOperations {
-
-    private static final Logger LOGGER = Logger.getLogger(DatasetOperations.class.getName());
-
-    public static JobSpecification createDropDatasetJobSpec(CompiledDatasetDropStatement datasetDropStmt,
-            AqlMetadataProvider metadataProvider) throws AlgebricksException, HyracksDataException, RemoteException,
-            ACIDException, AsterixException {
-
-        String dataverseName = null;
-        if (datasetDropStmt.getDataverseName() != null) {
-            dataverseName = datasetDropStmt.getDataverseName();
-        } else if (metadataProvider.getDefaultDataverse() != null) {
-            dataverseName = metadataProvider.getDefaultDataverse().getDataverseName();
-        }
-
-        String datasetName = datasetDropStmt.getDatasetName();
-        String datasetPath = dataverseName + File.separator + datasetName;
-
-        LOGGER.info("DROP DATASETPATH: " + datasetPath);
-
-        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-        if (dataset == null) {
-            throw new AlgebricksException("DROP DATASET: No metadata for dataset " + datasetName);
-        }
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            return JobSpecificationUtils.createJobSpecification();
-        }
-        boolean temp = dataset.getDatasetDetails().isTemp();
-
-        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
-                dataverseName);
-        IDataFormat format;
-        try {
-            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
-        } catch (Exception e) {
-            throw new AsterixException(e);
-        }
-
-        ARecordType itemType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
-
-        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
-        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
-                itemType, format.getBinaryComparatorFactoryProvider());
-        int[] filterFields = DatasetUtils.createFilterFields(dataset);
-        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
-        JobSpecification specPrimary = JobSpecificationUtils.createJobSpecification();
-
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(dataset.getDataverseName(), datasetName, datasetName,
-                        temp);
-        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-
-        // The index drop operation should be persistent regardless of temp datasets or permanent dataset
-        IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
-                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
-                        filterCmpFactories, btreeFields, filterFields, true));
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
-                splitsAndConstraint.second);
-
-        specPrimary.addRoot(primaryBtreeDrop);
-
-        return specPrimary;
-    }
-
-    public static JobSpecification createDatasetJobSpec(Dataverse dataverse, String datasetName,
-            AqlMetadataProvider metadata) throws AsterixException, AlgebricksException {
-        String dataverseName = dataverse.getDataverseName();
-        IDataFormat format;
-        try {
-            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
-        } catch (Exception e) {
-            throw new AsterixException(e);
-        }
-        Dataset dataset = metadata.findDataset(dataverseName, datasetName);
-        if (dataset == null) {
-            throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
-        }
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
-                itemType, format.getBinaryComparatorFactoryProvider());
-        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
-        int[] bloomFilterKeyFields = DatasetUtils.createBloomFilterKeyFields(dataset);
-
-        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
-        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
-                itemType, format.getBinaryComparatorFactoryProvider());
-        int[] filterFields = DatasetUtils.createFilterFields(dataset);
-        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
-
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
-                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
-        FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
-        StringBuilder sb = new StringBuilder();
-        for (int i = 0; i < fs.length; i++) {
-            sb.append(stringOf(fs[i]) + " ");
-        }
-        LOGGER.info("CREATING File Splits: " + sb.toString());
-
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadata.getMetadataTxnContext());
-        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-        //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
-        ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(typeTraits,
-                comparatorFactories, bloomFilterKeyFields, true, dataset.getDatasetId(), compactionInfo.first,
-                compactionInfo.second, filterTypeTraits, filterCmpFactories, btreeFields, filterFields);
-        ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
-                localResourceMetadata, LocalResource.LSMBTreeResource);
-
-        // The index create operation should be persistent regardless of temp datasets or permanent dataset
-        TreeIndexCreateOperatorDescriptor indexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
-                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(dataset
-                                .getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
-                                .getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
-                        btreeFields, filterFields, true), localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
-                splitsAndConstraint.second);
-        spec.addRoot(indexCreateOp);
-        return spec;
-    }
-
-    private static String stringOf(FileSplit fs) {
-        return fs.getNodeName() + ":" + fs.getLocalFile().toString();
-    }
-
-    public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
-            AqlMetadataProvider metadata) throws AsterixException, AlgebricksException {
-        String dataverseName = dataverse.getDataverseName();
-        IDataFormat format;
-        try {
-            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
-        } catch (Exception e) {
-            throw new AsterixException(e);
-        }
-        Dataset dataset = metadata.findDataset(dataverseName, datasetName);
-        if (dataset == null) {
-            throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
-        }
-        boolean temp = dataset.getDatasetDetails().isTemp();
-
-        ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
-                itemType, format.getBinaryComparatorFactoryProvider());
-        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
-        int[] bloomFilterKeyFields = DatasetUtils.createBloomFilterKeyFields(dataset);
-
-        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
-        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
-                itemType, format.getBinaryComparatorFactoryProvider());
-        int[] filterFields = DatasetUtils.createFilterFields(dataset);
-        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
-
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
-                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
-
-        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadata.getMetadataTxnContext());
-        LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
-                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(
-                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
-                        filterCmpFactories, btreeFields, filterFields, !temp), NoOpOperationCallbackFactory.INSTANCE);
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, compactOp, splitsAndConstraint.second);
-        spec.addRoot(compactOp);
-        return spec;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/DataverseOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DataverseOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DataverseOperations.java
deleted file mode 100644
index 997893f..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DataverseOperations.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-
-public class DataverseOperations {
-    public static JobSpecification createDropDataverseJobSpec(Dataverse dataverse, AqlMetadataProvider metadata) {
-        JobSpecification jobSpec = JobSpecificationUtils.createJobSpecification();
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
-                .splitProviderAndPartitionConstraintsForDataverse(dataverse.getDataverseName());
-        FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, frod, splitsAndConstraint.second);
-        jobSpec.addRoot(frod);
-        return jobSpec;
-    }
-}


[51/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
Change folder structure for Java repackage

Change only the folders, not the files, for our package name change.
This will break the build, and needs to be followed by a change to
the package name in all of the source files. However, performing
the folder move and file change in two steps lets Git understand
that the files are the same, and lets us track revisions across
those files.
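
The follow-up change mentioned above amounts to rewriting the package and import
declarations in every source file to match the new folder layout. A minimal sketch of
such a rewrite, assuming a plain textual replacement of the edu.uci.ics.asterix prefix
with org.apache.asterix (the PackageRenameSketch helper below is hypothetical and not
part of this commit), might look like:

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.stream.Stream;

    // Hypothetical helper: bulk-rewrites the old package prefix after the folder move.
    public class PackageRenameSketch {
        public static void main(String[] args) throws IOException {
            Path root = Paths.get(args.length > 0 ? args[0] : ".");
            try (Stream<Path> files = Files.walk(root)) {
                files.filter(p -> p.toString().endsWith(".java"))
                     .forEach(PackageRenameSketch::rewrite);
            }
        }

        private static void rewrite(Path file) {
            try {
                String src = new String(Files.readAllBytes(file), StandardCharsets.UTF_8);
                // The old and new prefixes mirror the directory move performed in this commit.
                String out = src.replace("edu.uci.ics.asterix", "org.apache.asterix");
                if (!out.equals(src)) {
                    Files.write(file, out.getBytes(StandardCharsets.UTF_8));
                }
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        }
    }

Keeping the folder move and that textual rewrite in separate commits is what lets Git
report the files as renames rather than as unrelated deletes and adds.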

Change-Id: Iefd2a576415ebc1416cba2a3334d2b64f042ba92
Reviewed-on: https://asterix-gerrit.ics.uci.edu/306
Tested-by: Ian Maxon <im...@apache.org>
Reviewed-by: Till Westmann <ti...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/34d81630
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/34d81630
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/34d81630

Branch: refs/heads/master
Commit: 34d816305a0ddaab5d54f23b60ff985eeb12193c
Parents: 95350e2
Author: Ian Maxon <im...@apache.org>
Authored: Fri Aug 21 11:49:23 2015 -0700
Committer: Ian Maxon <im...@apache.org>
Committed: Sat Aug 22 12:56:53 2015 -0700

----------------------------------------------------------------------
 .../base/AsterixOperatorAnnotations.java        |   22 -
 .../base/LogicalExpressionDeepCopyVisitor.java  |  149 -
 .../base/LogicalOperatorDeepCopyVisitor.java    |  469 ---
 .../algebra/operators/CommitOperator.java       |   66 -
 .../physical/BTreeSearchPOperator.java          |  161 -
 .../operators/physical/CommitPOperator.java     |   98 -
 .../operators/physical/CommitRuntime.java       |  139 -
 .../physical/CommitRuntimeFactory.java          |   53 -
 .../physical/ExternalDataLookupPOperator.java   |  180 -
 .../physical/IndexSearchPOperator.java          |   87 -
 .../physical/InvertedIndexPOperator.java        |  288 --
 .../physical/RTreeSearchPOperator.java          |   98 -
 .../jobgen/AqlLogicalExpressionJobGen.java      |  217 --
 .../asterix/optimizer/base/AnalysisUtil.java    |  138 -
 .../ics/asterix/optimizer/base/FuzzyUtils.java  |  119 -
 .../asterix/optimizer/base/RuleCollections.java |  331 --
 .../handle/FieldIndexAndTypeHandle.java         |   41 -
 .../optimizer/handle/FieldNameHandle.java       |   44 -
 .../ics/asterix/optimizer/handle/IHandle.java   |   31 -
 ...quivalenceClassForRecordConstructorRule.java |  127 -
 ...ixExtractFunctionsFromJoinConditionRule.java |   33 -
 .../rules/AsterixInlineVariablesRule.java       |   38 -
 .../AsterixIntroduceGroupByCombinerRule.java    |   74 -
 ...oveFreeVariableOperatorOutOfSubplanRule.java |   26 -
 .../rules/ByNameToByHandleFieldAccessRule.java  |  116 -
 .../rules/ByNameToByIndexFieldAccessRule.java   |  175 -
 .../CancelUnnestWithNestedListifyRule.java      |  282 --
 .../rules/CheckFilterExpressionTypeRule.java    |   83 -
 .../optimizer/rules/ConstantFoldingRule.java    |  264 --
 .../optimizer/rules/CountVarToCountOneRule.java |   86 -
 .../rules/DisjunctivePredicateToJoinRule.java   |  191 --
 .../rules/ExtractDistinctByExpressionsRule.java |   80 -
 .../rules/ExtractOrderExpressionsRule.java      |   80 -
 .../rules/FeedScanCollectionToUnnest.java       |  135 -
 .../asterix/optimizer/rules/FuzzyEqRule.java    |  124 -
 .../asterix/optimizer/rules/FuzzyJoinRule.java  |  404 ---
 .../rules/IfElseToSwitchCaseFunctionRule.java   |  126 -
 .../rules/InlineUnnestFunctionRule.java         |  174 -
 .../rules/IntroduceAutogenerateIDRule.java      |  168 -
 ...eDynamicTypeCastForExternalFunctionRule.java |  141 -
 .../rules/IntroduceDynamicTypeCastRule.java     |  288 --
 .../rules/IntroduceEnforcedListTypeRule.java    |   96 -
 .../IntroduceInstantLockSearchCallbackRule.java |  149 -
 ...aterializationForInsertWithSelfScanRule.java |  118 -
 ...ceRandomPartitioningFeedComputationRule.java |   98 -
 ...troduceRapidFrameFlushProjectAssignRule.java |  102 -
 ...IntroduceSecondaryIndexInsertDeleteRule.java |  598 ----
 .../IntroduceStaticTypeCastForInsertRule.java   |  157 -
 ...ntroduceTransactionCommitByAssignOpRule.java |   76 -
 .../optimizer/rules/IntroduceUnionRule.java     |  123 -
 ...roduceUnnestForCollectionToSequenceRule.java |   94 -
 .../optimizer/rules/LoadRecordFieldsRule.java   |  368 ---
 .../optimizer/rules/NestGroupByRule.java        |  191 --
 .../PullPositionalVariableFromUnnestRule.java   |   74 -
 .../PushAggFuncIntoStandaloneAggregateRule.java |  265 --
 .../rules/PushAggregateIntoGroupbyRule.java     |  462 ---
 .../optimizer/rules/PushFieldAccessRule.java    |  391 ---
 .../rules/PushGroupByThroughProduct.java        |  172 -
 .../rules/PushProperJoinThroughProduct.java     |  107 -
 .../rules/PushSimilarityFunctionsBelowJoin.java |   63 -
 .../rules/RemoveRedundantListifyRule.java       |  254 --
 .../rules/RemoveRedundantSelectRule.java        |   78 -
 .../rules/RemoveSortInFeedIngestionRule.java    |   72 -
 .../rules/RemoveUnusedOneToOneEquiJoinRule.java |  213 --
 .../rules/ReplaceSinkOpWithCommitOpRule.java    |  116 -
 .../rules/SetAsterixPhysicalOperatorsRule.java  |  300 --
 .../rules/SetClosedRecordConstructorsRule.java  |  252 --
 .../optimizer/rules/SimilarityCheckRule.java    |  336 --
 .../SweepIllegalNonfunctionalFunctions.java     |  296 --
 .../optimizer/rules/UnnestToDataScanRule.java   |  248 --
 .../am/AbstractIntroduceAccessMethodRule.java   |  747 -----
 .../rules/am/AccessMethodAnalysisContext.java   |   82 -
 .../rules/am/AccessMethodJobGenParams.java      |  140 -
 .../optimizer/rules/am/AccessMethodUtils.java   |  609 ----
 .../optimizer/rules/am/BTreeAccessMethod.java   |  660 ----
 .../optimizer/rules/am/BTreeJobGenParams.java   |  134 -
 .../optimizer/rules/am/IAccessMethod.java       |   96 -
 .../rules/am/IOptimizableFuncExpr.java          |   70 -
 .../rules/am/IntroduceJoinAccessMethodRule.java |  246 --
 .../am/IntroduceLSMComponentFilterRule.java     |  451 ---
 .../am/IntroduceSelectAccessMethodRule.java     |  154 -
 .../rules/am/InvertedIndexAccessMethod.java     | 1166 -------
 .../rules/am/InvertedIndexJobGenParams.java     |  125 -
 .../optimizer/rules/am/OptimizableFuncExpr.java |  215 --
 .../rules/am/OptimizableOperatorSubTree.java    |  242 --
 .../optimizer/rules/am/RTreeAccessMethod.java   |  255 --
 .../optimizer/rules/am/RTreeJobGenParams.java   |   61 -
 .../TranslateIntervalExpressionRule.java        |  150 -
 .../rules/typecast/StaticTypeCastUtil.java      |  548 ----
 .../rules/util/EquivalenceClassUtils.java       |   99 -
 .../translator/AbstractAqlTranslator.java       |  185 --
 .../AqlExpressionToPlanTranslator.java          | 1608 ---------
 .../AqlPlusExpressionToPlanTranslator.java      | 1498 ---------
 .../asterix/translator/AqlPositionWriter.java   |   33 -
 .../asterix/translator/CompiledStatements.java  |  625 ----
 .../ics/asterix/translator/ConstantHelper.java  |   71 -
 .../asterix/translator/TranslationContext.java  |   73 -
 .../translator/TranslationException.java        |   32 -
 .../ics/asterix/translator/TypeTranslator.java  |  392 ---
 .../base/AsterixOperatorAnnotations.java        |   22 +
 .../base/LogicalExpressionDeepCopyVisitor.java  |  149 +
 .../base/LogicalOperatorDeepCopyVisitor.java    |  469 +++
 .../algebra/operators/CommitOperator.java       |   66 +
 .../physical/BTreeSearchPOperator.java          |  161 +
 .../operators/physical/CommitPOperator.java     |   98 +
 .../operators/physical/CommitRuntime.java       |  139 +
 .../physical/CommitRuntimeFactory.java          |   53 +
 .../physical/ExternalDataLookupPOperator.java   |  180 +
 .../physical/IndexSearchPOperator.java          |   87 +
 .../physical/InvertedIndexPOperator.java        |  288 ++
 .../physical/RTreeSearchPOperator.java          |   98 +
 .../jobgen/AqlLogicalExpressionJobGen.java      |  217 ++
 .../asterix/optimizer/base/AnalysisUtil.java    |  138 +
 .../asterix/optimizer/base/FuzzyUtils.java      |  119 +
 .../asterix/optimizer/base/RuleCollections.java |  331 ++
 .../handle/FieldIndexAndTypeHandle.java         |   41 +
 .../optimizer/handle/FieldNameHandle.java       |   44 +
 .../asterix/optimizer/handle/IHandle.java       |   31 +
 ...quivalenceClassForRecordConstructorRule.java |  127 +
 ...ixExtractFunctionsFromJoinConditionRule.java |   33 +
 .../rules/AsterixInlineVariablesRule.java       |   38 +
 .../AsterixIntroduceGroupByCombinerRule.java    |   74 +
 ...oveFreeVariableOperatorOutOfSubplanRule.java |   26 +
 .../rules/ByNameToByHandleFieldAccessRule.java  |  116 +
 .../rules/ByNameToByIndexFieldAccessRule.java   |  175 +
 .../CancelUnnestWithNestedListifyRule.java      |  282 ++
 .../rules/CheckFilterExpressionTypeRule.java    |   83 +
 .../optimizer/rules/ConstantFoldingRule.java    |  264 ++
 .../optimizer/rules/CountVarToCountOneRule.java |   86 +
 .../rules/DisjunctivePredicateToJoinRule.java   |  191 ++
 .../rules/ExtractDistinctByExpressionsRule.java |   80 +
 .../rules/ExtractOrderExpressionsRule.java      |   80 +
 .../rules/FeedScanCollectionToUnnest.java       |  135 +
 .../asterix/optimizer/rules/FuzzyEqRule.java    |  124 +
 .../asterix/optimizer/rules/FuzzyJoinRule.java  |  404 +++
 .../rules/IfElseToSwitchCaseFunctionRule.java   |  126 +
 .../rules/InlineUnnestFunctionRule.java         |  174 +
 .../rules/IntroduceAutogenerateIDRule.java      |  168 +
 ...eDynamicTypeCastForExternalFunctionRule.java |  141 +
 .../rules/IntroduceDynamicTypeCastRule.java     |  288 ++
 .../rules/IntroduceEnforcedListTypeRule.java    |   96 +
 .../IntroduceInstantLockSearchCallbackRule.java |  149 +
 ...aterializationForInsertWithSelfScanRule.java |  118 +
 ...ceRandomPartitioningFeedComputationRule.java |   98 +
 ...troduceRapidFrameFlushProjectAssignRule.java |  102 +
 ...IntroduceSecondaryIndexInsertDeleteRule.java |  598 ++++
 .../IntroduceStaticTypeCastForInsertRule.java   |  157 +
 ...ntroduceTransactionCommitByAssignOpRule.java |   76 +
 .../optimizer/rules/IntroduceUnionRule.java     |  123 +
 ...roduceUnnestForCollectionToSequenceRule.java |   94 +
 .../optimizer/rules/LoadRecordFieldsRule.java   |  368 +++
 .../optimizer/rules/NestGroupByRule.java        |  191 ++
 .../PullPositionalVariableFromUnnestRule.java   |   74 +
 .../PushAggFuncIntoStandaloneAggregateRule.java |  265 ++
 .../rules/PushAggregateIntoGroupbyRule.java     |  462 +++
 .../optimizer/rules/PushFieldAccessRule.java    |  391 +++
 .../rules/PushGroupByThroughProduct.java        |  172 +
 .../rules/PushProperJoinThroughProduct.java     |  107 +
 .../rules/PushSimilarityFunctionsBelowJoin.java |   63 +
 .../rules/RemoveRedundantListifyRule.java       |  254 ++
 .../rules/RemoveRedundantSelectRule.java        |   78 +
 .../rules/RemoveSortInFeedIngestionRule.java    |   72 +
 .../rules/RemoveUnusedOneToOneEquiJoinRule.java |  213 ++
 .../rules/ReplaceSinkOpWithCommitOpRule.java    |  116 +
 .../rules/SetAsterixPhysicalOperatorsRule.java  |  300 ++
 .../rules/SetClosedRecordConstructorsRule.java  |  252 ++
 .../optimizer/rules/SimilarityCheckRule.java    |  336 ++
 .../SweepIllegalNonfunctionalFunctions.java     |  296 ++
 .../optimizer/rules/UnnestToDataScanRule.java   |  248 ++
 .../am/AbstractIntroduceAccessMethodRule.java   |  747 +++++
 .../rules/am/AccessMethodAnalysisContext.java   |   82 +
 .../rules/am/AccessMethodJobGenParams.java      |  140 +
 .../optimizer/rules/am/AccessMethodUtils.java   |  609 ++++
 .../optimizer/rules/am/BTreeAccessMethod.java   |  660 ++++
 .../optimizer/rules/am/BTreeJobGenParams.java   |  134 +
 .../optimizer/rules/am/IAccessMethod.java       |   96 +
 .../rules/am/IOptimizableFuncExpr.java          |   70 +
 .../rules/am/IntroduceJoinAccessMethodRule.java |  246 ++
 .../am/IntroduceLSMComponentFilterRule.java     |  451 +++
 .../am/IntroduceSelectAccessMethodRule.java     |  154 +
 .../rules/am/InvertedIndexAccessMethod.java     | 1166 +++++++
 .../rules/am/InvertedIndexJobGenParams.java     |  125 +
 .../optimizer/rules/am/OptimizableFuncExpr.java |  215 ++
 .../rules/am/OptimizableOperatorSubTree.java    |  242 ++
 .../optimizer/rules/am/RTreeAccessMethod.java   |  255 ++
 .../optimizer/rules/am/RTreeJobGenParams.java   |   61 +
 .../TranslateIntervalExpressionRule.java        |  150 +
 .../rules/typecast/StaticTypeCastUtil.java      |  548 ++++
 .../rules/util/EquivalenceClassUtils.java       |   99 +
 .../translator/AbstractAqlTranslator.java       |  185 ++
 .../AqlExpressionToPlanTranslator.java          | 1608 +++++++++
 .../AqlPlusExpressionToPlanTranslator.java      | 1498 +++++++++
 .../asterix/translator/AqlPositionWriter.java   |   33 +
 .../asterix/translator/CompiledStatements.java  |  625 ++++
 .../asterix/translator/ConstantHelper.java      |   71 +
 .../asterix/translator/TranslationContext.java  |   73 +
 .../translator/TranslationException.java        |   32 +
 .../asterix/translator/TypeTranslator.java      |  392 +++
 .../ics/asterix/api/common/APIFramework.java    |  411 ---
 .../api/common/AsterixAppRuntimeContext.java    |  271 --
 ...rixAppRuntimeContextProdiverForRecovery.java |  105 -
 .../asterix/api/common/AsterixClientConfig.java |   45 -
 .../common/AsterixHyracksIntegrationUtil.java   |  155 -
 .../asterix/api/common/FeedWorkCollection.java  |  197 --
 .../edu/uci/ics/asterix/api/common/Job.java     |   47 -
 .../ics/asterix/api/common/SessionConfig.java   |  201 --
 .../asterix/api/http/servlet/APIServlet.java    |  173 -
 .../asterix/api/http/servlet/AQLAPIServlet.java |   53 -
 .../api/http/servlet/ConnectorAPIServlet.java   |  168 -
 .../asterix/api/http/servlet/DDLAPIServlet.java |   43 -
 .../asterix/api/http/servlet/FeedServlet.java   |  166 -
 .../api/http/servlet/FeedServletUtil.java       |   70 -
 .../api/http/servlet/HyracksProperties.java     |   52 -
 .../api/http/servlet/QueryAPIServlet.java       |   40 -
 .../api/http/servlet/QueryResultAPIServlet.java |   95 -
 .../api/http/servlet/QueryStatusAPIServlet.java |   97 -
 .../api/http/servlet/RESTAPIServlet.java        |  197 --
 .../api/http/servlet/ShutdownAPIServlet.java    |   79 -
 .../api/http/servlet/UpdateAPIServlet.java      |   41 -
 .../ics/asterix/api/java/AsterixJavaClient.java |  100 -
 .../asterix/aql/translator/AqlTranslator.java   | 3069 ------------------
 .../edu/uci/ics/asterix/drivers/AsterixCLI.java |   85 -
 .../asterix/drivers/AsterixClientDriver.java    |   64 -
 .../ics/asterix/drivers/AsterixWebServer.java   |   34 -
 .../ics/asterix/feeds/CentralFeedManager.java   |  100 -
 .../uci/ics/asterix/feeds/FeedCollectInfo.java  |   50 -
 .../edu/uci/ics/asterix/feeds/FeedInfo.java     |   49 -
 .../feeds/FeedJobNotificationHandler.java       |  739 -----
 .../edu/uci/ics/asterix/feeds/FeedJoint.java    |  186 --
 .../asterix/feeds/FeedLifecycleListener.java    |  486 ---
 .../uci/ics/asterix/feeds/FeedLoadManager.java  |  298 --
 .../edu/uci/ics/asterix/feeds/FeedManager.java  |  140 -
 .../ics/asterix/feeds/FeedMessageReceiver.java  |   88 -
 .../ics/asterix/feeds/FeedMetadataManager.java  |  107 -
 .../ics/asterix/feeds/FeedTrackingManager.java  |  184 --
 .../feeds/FeedWorkRequestResponseHandler.java   |  263 --
 .../uci/ics/asterix/feeds/FeedsActivator.java   |  109 -
 .../uci/ics/asterix/file/DatasetOperations.java |  252 --
 .../ics/asterix/file/DataverseOperations.java   |   36 -
 .../file/ExternalIndexingOperations.java        |  761 -----
 .../uci/ics/asterix/file/FeedOperations.java    |  251 --
 .../uci/ics/asterix/file/IndexOperations.java   |  131 -
 .../ics/asterix/file/JobSpecificationUtils.java |   28 -
 .../file/SecondaryBTreeOperationsHelper.java    |  363 ---
 .../file/SecondaryIndexOperationsHelper.java    |  575 ----
 .../SecondaryInvertedIndexOperationsHelper.java |  373 ---
 .../file/SecondaryRTreeOperationsHelper.java    |  408 ---
 .../bootstrap/AsterixGlobalRecoveryManager.java |  213 --
 .../bootstrap/AsterixStateDumpHandler.java      |   48 -
 .../bootstrap/CCApplicationEntryPoint.java      |  184 --
 .../bootstrap/ClusterLifecycleListener.java     |  204 --
 .../hyracks/bootstrap/ClusterWorkExecutor.java  |  102 -
 .../bootstrap/ExternalLibraryBootstrap.java     |  318 --
 .../hyracks/bootstrap/FeedBootstrap.java        |   64 -
 .../bootstrap/NCApplicationEntryPoint.java      |  279 --
 .../uci/ics/asterix/result/ResultReader.java    |   61 -
 .../edu/uci/ics/asterix/result/ResultUtils.java |  343 --
 .../ConstantTupleSourceOperatorDescriptor.java  |   47 -
 ...ConstantTupleSourceOperatorNodePushable.java |   53 -
 .../apache/asterix/api/common/APIFramework.java |  411 +++
 .../api/common/AsterixAppRuntimeContext.java    |  271 ++
 ...rixAppRuntimeContextProdiverForRecovery.java |  105 +
 .../asterix/api/common/AsterixClientConfig.java |   45 +
 .../common/AsterixHyracksIntegrationUtil.java   |  155 +
 .../asterix/api/common/FeedWorkCollection.java  |  197 ++
 .../java/org/apache/asterix/api/common/Job.java |   47 +
 .../asterix/api/common/SessionConfig.java       |  201 ++
 .../asterix/api/http/servlet/APIServlet.java    |  173 +
 .../asterix/api/http/servlet/AQLAPIServlet.java |   53 +
 .../api/http/servlet/ConnectorAPIServlet.java   |  168 +
 .../asterix/api/http/servlet/DDLAPIServlet.java |   43 +
 .../asterix/api/http/servlet/FeedServlet.java   |  166 +
 .../api/http/servlet/FeedServletUtil.java       |   70 +
 .../api/http/servlet/HyracksProperties.java     |   52 +
 .../api/http/servlet/QueryAPIServlet.java       |   40 +
 .../api/http/servlet/QueryResultAPIServlet.java |   95 +
 .../api/http/servlet/QueryStatusAPIServlet.java |   97 +
 .../api/http/servlet/RESTAPIServlet.java        |  197 ++
 .../api/http/servlet/ShutdownAPIServlet.java    |   79 +
 .../api/http/servlet/UpdateAPIServlet.java      |   41 +
 .../asterix/api/java/AsterixJavaClient.java     |  100 +
 .../asterix/aql/translator/AqlTranslator.java   | 3069 ++++++++++++++++++
 .../org/apache/asterix/drivers/AsterixCLI.java  |   85 +
 .../asterix/drivers/AsterixClientDriver.java    |   64 +
 .../asterix/drivers/AsterixWebServer.java       |   34 +
 .../asterix/feeds/CentralFeedManager.java       |  100 +
 .../apache/asterix/feeds/FeedCollectInfo.java   |   50 +
 .../java/org/apache/asterix/feeds/FeedInfo.java |   49 +
 .../feeds/FeedJobNotificationHandler.java       |  739 +++++
 .../org/apache/asterix/feeds/FeedJoint.java     |  186 ++
 .../asterix/feeds/FeedLifecycleListener.java    |  486 +++
 .../apache/asterix/feeds/FeedLoadManager.java   |  298 ++
 .../org/apache/asterix/feeds/FeedManager.java   |  140 +
 .../asterix/feeds/FeedMessageReceiver.java      |   88 +
 .../asterix/feeds/FeedMetadataManager.java      |  107 +
 .../asterix/feeds/FeedTrackingManager.java      |  184 ++
 .../feeds/FeedWorkRequestResponseHandler.java   |  263 ++
 .../apache/asterix/feeds/FeedsActivator.java    |  109 +
 .../apache/asterix/file/DatasetOperations.java  |  252 ++
 .../asterix/file/DataverseOperations.java       |   36 +
 .../file/ExternalIndexingOperations.java        |  761 +++++
 .../org/apache/asterix/file/FeedOperations.java |  251 ++
 .../apache/asterix/file/IndexOperations.java    |  131 +
 .../asterix/file/JobSpecificationUtils.java     |   28 +
 .../file/SecondaryBTreeOperationsHelper.java    |  363 +++
 .../file/SecondaryIndexOperationsHelper.java    |  575 ++++
 .../SecondaryInvertedIndexOperationsHelper.java |  373 +++
 .../file/SecondaryRTreeOperationsHelper.java    |  408 +++
 .../bootstrap/AsterixGlobalRecoveryManager.java |  213 ++
 .../bootstrap/AsterixStateDumpHandler.java      |   48 +
 .../bootstrap/CCApplicationEntryPoint.java      |  184 ++
 .../bootstrap/ClusterLifecycleListener.java     |  204 ++
 .../hyracks/bootstrap/ClusterWorkExecutor.java  |  102 +
 .../bootstrap/ExternalLibraryBootstrap.java     |  318 ++
 .../hyracks/bootstrap/FeedBootstrap.java        |   64 +
 .../bootstrap/NCApplicationEntryPoint.java      |  279 ++
 .../org/apache/asterix/result/ResultReader.java |   61 +
 .../org/apache/asterix/result/ResultUtils.java  |  343 ++
 .../ConstantTupleSourceOperatorDescriptor.java  |   47 +
 ...ConstantTupleSourceOperatorNodePushable.java |   53 +
 .../http/servlet/ConnectorAPIServletTest.java   |  176 -
 .../uci/ics/asterix/test/aql/AQLTestCase.java   |   67 -
 .../uci/ics/asterix/test/aql/AQLTestSuite.java  |   58 -
 .../uci/ics/asterix/test/common/TestHelper.java |   30 -
 .../edu/uci/ics/asterix/test/dml/DmlTest.java   |   82 -
 .../ics/asterix/test/metadata/MetadataTest.java |  109 -
 .../asterix/test/optimizer/OptimizerTest.java   |  220 --
 .../ics/asterix/test/runtime/ExecutionTest.java |  142 -
 .../ics/asterix/test/runtime/HDFSCluster.java   |  127 -
 .../ics/asterix/test/runtime/RepeatedTest.java  |  105 -
 .../http/servlet/ConnectorAPIServletTest.java   |  176 +
 .../apache/asterix/test/aql/AQLTestCase.java    |   67 +
 .../apache/asterix/test/aql/AQLTestSuite.java   |   58 +
 .../apache/asterix/test/common/TestHelper.java  |   30 +
 .../org/apache/asterix/test/dml/DmlTest.java    |   82 +
 .../asterix/test/metadata/MetadataTest.java     |  109 +
 .../asterix/test/optimizer/OptimizerTest.java   |  220 ++
 .../asterix/test/runtime/ExecutionTest.java     |  142 +
 .../asterix/test/runtime/HDFSCluster.java       |  127 +
 .../asterix/test/runtime/RepeatedTest.java      |  105 +
 .../asterix/aql/base/AbstractExpression.java    |   39 -
 .../edu/uci/ics/asterix/aql/base/Clause.java    |   31 -
 .../uci/ics/asterix/aql/base/Expression.java    |   38 -
 .../ics/asterix/aql/base/IAqlExpression.java    |   25 -
 .../edu/uci/ics/asterix/aql/base/Literal.java   |   61 -
 .../edu/uci/ics/asterix/aql/base/Statement.java |   57 -
 .../aql/context/FunctionExpressionMap.java      |   56 -
 .../asterix/aql/context/FunctionSignatures.java |   47 -
 .../asterix/aql/context/RootScopeFactory.java   |   26 -
 .../edu/uci/ics/asterix/aql/context/Scope.java  |  122 -
 .../aql/expression/AbstractAccessor.java        |   35 -
 .../asterix/aql/expression/AdmSplitInfo.java    |   33 -
 .../ics/asterix/aql/expression/CallExpr.java    |   66 -
 .../aql/expression/CompactStatement.java        |   54 -
 .../aql/expression/ConnectFeedStatement.java    |  189 --
 .../asterix/aql/expression/ConstructorType.java |   21 -
 .../expression/CreateDataverseStatement.java    |   65 -
 .../expression/CreateFeedPolicyStatement.java   |   91 -
 .../aql/expression/CreateFeedStatement.java     |   64 -
 .../aql/expression/CreateFunctionStatement.java |   79 -
 .../aql/expression/CreateIndexStatement.java    |  123 -
 .../expression/CreatePrimaryFeedStatement.java  |   54 -
 .../CreateSecondaryFeedStatement.java           |   57 -
 .../ics/asterix/aql/expression/DatasetDecl.java |  109 -
 .../asterix/aql/expression/DataverseDecl.java   |   50 -
 .../aql/expression/DataverseDropStatement.java  |   55 -
 .../asterix/aql/expression/DeleteStatement.java |   89 -
 .../aql/expression/DisconnectFeedStatement.java |   79 -
 .../asterix/aql/expression/DistinctClause.java  |   52 -
 .../asterix/aql/expression/DropStatement.java   |   61 -
 .../aql/expression/ExternalDetailsDecl.java     |   44 -
 .../aql/expression/FLWOGRExpression.java        |   80 -
 .../asterix/aql/expression/FeedDetailsDecl.java |   50 -
 .../aql/expression/FeedDropStatement.java       |   61 -
 .../aql/expression/FeedPolicyDropStatement.java |   61 -
 .../asterix/aql/expression/FieldAccessor.java   |   53 -
 .../asterix/aql/expression/FieldBinding.java    |   48 -
 .../ics/asterix/aql/expression/ForClause.java   |   76 -
 .../asterix/aql/expression/FunctionDecl.java    |   85 -
 .../aql/expression/FunctionDropStatement.java   |   56 -
 .../expression/GbyVariableExpressionPair.java   |   49 -
 .../asterix/aql/expression/GroupbyClause.java   |   88 -
 .../asterix/aql/expression/HdfsPathInfo.java    |   27 -
 .../aql/expression/IDatasetDetailsDecl.java     |   22 -
 .../ics/asterix/aql/expression/Identifier.java  |   53 -
 .../uci/ics/asterix/aql/expression/IfExpr.java  |   75 -
 .../asterix/aql/expression/IndexAccessor.java   |   65 -
 .../ics/asterix/aql/expression/IndexDecl.java   |   22 -
 .../aql/expression/IndexDropStatement.java      |   66 -
 .../asterix/aql/expression/InsertStatement.java |   67 -
 .../aql/expression/InternalDetailsDecl.java     |   50 -
 .../ics/asterix/aql/expression/JoinClause.java  |   88 -
 .../ics/asterix/aql/expression/LetClause.java   |   71 -
 .../ics/asterix/aql/expression/LimitClause.java |   65 -
 .../asterix/aql/expression/ListConstructor.java |   72 -
 .../ics/asterix/aql/expression/LiteralExpr.java |   55 -
 .../asterix/aql/expression/LoadStatement.java   |   88 -
 .../aql/expression/MetaVariableClause.java      |   49 -
 .../aql/expression/MetaVariableExpr.java        |   43 -
 .../aql/expression/NodeGroupDropStatement.java  |   55 -
 .../asterix/aql/expression/NodegroupDecl.java   |   66 -
 .../asterix/aql/expression/OperatorExpr.java    |  144 -
 .../asterix/aql/expression/OperatorType.java    |   35 -
 .../asterix/aql/expression/OrderbyClause.java   |   99 -
 .../expression/OrderedListTypeDefinition.java   |   48 -
 .../aql/expression/QuantifiedExpression.java    |   83 -
 .../asterix/aql/expression/QuantifiedPair.java  |   47 -
 .../uci/ics/asterix/aql/expression/Query.java   |   78 -
 .../aql/expression/RecordConstructor.java       |   58 -
 .../aql/expression/RecordTypeDefinition.java    |  106 -
 .../RefreshExternalDatasetStatement.java        |   59 -
 .../asterix/aql/expression/RunStatement.java    |   83 -
 .../asterix/aql/expression/SetStatement.java    |   56 -
 .../aql/expression/SubscribeFeedStatement.java  |  207 --
 .../ics/asterix/aql/expression/TypeDecl.java    |   79 -
 .../aql/expression/TypeDropStatement.java       |   61 -
 .../asterix/aql/expression/TypeExpression.java  |   30 -
 .../aql/expression/TypeReferenceExpression.java |   48 -
 .../ics/asterix/aql/expression/UnaryExpr.java   |   69 -
 .../ics/asterix/aql/expression/UnionExpr.java   |   64 -
 .../expression/UnorderedListTypeDefinition.java |   48 -
 .../asterix/aql/expression/UpdateClause.java    |   93 -
 .../asterix/aql/expression/UpdateStatement.java |   70 -
 .../asterix/aql/expression/VarIdentifier.java   |   49 -
 .../asterix/aql/expression/VariableExpr.java    |   67 -
 .../ics/asterix/aql/expression/WhereClause.java |   57 -
 .../asterix/aql/expression/WriteStatement.java  |   62 -
 .../aql/expression/visitor/AQLPrintVisitor.java |  578 ----
 .../visitor/IAqlExpressionVisitor.java          |  183 --
 .../visitor/IAqlPlusExpressionVisitor.java      |   28 -
 .../visitor/IAqlVisitorWithVoidReturn.java      |  182 --
 .../ics/asterix/aql/literal/DoubleLiteral.java  |   44 -
 .../ics/asterix/aql/literal/FalseLiteral.java   |   57 -
 .../ics/asterix/aql/literal/FloatLiteral.java   |   44 -
 .../ics/asterix/aql/literal/IntegerLiteral.java |   43 -
 .../asterix/aql/literal/LongIntegerLiteral.java |   44 -
 .../ics/asterix/aql/literal/NullLiteral.java    |   55 -
 .../ics/asterix/aql/literal/StringLiteral.java  |   46 -
 .../ics/asterix/aql/literal/TrueLiteral.java    |   56 -
 .../ics/asterix/aql/parser/ScopeChecker.java    |  249 --
 .../ics/asterix/aql/rewrites/AqlRewriter.java   |  598 ----
 .../aql/rewrites/AqlRewritingContext.java       |   58 -
 .../CloneAndSubstituteVariablesVisitor.java     |  640 ----
 .../asterix/aql/rewrites/InlineUdfsVisitor.java |  572 ----
 .../aql/rewrites/VariableSubstitution.java      |   39 -
 .../uci/ics/asterix/aql/util/FunctionUtils.java |   84 -
 .../ics/asterix/aql/util/RangeMapBuilder.java   |  171 -
 .../asterix/aql/base/AbstractExpression.java    |   39 +
 .../org/apache/asterix/aql/base/Clause.java     |   31 +
 .../org/apache/asterix/aql/base/Expression.java |   38 +
 .../apache/asterix/aql/base/IAqlExpression.java |   25 +
 .../org/apache/asterix/aql/base/Literal.java    |   61 +
 .../org/apache/asterix/aql/base/Statement.java  |   57 +
 .../aql/context/FunctionExpressionMap.java      |   56 +
 .../asterix/aql/context/FunctionSignatures.java |   47 +
 .../asterix/aql/context/RootScopeFactory.java   |   26 +
 .../org/apache/asterix/aql/context/Scope.java   |  122 +
 .../aql/expression/AbstractAccessor.java        |   35 +
 .../asterix/aql/expression/AdmSplitInfo.java    |   33 +
 .../apache/asterix/aql/expression/CallExpr.java |   66 +
 .../aql/expression/CompactStatement.java        |   54 +
 .../aql/expression/ConnectFeedStatement.java    |  189 ++
 .../asterix/aql/expression/ConstructorType.java |   21 +
 .../expression/CreateDataverseStatement.java    |   65 +
 .../expression/CreateFeedPolicyStatement.java   |   91 +
 .../aql/expression/CreateFeedStatement.java     |   64 +
 .../aql/expression/CreateFunctionStatement.java |   79 +
 .../aql/expression/CreateIndexStatement.java    |  123 +
 .../expression/CreatePrimaryFeedStatement.java  |   54 +
 .../CreateSecondaryFeedStatement.java           |   57 +
 .../asterix/aql/expression/DatasetDecl.java     |  109 +
 .../asterix/aql/expression/DataverseDecl.java   |   50 +
 .../aql/expression/DataverseDropStatement.java  |   55 +
 .../asterix/aql/expression/DeleteStatement.java |   89 +
 .../aql/expression/DisconnectFeedStatement.java |   79 +
 .../asterix/aql/expression/DistinctClause.java  |   52 +
 .../asterix/aql/expression/DropStatement.java   |   61 +
 .../aql/expression/ExternalDetailsDecl.java     |   44 +
 .../aql/expression/FLWOGRExpression.java        |   80 +
 .../asterix/aql/expression/FeedDetailsDecl.java |   50 +
 .../aql/expression/FeedDropStatement.java       |   61 +
 .../aql/expression/FeedPolicyDropStatement.java |   61 +
 .../asterix/aql/expression/FieldAccessor.java   |   53 +
 .../asterix/aql/expression/FieldBinding.java    |   48 +
 .../asterix/aql/expression/ForClause.java       |   76 +
 .../asterix/aql/expression/FunctionDecl.java    |   85 +
 .../aql/expression/FunctionDropStatement.java   |   56 +
 .../expression/GbyVariableExpressionPair.java   |   49 +
 .../asterix/aql/expression/GroupbyClause.java   |   88 +
 .../asterix/aql/expression/HdfsPathInfo.java    |   27 +
 .../aql/expression/IDatasetDetailsDecl.java     |   22 +
 .../asterix/aql/expression/Identifier.java      |   53 +
 .../apache/asterix/aql/expression/IfExpr.java   |   75 +
 .../asterix/aql/expression/IndexAccessor.java   |   65 +
 .../asterix/aql/expression/IndexDecl.java       |   22 +
 .../aql/expression/IndexDropStatement.java      |   66 +
 .../asterix/aql/expression/InsertStatement.java |   67 +
 .../aql/expression/InternalDetailsDecl.java     |   50 +
 .../asterix/aql/expression/JoinClause.java      |   88 +
 .../asterix/aql/expression/LetClause.java       |   71 +
 .../asterix/aql/expression/LimitClause.java     |   65 +
 .../asterix/aql/expression/ListConstructor.java |   72 +
 .../asterix/aql/expression/LiteralExpr.java     |   55 +
 .../asterix/aql/expression/LoadStatement.java   |   88 +
 .../aql/expression/MetaVariableClause.java      |   49 +
 .../aql/expression/MetaVariableExpr.java        |   43 +
 .../aql/expression/NodeGroupDropStatement.java  |   55 +
 .../asterix/aql/expression/NodegroupDecl.java   |   66 +
 .../asterix/aql/expression/OperatorExpr.java    |  144 +
 .../asterix/aql/expression/OperatorType.java    |   35 +
 .../asterix/aql/expression/OrderbyClause.java   |   99 +
 .../expression/OrderedListTypeDefinition.java   |   48 +
 .../aql/expression/QuantifiedExpression.java    |   83 +
 .../asterix/aql/expression/QuantifiedPair.java  |   47 +
 .../apache/asterix/aql/expression/Query.java    |   78 +
 .../aql/expression/RecordConstructor.java       |   58 +
 .../aql/expression/RecordTypeDefinition.java    |  106 +
 .../RefreshExternalDatasetStatement.java        |   59 +
 .../asterix/aql/expression/RunStatement.java    |   83 +
 .../asterix/aql/expression/SetStatement.java    |   56 +
 .../aql/expression/SubscribeFeedStatement.java  |  207 ++
 .../apache/asterix/aql/expression/TypeDecl.java |   79 +
 .../aql/expression/TypeDropStatement.java       |   61 +
 .../asterix/aql/expression/TypeExpression.java  |   30 +
 .../aql/expression/TypeReferenceExpression.java |   48 +
 .../asterix/aql/expression/UnaryExpr.java       |   69 +
 .../asterix/aql/expression/UnionExpr.java       |   64 +
 .../expression/UnorderedListTypeDefinition.java |   48 +
 .../asterix/aql/expression/UpdateClause.java    |   93 +
 .../asterix/aql/expression/UpdateStatement.java |   70 +
 .../asterix/aql/expression/VarIdentifier.java   |   49 +
 .../asterix/aql/expression/VariableExpr.java    |   67 +
 .../asterix/aql/expression/WhereClause.java     |   57 +
 .../asterix/aql/expression/WriteStatement.java  |   62 +
 .../aql/expression/visitor/AQLPrintVisitor.java |  578 ++++
 .../visitor/IAqlExpressionVisitor.java          |  183 ++
 .../visitor/IAqlPlusExpressionVisitor.java      |   28 +
 .../visitor/IAqlVisitorWithVoidReturn.java      |  182 ++
 .../asterix/aql/literal/DoubleLiteral.java      |   44 +
 .../asterix/aql/literal/FalseLiteral.java       |   57 +
 .../asterix/aql/literal/FloatLiteral.java       |   44 +
 .../asterix/aql/literal/IntegerLiteral.java     |   43 +
 .../asterix/aql/literal/LongIntegerLiteral.java |   44 +
 .../apache/asterix/aql/literal/NullLiteral.java |   55 +
 .../asterix/aql/literal/StringLiteral.java      |   46 +
 .../apache/asterix/aql/literal/TrueLiteral.java |   56 +
 .../apache/asterix/aql/parser/ScopeChecker.java |  249 ++
 .../asterix/aql/rewrites/AqlRewriter.java       |  598 ++++
 .../aql/rewrites/AqlRewritingContext.java       |   58 +
 .../CloneAndSubstituteVariablesVisitor.java     |  640 ++++
 .../asterix/aql/rewrites/InlineUdfsVisitor.java |  572 ++++
 .../aql/rewrites/VariableSubstitution.java      |   39 +
 .../apache/asterix/aql/util/FunctionUtils.java  |   84 +
 .../asterix/aql/util/RangeMapBuilder.java       |  171 +
 .../asterix/common/annotations/AutoDataGen.java |   34 -
 .../annotations/DateBetweenYearsDataGen.java    |   40 -
 .../DatetimeAddRandHoursDataGen.java            |   46 -
 .../DatetimeBetweenYearsDataGen.java            |   40 -
 .../annotations/FieldIntervalDataGen.java       |   53 -
 .../common/annotations/FieldValFileDataGen.java |   36 -
 .../FieldValFileSameIndexDataGen.java           |   41 -
 .../common/annotations/IRecordFieldDataGen.java |   32 -
 .../annotations/IRecordTypeAnnotation.java      |   23 -
 .../annotations/InsertRandIntDataGen.java       |   40 -
 .../asterix/common/annotations/ListDataGen.java |   40 -
 .../common/annotations/ListValFileDataGen.java  |   48 -
 .../annotations/RecordDataGenAnnotation.java    |   43 -
 ...econdaryIndexSearchExpressionAnnotation.java |   42 -
 .../asterix/common/annotations/TypeDataGen.java |   39 -
 .../annotations/UndeclaredFieldsDataGen.java    |   52 -
 .../asterix/common/api/AsterixContextInfo.java  |   40 -
 .../common/api/AsterixThreadExecutor.java       |   39 -
 .../common/api/AsterixThreadFactory.java        |   41 -
 .../common/api/IAsterixAppRuntimeContext.java   |   76 -
 .../asterix/common/api/IAsterixContextInfo.java |   28 -
 .../api/IAsterixRuntimeComponentsProvider.java  |   61 -
 .../common/api/IClusterEventsSubscriber.java    |   47 -
 .../common/api/IClusterManagementWork.java      |   34 -
 .../api/IClusterManagementWorkResponse.java     |   41 -
 .../common/api/ILocalResourceMetadata.java      |   29 -
 .../config/AbstractAsterixProperties.java       |   23 -
 .../config/AsterixCompilerProperties.java       |   54 -
 .../config/AsterixExternalProperties.java       |   89 -
 .../common/config/AsterixFeedProperties.java    |   62 -
 .../config/AsterixMetadataProperties.java       |   53 -
 .../config/AsterixPropertiesAccessor.java       |  164 -
 .../common/config/AsterixStorageProperties.java |  100 -
 .../config/AsterixTransactionProperties.java    |  113 -
 .../asterix/common/config/DatasetConfig.java    |   53 -
 .../ics/asterix/common/config/GlobalConfig.java |   68 -
 .../config/IAsterixPropertiesProvider.java      |   29 -
 .../common/config/IPropertyInterpreter.java     |   21 -
 .../common/config/OptimizationConfUtil.java     |   26 -
 .../common/config/PropertyInterpreters.java     |   94 -
 .../common/context/AsterixFileMapManager.java   |   87 -
 .../AsterixVirtualBufferCacheProvider.java      |   39 -
 .../common/context/BaseOperationTracker.java    |   64 -
 .../context/CorrelatedPrefixMergePolicy.java    |  126 -
 .../CorrelatedPrefixMergePolicyFactory.java     |   68 -
 .../common/context/DatasetLifecycleManager.java |  603 ----
 .../context/ITransactionSubsystemProvider.java  |   25 -
 .../context/PrimaryIndexOperationTracker.java   |  196 --
 .../context/TransactionSubsystemProvider.java   |   34 -
 .../common/dataflow/AsterixLSMIndexUtil.java    |   40 -
 ...erixLSMInsertDeleteOperatorNodePushable.java |  123 -
 ...rtedIndexInsertDeleteOperatorDescriptor.java |   63 -
 ...ixLSMTreeInsertDeleteOperatorDescriptor.java |   82 -
 .../IAsterixApplicationContextInfo.java         |   47 -
 .../common/exceptions/ACIDException.java        |   61 -
 .../common/exceptions/AsterixException.java     |   34 -
 .../exceptions/AsterixRuntimeException.java     |   33 -
 .../common/exceptions/FrameDataException.java   |   34 -
 .../common/exceptions/TypeException.java        |   39 -
 .../common/feeds/BasicMonitoredBuffer.java      |   70 -
 .../asterix/common/feeds/CollectionRuntime.java |   88 -
 .../feeds/ComputeSideMonitoredBuffer.java       |   69 -
 .../ics/asterix/common/feeds/DataBucket.java    |   84 -
 .../asterix/common/feeds/DataBucketPool.java    |  106 -
 .../common/feeds/DistributeFeedFrameWriter.java |  135 -
 .../ics/asterix/common/feeds/FeedActivity.java  |  115 -
 .../feeds/FeedCollectRuntimeInputHandler.java   |   57 -
 .../common/feeds/FeedConnectJobInfo.java        |   89 -
 .../asterix/common/feeds/FeedConnectionId.java  |   70 -
 .../common/feeds/FeedConnectionRequest.java     |  122 -
 .../ics/asterix/common/feeds/FeedConstants.java |   67 -
 .../common/feeds/FeedExceptionHandler.java      |  104 -
 .../asterix/common/feeds/FeedFrameCache.java    |  167 -
 .../common/feeds/FeedFrameCollector.java        |  154 -
 .../common/feeds/FeedFrameDiscarder.java        |   63 -
 .../asterix/common/feeds/FeedFrameHandlers.java |  302 --
 .../asterix/common/feeds/FeedFrameSpiller.java  |  172 -
 .../common/feeds/FeedFrameTupleAccessor.java    |  106 -
 .../ics/asterix/common/feeds/FeedFrameUtil.java |   98 -
 .../uci/ics/asterix/common/feeds/FeedId.java    |   62 -
 .../asterix/common/feeds/FeedIntakeInfo.java    |   58 -
 .../ics/asterix/common/feeds/FeedJobInfo.java   |   82 -
 .../ics/asterix/common/feeds/FeedJointKey.java  |   75 -
 .../asterix/common/feeds/FeedMemoryManager.java |  108 -
 .../common/feeds/FeedMessageService.java        |  140 -
 .../common/feeds/FeedMetricCollector.java       |  185 --
 .../common/feeds/FeedPolicyAccessor.java        |  164 -
 .../ics/asterix/common/feeds/FeedRuntime.java   |   70 -
 .../ics/asterix/common/feeds/FeedRuntimeId.java |   76 -
 .../common/feeds/FeedRuntimeInputHandler.java   |  426 ---
 .../common/feeds/FeedRuntimeManager.java        |   77 -
 .../asterix/common/feeds/FeedRuntimeReport.java |   19 -
 .../common/feeds/FeedStorageStatistics.java     |   23 -
 .../common/feeds/FeedTupleCommitAckMessage.java |   93 -
 .../feeds/FeedTupleCommitResponseMessage.java   |   76 -
 .../asterix/common/feeds/FrameCollection.java   |   97 -
 .../asterix/common/feeds/FrameDistributor.java  |  359 --
 .../common/feeds/FrameEventCallback.java        |   98 -
 .../common/feeds/IFramePostProcessor.java       |   24 -
 .../common/feeds/IFramePreprocessor.java        |   22 -
 .../asterix/common/feeds/IngestionRuntime.java  |   71 -
 .../common/feeds/IntakePartitionStatistics.java |   41 -
 .../common/feeds/IntakeSideMonitoredBuffer.java |   70 -
 .../asterix/common/feeds/MessageListener.java   |  122 -
 .../asterix/common/feeds/MessageReceiver.java   |  107 -
 .../asterix/common/feeds/MonitoredBuffer.java   |  386 ---
 .../common/feeds/MonitoredBufferTimerTasks.java |  290 --
 .../uci/ics/asterix/common/feeds/NodeLoad.java  |   58 -
 .../asterix/common/feeds/NodeLoadReport.java    |   95 -
 .../common/feeds/NodeLoadReportService.java     |  104 -
 .../uci/ics/asterix/common/feeds/Series.java    |   40 -
 .../uci/ics/asterix/common/feeds/SeriesAvg.java |   43 -
 .../ics/asterix/common/feeds/SeriesRate.java    |   88 -
 .../common/feeds/StorageFrameHandler.java       |  114 -
 .../feeds/StorageSideMonitoredBuffer.java       |  203 --
 .../common/feeds/SubscribableFeedRuntimeId.java |   48 -
 .../common/feeds/SubscribableRuntime.java       |   86 -
 .../feeds/api/IAdapterRuntimeManager.java       |   80 -
 .../common/feeds/api/ICentralFeedManager.java   |   30 -
 .../common/feeds/api/IDatasourceAdapter.java    |   43 -
 .../common/feeds/api/IExceptionHandler.java     |   39 -
 .../asterix/common/feeds/api/IFeedAdapter.java  |   56 -
 .../feeds/api/IFeedConnectionManager.java       |   71 -
 .../common/feeds/api/IFeedFrameHandler.java     |   35 -
 .../asterix/common/feeds/api/IFeedJoint.java    |  117 -
 .../api/IFeedLifecycleEventSubscriber.java      |   33 -
 .../IFeedLifecycleIntakeEventSubscriber.java    |   24 -
 .../feeds/api/IFeedLifecycleListener.java       |   52 -
 .../common/feeds/api/IFeedLoadManager.java      |   56 -
 .../asterix/common/feeds/api/IFeedManager.java  |   93 -
 .../common/feeds/api/IFeedMemoryComponent.java  |   54 -
 .../common/feeds/api/IFeedMemoryManager.java    |   54 -
 .../asterix/common/feeds/api/IFeedMessage.java  |   48 -
 .../common/feeds/api/IFeedMessageService.java   |   30 -
 .../common/feeds/api/IFeedMetadataManager.java  |   35 -
 .../common/feeds/api/IFeedMetricCollector.java  |   46 -
 .../api/IFeedOperatorOutputSideHandler.java     |   32 -
 .../asterix/common/feeds/api/IFeedProvider.java |   22 -
 .../asterix/common/feeds/api/IFeedRuntime.java  |   58 -
 .../asterix/common/feeds/api/IFeedService.java  |   22 -
 .../feeds/api/IFeedSubscriptionManager.java     |   37 -
 .../common/feeds/api/IFeedTrackingManager.java  |   25 -
 .../ics/asterix/common/feeds/api/IFeedWork.java |   24 -
 .../feeds/api/IFeedWorkEventListener.java       |   37 -
 .../common/feeds/api/IFeedWorkManager.java      |   21 -
 .../common/feeds/api/IFrameEventCallback.java   |   28 -
 .../feeds/api/IIntakeProgressTracker.java       |   25 -
 .../common/feeds/api/IMessageReceiver.java      |   24 -
 .../common/feeds/api/ISubscribableRuntime.java  |   57 -
 .../common/feeds/api/ISubscriberRuntime.java    |   26 -
 .../common/feeds/api/ISubscriptionProvider.java |   25 -
 .../feeds/api/ITupleTrackingFeedAdapter.java    |   20 -
 .../common/feeds/message/EndFeedMessage.java    |   93 -
 .../feeds/message/FeedCongestionMessage.java    |   99 -
 .../common/feeds/message/FeedMessage.java       |   38 -
 .../feeds/message/FeedMessageService.java       |  143 -
 .../common/feeds/message/FeedReportMessage.java |   96 -
 .../message/FeedTupleCommitAckMessage.java      |   95 -
 .../message/FeedTupleCommitResponseMessage.java |   69 -
 .../common/feeds/message/NodeReportMessage.java |   65 -
 .../feeds/message/ScaleInReportMessage.java     |  110 -
 .../feeds/message/StorageReportFeedMessage.java |  125 -
 .../message/ThrottlingEnabledFeedMessage.java   |   82 -
 .../common/functions/FunctionConstants.java     |   21 -
 .../common/functions/FunctionDescriptorTag.java |   23 -
 .../common/functions/FunctionSignature.java     |   79 -
 .../AbstractLSMIOOperationCallback.java         |  150 -
 .../LSMBTreeIOOperationCallback.java            |   59 -
 .../LSMBTreeIOOperationCallbackFactory.java     |   34 -
 .../LSMBTreeWithBuddyIOOperationCallback.java   |   54 -
 ...TreeWithBuddyIOOperationCallbackFactory.java |   33 -
 .../LSMInvertedIndexIOOperationCallback.java    |   57 -
 ...InvertedIndexIOOperationCallbackFactory.java |   34 -
 .../LSMRTreeIOOperationCallback.java            |   58 -
 .../LSMRTreeIOOperationCallbackFactory.java     |   34 -
 .../common/parse/IAsterixTupleParser.java       |   25 -
 .../common/parse/IParseFileSplitsDecl.java      |   29 -
 .../common/parse/ITupleForwardPolicy.java       |   44 -
 .../common/parse/ITupleParserPolicy.java        |   41 -
 .../transactions/AbstractOperationCallback.java |   65 -
 .../AbstractOperationCallbackFactory.java       |   40 -
 .../asterix/common/transactions/DatasetId.java  |   50 -
 .../IAsterixAppRuntimeContextProvider.java      |   58 -
 .../common/transactions/ILockHashTable.java     |   35 -
 .../common/transactions/ILockManager.java       |  117 -
 .../common/transactions/ILockMatrix.java        |   44 -
 .../common/transactions/ILogManager.java        |   53 -
 .../asterix/common/transactions/ILogPage.java   |   23 -
 .../asterix/common/transactions/ILogReader.java |   31 -
 .../asterix/common/transactions/ILogRecord.java |  107 -
 .../common/transactions/IRecoveryManager.java   |   89 -
 .../transactions/ITransactionContext.java       |   51 -
 .../transactions/ITransactionManager.java       |  123 -
 .../transactions/ITransactionSubsystem.java     |   32 -
 .../ics/asterix/common/transactions/JobId.java  |   56 -
 .../transactions/LogManagerProperties.java      |   93 -
 .../asterix/common/transactions/LogRecord.java  |  476 ---
 .../asterix/common/transactions/LogType.java    |   50 -
 .../common/transactions/MutableLong.java        |   51 -
 .../transactions/PrimaryKeyTupleReference.java  |   50 -
 .../ics/asterix/common/utils/ANameSchema.java   |   25 -
 .../asterix/common/utils/UTF8CharSequence.java  |   89 -
 .../asterix/test/base/AsterixTestHelper.java    |   76 -
 .../asterix/common/annotations/AutoDataGen.java |   34 +
 .../annotations/DateBetweenYearsDataGen.java    |   40 +
 .../DatetimeAddRandHoursDataGen.java            |   46 +
 .../DatetimeBetweenYearsDataGen.java            |   40 +
 .../annotations/FieldIntervalDataGen.java       |   53 +
 .../common/annotations/FieldValFileDataGen.java |   36 +
 .../FieldValFileSameIndexDataGen.java           |   41 +
 .../common/annotations/IRecordFieldDataGen.java |   32 +
 .../annotations/IRecordTypeAnnotation.java      |   23 +
 .../annotations/InsertRandIntDataGen.java       |   40 +
 .../asterix/common/annotations/ListDataGen.java |   40 +
 .../common/annotations/ListValFileDataGen.java  |   48 +
 .../annotations/RecordDataGenAnnotation.java    |   43 +
 ...econdaryIndexSearchExpressionAnnotation.java |   42 +
 .../asterix/common/annotations/TypeDataGen.java |   39 +
 .../annotations/UndeclaredFieldsDataGen.java    |   52 +
 .../asterix/common/api/AsterixContextInfo.java  |   40 +
 .../common/api/AsterixThreadExecutor.java       |   39 +
 .../common/api/AsterixThreadFactory.java        |   41 +
 .../common/api/IAsterixAppRuntimeContext.java   |   76 +
 .../asterix/common/api/IAsterixContextInfo.java |   28 +
 .../api/IAsterixRuntimeComponentsProvider.java  |   61 +
 .../common/api/IClusterEventsSubscriber.java    |   47 +
 .../common/api/IClusterManagementWork.java      |   34 +
 .../api/IClusterManagementWorkResponse.java     |   41 +
 .../common/api/ILocalResourceMetadata.java      |   29 +
 .../config/AbstractAsterixProperties.java       |   23 +
 .../config/AsterixCompilerProperties.java       |   54 +
 .../config/AsterixExternalProperties.java       |   89 +
 .../common/config/AsterixFeedProperties.java    |   62 +
 .../config/AsterixMetadataProperties.java       |   53 +
 .../config/AsterixPropertiesAccessor.java       |  164 +
 .../common/config/AsterixStorageProperties.java |  100 +
 .../config/AsterixTransactionProperties.java    |  113 +
 .../asterix/common/config/DatasetConfig.java    |   53 +
 .../asterix/common/config/GlobalConfig.java     |   68 +
 .../config/IAsterixPropertiesProvider.java      |   29 +
 .../common/config/IPropertyInterpreter.java     |   21 +
 .../common/config/OptimizationConfUtil.java     |   26 +
 .../common/config/PropertyInterpreters.java     |   94 +
 .../common/context/AsterixFileMapManager.java   |   87 +
 .../AsterixVirtualBufferCacheProvider.java      |   39 +
 .../common/context/BaseOperationTracker.java    |   64 +
 .../context/CorrelatedPrefixMergePolicy.java    |  126 +
 .../CorrelatedPrefixMergePolicyFactory.java     |   68 +
 .../common/context/DatasetLifecycleManager.java |  603 ++++
 .../context/ITransactionSubsystemProvider.java  |   25 +
 .../context/PrimaryIndexOperationTracker.java   |  196 ++
 .../context/TransactionSubsystemProvider.java   |   34 +
 .../common/dataflow/AsterixLSMIndexUtil.java    |   40 +
 ...erixLSMInsertDeleteOperatorNodePushable.java |  123 +
 ...rtedIndexInsertDeleteOperatorDescriptor.java |   63 +
 ...ixLSMTreeInsertDeleteOperatorDescriptor.java |   82 +
 .../IAsterixApplicationContextInfo.java         |   47 +
 .../common/exceptions/ACIDException.java        |   61 +
 .../common/exceptions/AsterixException.java     |   34 +
 .../exceptions/AsterixRuntimeException.java     |   33 +
 .../common/exceptions/FrameDataException.java   |   34 +
 .../common/exceptions/TypeException.java        |   39 +
 .../common/feeds/BasicMonitoredBuffer.java      |   70 +
 .../asterix/common/feeds/CollectionRuntime.java |   88 +
 .../feeds/ComputeSideMonitoredBuffer.java       |   69 +
 .../apache/asterix/common/feeds/DataBucket.java |   84 +
 .../asterix/common/feeds/DataBucketPool.java    |  106 +
 .../common/feeds/DistributeFeedFrameWriter.java |  135 +
 .../asterix/common/feeds/FeedActivity.java      |  115 +
 .../feeds/FeedCollectRuntimeInputHandler.java   |   57 +
 .../common/feeds/FeedConnectJobInfo.java        |   89 +
 .../asterix/common/feeds/FeedConnectionId.java  |   70 +
 .../common/feeds/FeedConnectionRequest.java     |  122 +
 .../asterix/common/feeds/FeedConstants.java     |   67 +
 .../common/feeds/FeedExceptionHandler.java      |  104 +
 .../asterix/common/feeds/FeedFrameCache.java    |  167 +
 .../common/feeds/FeedFrameCollector.java        |  154 +
 .../common/feeds/FeedFrameDiscarder.java        |   63 +
 .../asterix/common/feeds/FeedFrameHandlers.java |  302 ++
 .../asterix/common/feeds/FeedFrameSpiller.java  |  172 +
 .../common/feeds/FeedFrameTupleAccessor.java    |  106 +
 .../asterix/common/feeds/FeedFrameUtil.java     |   98 +
 .../org/apache/asterix/common/feeds/FeedId.java |   62 +
 .../asterix/common/feeds/FeedIntakeInfo.java    |   58 +
 .../asterix/common/feeds/FeedJobInfo.java       |   82 +
 .../asterix/common/feeds/FeedJointKey.java      |   75 +
 .../asterix/common/feeds/FeedMemoryManager.java |  108 +
 .../common/feeds/FeedMessageService.java        |  140 +
 .../common/feeds/FeedMetricCollector.java       |  185 ++
 .../common/feeds/FeedPolicyAccessor.java        |  164 +
 .../asterix/common/feeds/FeedRuntime.java       |   70 +
 .../asterix/common/feeds/FeedRuntimeId.java     |   76 +
 .../common/feeds/FeedRuntimeInputHandler.java   |  426 +++
 .../common/feeds/FeedRuntimeManager.java        |   77 +
 .../asterix/common/feeds/FeedRuntimeReport.java |   19 +
 .../common/feeds/FeedStorageStatistics.java     |   23 +
 .../common/feeds/FeedTupleCommitAckMessage.java |   93 +
 .../feeds/FeedTupleCommitResponseMessage.java   |   76 +
 .../asterix/common/feeds/FrameCollection.java   |   97 +
 .../asterix/common/feeds/FrameDistributor.java  |  359 ++
 .../common/feeds/FrameEventCallback.java        |   98 +
 .../common/feeds/IFramePostProcessor.java       |   24 +
 .../common/feeds/IFramePreprocessor.java        |   22 +
 .../asterix/common/feeds/IngestionRuntime.java  |   71 +
 .../common/feeds/IntakePartitionStatistics.java |   41 +
 .../common/feeds/IntakeSideMonitoredBuffer.java |   70 +
 .../asterix/common/feeds/MessageListener.java   |  122 +
 .../asterix/common/feeds/MessageReceiver.java   |  107 +
 .../asterix/common/feeds/MonitoredBuffer.java   |  386 +++
 .../common/feeds/MonitoredBufferTimerTasks.java |  290 ++
 .../apache/asterix/common/feeds/NodeLoad.java   |   58 +
 .../asterix/common/feeds/NodeLoadReport.java    |   95 +
 .../common/feeds/NodeLoadReportService.java     |  104 +
 .../org/apache/asterix/common/feeds/Series.java |   40 +
 .../apache/asterix/common/feeds/SeriesAvg.java  |   43 +
 .../apache/asterix/common/feeds/SeriesRate.java |   88 +
 .../common/feeds/StorageFrameHandler.java       |  114 +
 .../feeds/StorageSideMonitoredBuffer.java       |  203 ++
 .../common/feeds/SubscribableFeedRuntimeId.java |   48 +
 .../common/feeds/SubscribableRuntime.java       |   86 +
 .../feeds/api/IAdapterRuntimeManager.java       |   80 +
 .../common/feeds/api/ICentralFeedManager.java   |   30 +
 .../common/feeds/api/IDatasourceAdapter.java    |   43 +
 .../common/feeds/api/IExceptionHandler.java     |   39 +
 .../asterix/common/feeds/api/IFeedAdapter.java  |   56 +
 .../feeds/api/IFeedConnectionManager.java       |   71 +
 .../common/feeds/api/IFeedFrameHandler.java     |   35 +
 .../asterix/common/feeds/api/IFeedJoint.java    |  117 +
 .../api/IFeedLifecycleEventSubscriber.java      |   33 +
 .../IFeedLifecycleIntakeEventSubscriber.java    |   24 +
 .../feeds/api/IFeedLifecycleListener.java       |   52 +
 .../common/feeds/api/IFeedLoadManager.java      |   56 +
 .../asterix/common/feeds/api/IFeedManager.java  |   93 +
 .../common/feeds/api/IFeedMemoryComponent.java  |   54 +
 .../common/feeds/api/IFeedMemoryManager.java    |   54 +
 .../asterix/common/feeds/api/IFeedMessage.java  |   48 +
 .../common/feeds/api/IFeedMessageService.java   |   30 +
 .../common/feeds/api/IFeedMetadataManager.java  |   35 +
 .../common/feeds/api/IFeedMetricCollector.java  |   46 +
 .../api/IFeedOperatorOutputSideHandler.java     |   32 +
 .../asterix/common/feeds/api/IFeedProvider.java |   22 +
 .../asterix/common/feeds/api/IFeedRuntime.java  |   58 +
 .../asterix/common/feeds/api/IFeedService.java  |   22 +
 .../feeds/api/IFeedSubscriptionManager.java     |   37 +
 .../common/feeds/api/IFeedTrackingManager.java  |   25 +
 .../asterix/common/feeds/api/IFeedWork.java     |   24 +
 .../feeds/api/IFeedWorkEventListener.java       |   37 +
 .../common/feeds/api/IFeedWorkManager.java      |   21 +
 .../common/feeds/api/IFrameEventCallback.java   |   28 +
 .../feeds/api/IIntakeProgressTracker.java       |   25 +
 .../common/feeds/api/IMessageReceiver.java      |   24 +
 .../common/feeds/api/ISubscribableRuntime.java  |   57 +
 .../common/feeds/api/ISubscriberRuntime.java    |   26 +
 .../common/feeds/api/ISubscriptionProvider.java |   25 +
 .../feeds/api/ITupleTrackingFeedAdapter.java    |   20 +
 .../common/feeds/message/EndFeedMessage.java    |   93 +
 .../feeds/message/FeedCongestionMessage.java    |   99 +
 .../common/feeds/message/FeedMessage.java       |   38 +
 .../feeds/message/FeedMessageService.java       |  143 +
 .../common/feeds/message/FeedReportMessage.java |   96 +
 .../message/FeedTupleCommitAckMessage.java      |   95 +
 .../message/FeedTupleCommitResponseMessage.java |   69 +
 .../common/feeds/message/NodeReportMessage.java |   65 +
 .../feeds/message/ScaleInReportMessage.java     |  110 +
 .../feeds/message/StorageReportFeedMessage.java |  125 +
 .../message/ThrottlingEnabledFeedMessage.java   |   82 +
 .../common/functions/FunctionConstants.java     |   21 +
 .../common/functions/FunctionDescriptorTag.java |   23 +
 .../common/functions/FunctionSignature.java     |   79 +
 .../AbstractLSMIOOperationCallback.java         |  150 +
 .../LSMBTreeIOOperationCallback.java            |   59 +
 .../LSMBTreeIOOperationCallbackFactory.java     |   34 +
 .../LSMBTreeWithBuddyIOOperationCallback.java   |   54 +
 ...TreeWithBuddyIOOperationCallbackFactory.java |   33 +
 .../LSMInvertedIndexIOOperationCallback.java    |   57 +
 ...InvertedIndexIOOperationCallbackFactory.java |   34 +
 .../LSMRTreeIOOperationCallback.java            |   58 +
 .../LSMRTreeIOOperationCallbackFactory.java     |   34 +
 .../common/parse/IAsterixTupleParser.java       |   25 +
 .../common/parse/IParseFileSplitsDecl.java      |   29 +
 .../common/parse/ITupleForwardPolicy.java       |   44 +
 .../common/parse/ITupleParserPolicy.java        |   41 +
 .../transactions/AbstractOperationCallback.java |   65 +
 .../AbstractOperationCallbackFactory.java       |   40 +
 .../asterix/common/transactions/DatasetId.java  |   50 +
 .../IAsterixAppRuntimeContextProvider.java      |   58 +
 .../common/transactions/ILockHashTable.java     |   35 +
 .../common/transactions/ILockManager.java       |  117 +
 .../common/transactions/ILockMatrix.java        |   44 +
 .../common/transactions/ILogManager.java        |   53 +
 .../asterix/common/transactions/ILogPage.java   |   23 +
 .../asterix/common/transactions/ILogReader.java |   31 +
 .../asterix/common/transactions/ILogRecord.java |  107 +
 .../common/transactions/IRecoveryManager.java   |   89 +
 .../transactions/ITransactionContext.java       |   51 +
 .../transactions/ITransactionManager.java       |  123 +
 .../transactions/ITransactionSubsystem.java     |   32 +
 .../asterix/common/transactions/JobId.java      |   56 +
 .../transactions/LogManagerProperties.java      |   93 +
 .../asterix/common/transactions/LogRecord.java  |  476 +++
 .../asterix/common/transactions/LogType.java    |   50 +
 .../common/transactions/MutableLong.java        |   51 +
 .../transactions/PrimaryKeyTupleReference.java  |   50 +
 .../asterix/common/utils/ANameSchema.java       |   25 +
 .../asterix/common/utils/UTF8CharSequence.java  |   89 +
 .../asterix/test/base/AsterixTestHelper.java    |   76 +
 .../uci/ics/asterix/test/aql/TestHelper.java    |   30 -
 .../uci/ics/asterix/test/aql/TestsUtils.java    |  530 ---
 .../org/apache/asterix/test/aql/TestHelper.java |   30 +
 .../org/apache/asterix/test/aql/TestsUtils.java |  530 +++
 .../uci/ics/asterix/event/api/ClusterInfo.java  |   26 -
 .../edu/uci/ics/asterix/event/api/NodeInfo.java |   34 -
 .../ics/asterix/event/driver/EventConfig.java   |   36 -
 .../ics/asterix/event/driver/EventDriver.java   |  151 -
 .../ics/asterix/event/error/EventException.java |   25 -
 .../ics/asterix/event/error/OutputHandler.java  |   89 -
 .../asterix/event/error/VerificationUtil.java   |  126 -
 .../management/AsterixEventServiceClient.java   |  229 --
 .../event/management/DefaultOutputHandler.java  |   26 -
 .../asterix/event/management/ErrorHandler.java  |   19 -
 .../asterix/event/management/EventExecutor.java |  131 -
 .../ics/asterix/event/management/EventTask.java |  154 -
 .../event/management/EventTaskReport.java       |   50 -
 .../ics/asterix/event/management/EventUtil.java |  256 --
 .../event/management/IOutputHandler.java        |   22 -
 .../event/management/IPatternListener.java      |   24 -
 .../event/management/OutputAnalysis.java        |   34 -
 .../asterix/event/management/Randomizer.java    |   65 -
 .../ics/asterix/event/management/ValueType.java |   95 -
 .../asterix/event/model/AsterixInstance.java    |  215 --
 .../event/model/AsterixRuntimeState.java        |   54 -
 .../uci/ics/asterix/event/model/BackupInfo.java |   68 -
 .../uci/ics/asterix/event/model/EventList.java  |   34 -
 .../ics/asterix/event/model/ProcessInfo.java    |   54 -
 .../event/service/AsterixEventService.java      |   92 -
 .../event/service/AsterixEventServiceUtil.java  |  552 ----
 .../asterix/event/service/ILookupService.java   |   41 -
 .../asterix/event/service/ServiceProvider.java  |   30 -
 .../asterix/event/service/ZooKeeperService.java |  247 --
 .../asterix/event/util/AsterixConstants.java    |   21 -
 .../ics/asterix/event/util/PatternCreator.java  |  645 ----
 .../apache/asterix/event/api/ClusterInfo.java   |   26 +
 .../org/apache/asterix/event/api/NodeInfo.java  |   34 +
 .../asterix/event/driver/EventConfig.java       |   36 +
 .../asterix/event/driver/EventDriver.java       |  151 +
 .../asterix/event/error/EventException.java     |   25 +
 .../asterix/event/error/OutputHandler.java      |   89 +
 .../asterix/event/error/VerificationUtil.java   |  126 +
 .../management/AsterixEventServiceClient.java   |  229 ++
 .../event/management/DefaultOutputHandler.java  |   26 +
 .../asterix/event/management/ErrorHandler.java  |   19 +
 .../asterix/event/management/EventExecutor.java |  131 +
 .../asterix/event/management/EventTask.java     |  154 +
 .../event/management/EventTaskReport.java       |   50 +
 .../asterix/event/management/EventUtil.java     |  256 ++
 .../event/management/IOutputHandler.java        |   22 +
 .../event/management/IPatternListener.java      |   24 +
 .../event/management/OutputAnalysis.java        |   34 +
 .../asterix/event/management/Randomizer.java    |   65 +
 .../asterix/event/management/ValueType.java     |   95 +
 .../asterix/event/model/AsterixInstance.java    |  215 ++
 .../event/model/AsterixRuntimeState.java        |   54 +
 .../apache/asterix/event/model/BackupInfo.java  |   68 +
 .../apache/asterix/event/model/EventList.java   |   34 +
 .../apache/asterix/event/model/ProcessInfo.java |   54 +
 .../event/service/AsterixEventService.java      |   92 +
 .../event/service/AsterixEventServiceUtil.java  |  552 ++++
 .../asterix/event/service/ILookupService.java   |   41 +
 .../asterix/event/service/ServiceProvider.java  |   30 +
 .../asterix/event/service/ZooKeeperService.java |  247 ++
 .../asterix/event/util/AsterixConstants.java    |   21 +
 .../asterix/event/util/PatternCreator.java      |  645 ++++
 .../adapter/factory/CNNFeedAdapterFactory.java  |  153 -
 .../adapter/factory/HDFSAdapterFactory.java     |  342 --
 .../factory/HDFSIndexingAdapterFactory.java     |  208 --
 .../adapter/factory/HiveAdapterFactory.java     |  123 -
 .../factory/NCFileSystemAdapterFactory.java     |  164 -
 .../PullBasedAzureTwitterAdapterFactory.java    |  169 -
 .../factory/PullBasedTwitterAdapterFactory.java |  116 -
 .../factory/PushBasedTwitterAdapterFactory.java |  105 -
 .../adapter/factory/RSSFeedAdapterFactory.java  |  111 -
 .../factory/StreamBasedAdapterFactory.java      |   45 -
 .../dataset/adapter/AzureTweetEntity.java       |   47 -
 .../adapter/AzureTweetMetadataEntity.java       |   91 -
 .../dataset/adapter/ClientBasedFeedAdapter.java |  150 -
 .../external/dataset/adapter/FeedClient.java    |  170 -
 .../dataset/adapter/FileSystemBasedAdapter.java |   58 -
 .../external/dataset/adapter/HDFSAdapter.java   |   93 -
 .../dataset/adapter/HDFSIndexingAdapter.java    |   75 -
 .../external/dataset/adapter/HiveAdapter.java   |   45 -
 .../external/dataset/adapter/IFeedClient.java   |   42 -
 .../dataset/adapter/IFeedClientFactory.java     |   36 -
 .../dataset/adapter/IPullBasedFeedClient.java   |   42 -
 .../dataset/adapter/NCFileSystemAdapter.java    |   67 -
 .../dataset/adapter/PullBasedAdapter.java       |  157 -
 .../adapter/PullBasedAzureFeedClient.java       |  186 --
 .../adapter/PullBasedAzureTwitterAdapter.java   |   99 -
 .../adapter/PullBasedTwitterAdapter.java        |   76 -
 .../adapter/PullBasedTwitterFeedClient.java     |  101 -
 .../adapter/PushBasedTwitterAdapter.java        |   66 -
 .../adapter/PushBasedTwitterFeedClient.java     |  118 -
 .../dataset/adapter/RSSFeedAdapter.java         |   94 -
 .../external/dataset/adapter/RSSFeedClient.java |  157 -
 .../dataset/adapter/StreamBasedAdapter.java     |   60 -
 .../dataflow/AbstractIndexingTupleParser.java   |   92 -
 .../AdmOrDelimitedControlledTupleParser.java    |  238 --
 .../AdmOrDelimitedIndexingTupleParser.java      |  101 -
 .../dataflow/FileIndexTupleTranslator.java      |   85 -
 .../dataflow/HDFSIndexingParserFactory.java     |  141 -
 .../indexing/dataflow/HDFSLookupAdapter.java    |  183 --
 .../dataflow/HDFSLookupAdapterFactory.java      |  178 -
 .../dataflow/HDFSObjectTupleParser.java         |   76 -
 .../dataflow/HDFSObjectTupleParserFactory.java  |   65 -
 .../indexing/dataflow/HiveObjectParser.java     |  420 ---
 .../dataflow/IAsterixHDFSRecordParser.java      |   51 -
 .../dataflow/IControlledTupleParser.java        |   40 -
 .../dataflow/IControlledTupleParserFactory.java |   19 -
 .../indexing/dataflow/IndexingScheduler.java    |  347 --
 .../dataflow/RCFileControlledTupleParser.java   |  199 --
 .../dataflow/RCFileIndexingTupleParser.java     |   58 -
 .../dataflow/SeqOrTxtControlledTupleParser.java |  187 --
 .../dataflow/TextOrSeqIndexingTupleParser.java  |   43 -
 .../input/AbstractHDFSLookupInputStream.java    |  114 -
 .../indexing/input/AbstractHDFSReader.java      |  103 -
 .../input/GenericFileAwareRecordReader.java     |  125 -
 .../indexing/input/GenericRecordReader.java     |  131 -
 .../indexing/input/HDFSSeekableLineReader.java  |  246 --
 .../external/indexing/input/ILookupReader.java  |   20 -
 .../indexing/input/RCFileDataReader.java        |  176 -
 .../indexing/input/RCFileLookupReader.java      |  103 -
 .../input/SequenceFileLookupInputStream.java    |   73 -
 .../input/SequenceFileLookupReader.java         |  106 -
 .../input/TextFileLookupInputStream.java        |   66 -
 .../indexing/input/TextFileLookupReader.java    |   93 -
 .../indexing/input/TextualDataReader.java       |  234 --
 .../input/TextualFullScanDataReader.java        |  205 --
 ...xternalDatasetIndexesOperatorDescriptor.java |  118 -
 ...alDatasetIndexesAbortOperatorDescriptor.java |   55 -
 ...lDatasetIndexesCommitOperatorDescriptor.java |   63 -
 ...DatasetIndexesRecoverOperatorDescriptor.java |   55 -
 ...ternalIndexBulkModifyOperatorDescriptor.java |   65 -
 ...rnalIndexBulkModifyOperatorNodePushable.java |  104 -
 .../operators/IndexInfoOperatorDescriptor.java  |  122 -
 .../external/library/ExternalFunction.java      |  110 -
 .../ExternalFunctionDescriptorProvider.java     |   65 -
 .../library/ExternalFunctionProvider.java       |   82 -
 .../ExternalScalarFunctionEvaluatorFactory.java |   40 -
 .../external/library/IExternalFunction.java     |   23 -
 .../library/IExternalScalarFunction.java        |   21 -
 .../external/library/IFunctionFactory.java      |   21 -
 .../external/library/IFunctionHelper.java       |   36 -
 .../external/library/IResultCollector.java      |   41 -
 .../external/library/JTypeObjectFactory.java    |  146 -
 .../external/library/JavaFunctionHelper.java    |  181 --
 .../external/library/ResultCollector.java       |  147 -
 .../library/RuntimeExternalFunctionUtil.java    |  100 -
 .../ics/asterix/external/library/TypeInfo.java  |   63 -
 .../external/library/java/IJListAccessor.java   |   26 -
 .../asterix/external/library/java/IJObject.java |   33 -
 .../external/library/java/IJObjectAccessor.java |   25 -
 .../external/library/java/IJRecordAccessor.java |   29 -
 .../asterix/external/library/java/IJType.java   |   25 -
 .../external/library/java/JObjectAccessors.java |  584 ----
 .../library/java/JObjectPointableVisitor.java   |   89 -
 .../external/library/java/JObjectUtil.java      |  448 ---
 .../asterix/external/library/java/JObjects.java | 1182 -------
 .../asterix/external/library/java/JTypeTag.java |   27 -
 .../ics/asterix/external/util/DNSResolver.java  |   50 -
 .../external/util/DNSResolverFactory.java       |   29 -
 .../ics/asterix/external/util/Datatypes.java    |   35 -
 .../asterix/external/util/INodeResolver.java    |   33 -
 .../external/util/INodeResolverFactory.java     |   30 -
 .../external/util/IdentitiyResolverFactory.java |   31 -
 .../asterix/external/util/IdentityResolver.java |   28 -
 .../asterix/external/util/TweetProcessor.java   |   82 -
 .../ics/asterix/external/util/TwitterUtil.java  |  178 -
 .../adapter/factory/CNNFeedAdapterFactory.java  |  153 +
 .../adapter/factory/HDFSAdapterFactory.java     |  342 ++
 .../factory/HDFSIndexingAdapterFactory.java     |  208 ++
 .../adapter/factory/HiveAdapterFactory.java     |  123 +
 .../factory/NCFileSystemAdapterFactory.java     |  164 +
 .../PullBasedAzureTwitterAdapterFactory.java    |  169 +
 .../factory/PullBasedTwitterAdapterFactory.java |  116 +
 .../factory/PushBasedTwitterAdapterFactory.java |  105 +
 .../adapter/factory/RSSFeedAdapterFactory.java  |  111 +
 .../factory/StreamBasedAdapterFactory.java      |   45 +
 .../dataset/adapter/AzureTweetEntity.java       |   47 +
 .../adapter/AzureTweetMetadataEntity.java       |   91 +
 .../dataset/adapter/ClientBasedFeedAdapter.java |  150 +
 .../external/dataset/adapter/FeedClient.java    |  170 +
 .../dataset/adapter/FileSystemBasedAdapter.java |   58 +
 .../external/dataset/adapter/HDFSAdapter.java   |   93 +
 .../dataset/adapter/HDFSIndexingAdapter.java    |   75 +
 .../external/dataset/adapter/HiveAdapter.java   |   45 +
 .../external/dataset/adapter/IFeedClient.java   |   42 +
 .../dataset/adapter/IFeedClientFactory.java     |   36 +
 .../dataset/adapter/IPullBasedFeedClient.java   |   42 +
 .../dataset/adapter/NCFileSystemAdapter.java    |   67 +
 .../dataset/adapter/PullBasedAdapter.java       |  157 +
 .../adapter/PullBasedAzureFeedClient.java       |  186 ++
 .../adapter/PullBasedAzureTwitterAdapter.java   |   99 +
 .../adapter/PullBasedTwitterAdapter.java        |   76 +
 .../adapter/PullBasedTwitterFeedClient.java     |  101 +
 .../adapter/PushBasedTwitterAdapter.java        |   66 +
 .../adapter/PushBasedTwitterFeedClient.java     |  118 +
 .../dataset/adapter/RSSFeedAdapter.java         |   94 +
 .../external/dataset/adapter/RSSFeedClient.java |  157 +
 .../dataset/adapter/StreamBasedAdapter.java     |   60 +
 .../dataflow/AbstractIndexingTupleParser.java   |   92 +
 .../AdmOrDelimitedControlledTupleParser.java    |  238 ++
 .../AdmOrDelimitedIndexingTupleParser.java      |  101 +
 .../dataflow/FileIndexTupleTranslator.java      |   85 +
 .../dataflow/HDFSIndexingParserFactory.java     |  141 +
 .../indexing/dataflow/HDFSLookupAdapter.java    |  183 ++
 .../dataflow/HDFSLookupAdapterFactory.java      |  178 +
 .../dataflow/HDFSObjectTupleParser.java         |   76 +
 .../dataflow/HDFSObjectTupleParserFactory.java  |   65 +
 .../indexing/dataflow/HiveObjectParser.java     |  420 +++
 .../dataflow/IAsterixHDFSRecordParser.java      |   51 +
 .../dataflow/IControlledTupleParser.java        |   40 +
 .../dataflow/IControlledTupleParserFactory.java |   19 +
 .../indexing/dataflow/IndexingScheduler.java    |  347 ++
 .../dataflow/RCFileControlledTupleParser.java   |  199 ++
 .../dataflow/RCFileIndexingTupleParser.java     |   58 +
 .../dataflow/SeqOrTxtControlledTupleParser.java |  187 ++
 .../dataflow/TextOrSeqIndexingTupleParser.java  |   43 +
 .../input/AbstractHDFSLookupInputStream.java    |  114 +
 .../indexing/input/AbstractHDFSReader.java      |  103 +
 .../input/GenericFileAwareRecordReader.java     |  125 +
 .../indexing/input/GenericRecordReader.java     |  131 +
 .../indexing/input/HDFSSeekableLineReader.java  |  246 ++
 .../external/indexing/input/ILookupReader.java  |   20 +
 .../indexing/input/RCFileDataReader.java        |  176 +
 .../indexing/input/RCFileLookupReader.java      |  103 +
 .../input/SequenceFileLookupInputStream.java    |   73 +
 .../input/SequenceFileLookupReader.java         |  106 +
 .../input/TextFileLookupInputStream.java        |   66 +
 .../indexing/input/TextFileLookupReader.java    |   93 +
 .../indexing/input/TextualDataReader.java       |  234 ++
 .../input/TextualFullScanDataReader.java        |  205 ++
 ...xternalDatasetIndexesOperatorDescriptor.java |  118 +
 ...alDatasetIndexesAbortOperatorDescriptor.java |   55 +
 ...lDatasetIndexesCommitOperatorDescriptor.java |   63 +
 ...DatasetIndexesRecoverOperatorDescriptor.java |   55 +
 ...ternalIndexBulkModifyOperatorDescriptor.java |   65 +
 ...rnalIndexBulkModifyOperatorNodePushable.java |  104 +
 .../operators/IndexInfoOperatorDescriptor.java  |  122 +
 .../external/library/ExternalFunction.java      |  110 +
 .../ExternalFunctionDescriptorProvider.java     |   65 +
 .../library/ExternalFunctionProvider.java       |   82 +
 .../ExternalScalarFunctionEvaluatorFactory.java |   40 +
 .../external/library/IExternalFunction.java     |   23 +
 .../library/IExternalScalarFunction.java        |   21 +
 .../external/library/IFunctionFactory.java      |   21 +
 .../external/library/IFunctionHelper.java       |   36 +
 .../external/library/IResultCollector.java      |   41 +
 .../external/library/JTypeObjectFactory.java    |  146 +
 .../external/library/JavaFunctionHelper.java    |  181 ++
 .../external/library/ResultCollector.java       |  147 +
 .../library/RuntimeExternalFunctionUtil.java    |  100 +
 .../asterix/external/library/TypeInfo.java      |   63 +
 .../external/library/java/IJListAccessor.java   |   26 +
 .../asterix/external/library/java/IJObject.java |   33 +
 .../external/library/java/IJObjectAccessor.java |   25 +
 .../external/library/java/IJRecordAccessor.java |   29 +
 .../asterix/external/library/java/IJType.java   |   25 +
 .../external/library/java/JObjectAccessors.java |  584 ++++
 .../library/java/JObjectPointableVisitor.java   |   89 +
 .../external/library/java/JObjectUtil.java      |  448 +++
 .../asterix/external/library/java/JObjects.java | 1182 +++++++
 .../asterix/external/library/java/JTypeTag.java |   27 +
 .../asterix/external/util/DNSResolver.java      |   50 +
 .../external/util/DNSResolverFactory.java       |   29 +
 .../apache/asterix/external/util/Datatypes.java |   35 +
 .../asterix/external/util/INodeResolver.java    |   33 +
 .../external/util/INodeResolverFactory.java     |   30 +
 .../external/util/IdentitiyResolverFactory.java |   31 +
 .../asterix/external/util/IdentityResolver.java |   28 +
 .../asterix/external/util/TweetProcessor.java   |   82 +
 .../asterix/external/util/TwitterUtil.java      |  178 +
 .../external/library/AddHashTagsFactory.java    |   27 -
 .../external/library/AddHashTagsFunction.java   |   76 -
 .../library/AddHashTagsInPlaceFactory.java      |   25 -
 .../library/AddHashTagsInPlaceFunction.java     |   54 -
 .../external/library/AllTypesFactory.java       |   27 -
 .../external/library/AllTypesFunction.java      |  108 -
 .../external/library/CapitalFinderFactory.java  |   27 -
 .../external/library/CapitalFinderFunction.java |   55 -
 .../external/library/EchoDelayFactory.java      |   25 -
 .../external/library/EchoDelayFunction.java     |   46 -
 .../external/library/ParseTweetFactory.java     |   27 -
 .../external/library/ParseTweetFunction.java    |   60 -
 .../asterix/external/library/SumFactory.java    |   25 -
 .../asterix/external/library/SumFunction.java   |   43 -
 .../external/library/UpperCaseFactory.java      |   27 -
 .../external/library/UpperCaseFunction.java     |   58 -
 .../library/adapter/TestTypedAdapter.java       |  140 -
 .../adapter/TestTypedAdapterFactory.java        |  106 -
 .../external/library/AddHashTagsFactory.java    |   27 +
 .../external/library/AddHashTagsFunction.java   |   76 +
 .../library/AddHashTagsInPlaceFactory.java      |   25 +
 .../library/AddHashTagsInPlaceFunction.java     |   54 +
 .../external/library/AllTypesFactory.java       |   27 +
 .../external/library/AllTypesFunction.java      |  108 +
 .../external/library/CapitalFinderFactory.java  |   27 +
 .../external/library/CapitalFinderFunction.java |   55 +
 .../external/library/EchoDelayFactory.java      |   25 +
 .../external/library/EchoDelayFunction.java     |   46 +
 .../external/library/ParseTweetFactory.java     |   27 +
 .../external/library/ParseTweetFunction.java    |   60 +
 .../asterix/external/library/SumFactory.java    |   25 +
 .../asterix/external/library/SumFunction.java   |   43 +
 .../external/library/UpperCaseFactory.java      |   27 +
 .../external/library/UpperCaseFunction.java     |   58 +
 .../library/adapter/TestTypedAdapter.java       |  140 +
 .../adapter/TestTypedAdapterFactory.java        |  106 +
 .../asterix/fuzzyjoin/FuzzyFiltersJaccard.java  |  104 -
 .../fuzzyjoin/FuzzyJoinAppendLength.java        |   60 -
 .../ics/asterix/fuzzyjoin/FuzzyJoinConfig.java  |   66 -
 .../ics/asterix/fuzzyjoin/FuzzyJoinContext.java |   38 -
 .../ics/asterix/fuzzyjoin/FuzzyJoinMemory.java  |  317 --
 .../asterix/fuzzyjoin/FuzzyJoinTokenize.java    |  135 -
 .../ics/asterix/fuzzyjoin/FuzzyJoinUtil.java    |   87 -
 .../edu/uci/ics/asterix/fuzzyjoin/IntArray.java |   80 -
 .../edu/uci/ics/asterix/fuzzyjoin/IntPair.java  |   96 -
 .../fuzzyjoin/LittleEndianIntInputStream.java   |   52 -
 .../fuzzyjoin/LittleEndianIntOutputStream.java  |   37 -
 .../ics/asterix/fuzzyjoin/MutableInteger.java   |   70 -
 .../asterix/fuzzyjoin/RIDPairSimilarity.java    |   57 -
 .../uci/ics/asterix/fuzzyjoin/ResultJoin.java   |   30 -
 .../ics/asterix/fuzzyjoin/ResultSelfJoin.java   |   31 -
 .../fuzzyjoin/invertedlist/InvertedList.java    |   27 -
 .../invertedlist/InvertedListLengthFixed.java   |   78 -
 .../invertedlist/InvertedListLengthList.java    |  107 -
 .../invertedlist/InvertedListPlain.java         |   49 -
 .../fuzzyjoin/invertedlist/InvertedLists.java   |   26 -
 .../invertedlist/InvertedListsLengthFixed.java  |   44 -
 .../invertedlist/InvertedListsLengthList.java   |   49 -
 .../fuzzyjoin/recordgroup/RecordGroup.java      |   36 -
 .../recordgroup/RecordGroupFactory.java         |   44 -
 .../recordgroup/RecordGroupLengthCount.java     |  111 -
 .../recordgroup/RecordGroupLengthIdentity.java  |   47 -
 .../recordgroup/RecordGroupLengthRange.java     |   75 -
 .../recordgroup/RecordGroupSingle.java          |   45 -
 .../recordgroup/RecordGroupTokenFrequency.java  |   47 -
 .../RecordGroupTokenFrequencyMirror.java        |   53 -
 .../recordgroup/RecordGroupTokenIdentity.java   |   47 -
 .../similarity/IGenericSimilarityMetric.java    |   32 -
 .../fuzzyjoin/similarity/IListIterator.java     |   38 -
 .../fuzzyjoin/similarity/PartialIntersect.java  |   42 -
 .../fuzzyjoin/similarity/SimilarityFilters.java |   45 -
 .../similarity/SimilarityFiltersFactory.java    |   29 -
 .../similarity/SimilarityFiltersJaccard.java    |  297 --
 .../fuzzyjoin/similarity/SimilarityMetric.java  |  185 --
 .../SimilarityMetricEditDistance.java           |  324 --
 .../similarity/SimilarityMetricFactory.java     |   29 -
 .../similarity/SimilarityMetricJaccard.java     |  120 -
 .../AbstractUTF8StringBinaryTokenizer.java      |   77 -
 .../fuzzyjoin/tokenizer/AbstractUTF8Token.java  |  103 -
 .../tokenizer/AbstractUTF8TokenFactory.java     |   36 -
 .../DelimitedUTF8StringBinaryTokenizer.java     |   79 -
 ...limitedUTF8StringBinaryTokenizerFactory.java |   40 -
 .../tokenizer/HashedUTF8NGramToken.java         |   62 -
 .../tokenizer/HashedUTF8NGramTokenFactory.java  |   38 -
 .../tokenizer/HashedUTF8WordToken.java          |   84 -
 .../tokenizer/HashedUTF8WordTokenFactory.java   |   38 -
 .../fuzzyjoin/tokenizer/IBinaryTokenizer.java   |   30 -
 .../tokenizer/IBinaryTokenizerFactory.java      |   26 -
 .../fuzzyjoin/tokenizer/INGramToken.java        |   28 -
 .../ics/asterix/fuzzyjoin/tokenizer/IToken.java |   39 -
 .../fuzzyjoin/tokenizer/ITokenFactory.java      |   26 -
 .../fuzzyjoin/tokenizer/NGramTokenizer.java     |   90 -
 .../NGramUTF8StringBinaryTokenizer.java         |  116 -
 .../fuzzyjoin/tokenizer/StringUtils.java        |  216 --
 .../ics/asterix/fuzzyjoin/tokenizer/Token.java  |  118 -
 .../asterix/fuzzyjoin/tokenizer/Tokenizer.java  |   27 -
 .../fuzzyjoin/tokenizer/TokenizerBuffered.java  |   30 -
 .../tokenizer/TokenizerBufferedFactory.java     |   33 -
 .../fuzzyjoin/tokenizer/TokenizerFactory.java   |   31 -
 .../fuzzyjoin/tokenizer/UTF8NGramToken.java     |   83 -
 .../tokenizer/UTF8NGramTokenFactory.java        |   39 -
 .../fuzzyjoin/tokenizer/UTF8WordToken.java      |   44 -
 .../tokenizer/UTF8WordTokenFactory.java         |   39 -
 .../fuzzyjoin/tokenizer/WordTokenizer.java      |   68 -
 .../tokenizer/WordTokenizerBuffered.java        |   92 -
 .../fuzzyjoin/tokenorder/IntTokenCountRank.java |   28 -
 .../tokenorder/IntTokenCountRankFrequency.java  |   58 -
 .../fuzzyjoin/tokenorder/IntTokenRank.java      |   28 -
 .../tokenorder/IntTokenRankFrequency.java       |   54 -
 .../asterix/fuzzyjoin/tokenorder/TokenLoad.java |   62 -
 .../asterix/fuzzyjoin/tokenorder/TokenRank.java |   31 -
 .../tokenorder/TokenRankBufferedFrequency.java  |   75 -
 .../tokenorder/TokenRankFrequency.java          |   61 -
 .../asterix/fuzzyjoin/FuzzyFiltersJaccard.java  |  104 +
 .../fuzzyjoin/FuzzyJoinAppendLength.java        |   60 +
 .../asterix/fuzzyjoin/FuzzyJoinConfig.java      |   66 +
 .../asterix/fuzzyjoin/FuzzyJoinContext.java     |   38 +
 .../asterix/fuzzyjoin/FuzzyJoinMemory.java      |  317 ++
 .../asterix/fuzzyjoin/FuzzyJoinTokenize.java    |  135 +
 .../apache/asterix/fuzzyjoin/FuzzyJoinUtil.java |   87 +
 .../org/apache/asterix/fuzzyjoin/IntArray.java  |   80 +
 .../org/apache/asterix/fuzzyjoin/IntPair.java   |   96 +
 .../fuzzyjoin/LittleEndianIntInputStream.java   |   52 +
 .../fuzzyjoin/LittleEndianIntOutputStream.java  |   37 +
 .../asterix/fuzzyjoin/MutableInteger.java       |   70 +
 .../asterix/fuzzyjoin/RIDPairSimilarity.java    |   57 +
 .../apache/asterix/fuzzyjoin/ResultJoin.java    |   30 +
 .../asterix/fuzzyjoin/ResultSelfJoin.java       |   31 +
 .../fuzzyjoin/invertedlist/InvertedList.java    |   27 +
 .../invertedlist/InvertedListLengthFixed.java   |   78 +
 .../invertedlist/InvertedListLengthList.java    |  107 +
 .../invertedlist/InvertedListPlain.java         |   49 +
 .../fuzzyjoin/invertedlist/InvertedLists.java   |   26 +
 .../invertedlist/InvertedListsLengthFixed.java  |   44 +
 .../invertedlist/InvertedListsLengthList.java   |   49 +
 .../fuzzyjoin/recordgroup/RecordGroup.java      |   36 +
 .../recordgroup/RecordGroupFactory.java         |   44 +
 .../recordgroup/RecordGroupLengthCount.java     |  111 +
 .../recordgroup/RecordGroupLengthIdentity.java  |   47 +
 .../recordgroup/RecordGroupLengthRange.java     |   75 +
 .../recordgroup/RecordGroupSingle.java          |   45 +
 .../recordgroup/RecordGroupTokenFrequency.java  |   47 +
 .../RecordGroupTokenFrequencyMirror.java        |   53 +
 .../recordgroup/RecordGroupTokenIdentity.java   |   47 +
 .../similarity/IGenericSimilarityMetric.java    |   32 +
 .../fuzzyjoin/similarity/IListIterator.java     |   38 +
 .../fuzzyjoin/similarity/PartialIntersect.java  |   42 +
 .../fuzzyjoin/similarity/SimilarityFilters.java |   45 +
 .../similarity/SimilarityFiltersFactory.java    |   29 +
 .../similarity/SimilarityFiltersJaccard.java    |  297 ++
 .../fuzzyjoin/similarity/SimilarityMetric.java  |  185 ++
 .../SimilarityMetricEditDistance.java           |  324 ++
 .../similarity/SimilarityMetricFactory.java     |   29 +
 .../similarity/SimilarityMetricJaccard.java     |  120 +
 .../AbstractUTF8StringBinaryTokenizer.java      |   77 +
 .../fuzzyjoin/tokenizer/AbstractUTF8Token.java  |  103 +
 .../tokenizer/AbstractUTF8TokenFactory.java     |   36 +
 .../DelimitedUTF8StringBinaryTokenizer.java     |   79 +
 ...limitedUTF8StringBinaryTokenizerFactory.java |   40 +
 .../tokenizer/HashedUTF8NGramToken.java         |   62 +
 .../tokenizer/HashedUTF8NGramTokenFactory.java  |   38 +
 .../tokenizer/HashedUTF8WordToken.java          |   84 +
 .../tokenizer/HashedUTF8WordTokenFactory.java   |   38 +
 .../fuzzyjoin/tokenizer/IBinaryTokenizer.java   |   30 +
 .../tokenizer/IBinaryTokenizerFactory.java      |   26 +
 .../fuzzyjoin/tokenizer/INGramToken.java        |   28 +
 .../asterix/fuzzyjoin/tokenizer/IToken.java     |   39 +
 .../fuzzyjoin/tokenizer/ITokenFactory.java      |   26 +
 .../fuzzyjoin/tokenizer/NGramTokenizer.java     |   90 +
 .../NGramUTF8StringBinaryTokenizer.java         |  116 +
 .../fuzzyjoin/tokenizer/StringUtils.java        |  216 ++
 .../asterix/fuzzyjoin/tokenizer/Token.java      |  118 +
 .../asterix/fuzzyjoin/tokenizer/Tokenizer.java  |   27 +
 .../fuzzyjoin/tokenizer/TokenizerBuffered.java  |   30 +
 .../tokenizer/TokenizerBufferedFactory.java     |   33 +
 .../fuzzyjoin/tokenizer/TokenizerFactory.java   |   31 +
 .../fuzzyjoin/tokenizer/UTF8NGramToken.java     |   83 +
 .../tokenizer/UTF8NGramTokenFactory.java        |   39 +
 .../fuzzyjoin/tokenizer/UTF8WordToken.java      |   44 +
 .../tokenizer/UTF8WordTokenFactory.java         |   39 +
 .../fuzzyjoin/tokenizer/WordTokenizer.java      |   68 +
 .../tokenizer/WordTokenizerBuffered.java        |   92 +
 .../fuzzyjoin/tokenorder/IntTokenCountRank.java |   28 +
 .../tokenorder/IntTokenCountRankFrequency.java  |   58 +
 .../fuzzyjoin/tokenorder/IntTokenRank.java      |   28 +
 .../tokenorder/IntTokenRankFrequency.java       |   54 +
 .../asterix/fuzzyjoin/tokenorder/TokenLoad.java |   62 +
 .../asterix/fuzzyjoin/tokenorder/TokenRank.java |   31 +
 .../tokenorder/TokenRankBufferedFrequency.java  |   75 +
 .../tokenorder/TokenRankFrequency.java          |   61 +
 .../asterix/fuzzyjoin/tests/FuzzyJoinTest.java  |   65 -
 .../fuzzyjoin/tests/FuzzyJoinTestUtil.java      |   63 -
 .../fuzzyjoin/tests/NGramTokenizerTest.java     |  239 --
 .../fuzzyjoin/tests/WordTokenizerTest.java      |  214 --
 .../tests/dataset/AbstractDataset.java          |  158 -
 .../dataset/AbstractTokenizableDataset.java     |   20 -
 .../fuzzyjoin/tests/dataset/DBLPDataset.java    |   36 -
 .../tests/dataset/DBLPSmallDataset.java         |   26 -
 .../tests/dataset/IntArrayBagSmallDataset.java  |   56 -
 .../tests/dataset/IntArraySetSmallDataset.java  |   56 -
 .../fuzzyjoin/tests/dataset/PUBDataset.java     |   41 -
 .../tests/dataset/PUBSmallDataset.java          |   26 -
 .../tests/dataset/PublicationsDataset.java      |   80 -
 .../dataset/UsersVisitorsSmallDataset.java      |   67 -
 .../asterix/fuzzyjoin/tests/FuzzyJoinTest.java  |   65 +
 .../fuzzyjoin/tests/FuzzyJoinTestUtil.java      |   63 +
 .../fuzzyjoin/tests/NGramTokenizerTest.java     |  239 ++
 .../fuzzyjoin/tests/WordTokenizerTest.java      |  214 ++
 .../tests/dataset/AbstractDataset.java          |  158 +
 .../dataset/AbstractTokenizableDataset.java     |   20 +
 .../fuzzyjoin/tests/dataset/DBLPDataset.java    |   36 +
 .../tests/dataset/DBLPSmallDataset.java         |   26 +
 .../tests/dataset/IntArrayBagSmallDataset.java  |   56 +
 .../tests/dataset/IntArraySetSmallDataset.java  |   56 +
 .../fuzzyjoin/tests/dataset/PUBDataset.java     |   41 +
 .../tests/dataset/PUBSmallDataset.java          |   26 +
 .../tests/dataset/PublicationsDataset.java      |   80 +
 .../dataset/UsersVisitorsSmallDataset.java      |   67 +
 .../installer/command/AbstractCommand.java      |   42 -
 .../asterix/installer/command/AlterCommand.java |   72 -
 .../installer/command/BackupCommand.java        |   76 -
 .../installer/command/CommandConfig.java        |   19 -
 .../installer/command/CommandHandler.java       |   84 -
 .../installer/command/ConfigureCommand.java     |  120 -
 .../installer/command/CreateCommand.java        |  114 -
 .../installer/command/DeleteCommand.java        |   63 -
 .../installer/command/DescribeCommand.java      |   91 -
 .../asterix/installer/command/HelpCommand.java  |  105 -
 .../ics/asterix/installer/command/ICommand.java |   41 -
 .../installer/command/InstallCommand.java       |   72 -
 .../asterix/installer/command/LogCommand.java   |  105 -
 .../installer/command/RestoreCommand.java       |   72 -
 .../installer/command/ShutdownCommand.java      |   45 -
 .../asterix/installer/command/StartCommand.java |   76 -
 .../installer/command/StartNodeCommand.java     |  101 -
 .../asterix/installer/command/StopCommand.java  |   96 -
 .../installer/command/StopNodeCommand.java      |  108 -
 .../installer/command/UninstallCommand.java     |   74 -
 .../installer/command/ValidateCommand.java      |  266 --
 .../installer/command/VersionCommand.java       |   48 -
 .../installer/driver/InstallerDriver.java       |  129 -
 .../asterix/installer/driver/InstallerUtil.java |   76 -
 .../installer/error/InstallerException.java     |   25 -
 .../asterix/installer/error/OutputHandler.java  |   89 -
 .../installer/command/AbstractCommand.java      |   42 +
 .../asterix/installer/command/AlterCommand.java |   72 +
 .../installer/command/BackupCommand.java        |   76 +
 .../installer/command/CommandConfig.java        |   19 +
 .../installer/command/CommandHandler.java       |   84 +
 .../installer/command/ConfigureCommand.java     |  120 +
 .../installer/command/CreateCommand.java        |  114 +
 .../installer/command/DeleteCommand.java        |   63 +
 .../installer/command/DescribeCommand.java      |   91 +
 .../asterix/installer/command/HelpCommand.java  |  105 +
 .../asterix/installer/command/ICommand.java     |   41 +
 .../installer/command/InstallCommand.java       |   72 +
 .../asterix/installer/command/LogCommand.java   |  105 +
 .../installer/command/RestoreCommand.java       |   72 +
 .../installer/command/ShutdownCommand.java      |   45 +
 .../asterix/installer/command/StartCommand.java |   76 +
 .../installer/command/StartNodeCommand.java     |  101 +
 .../asterix/installer/command/StopCommand.java  |   96 +
 .../installer/command/StopNodeCommand.java      |  108 +
 .../installer/command/UninstallCommand.java     |   74 +
 .../installer/command/ValidateCommand.java      |  266 ++
 .../installer/command/VersionCommand.java       |   48 +
 .../installer/driver/InstallerDriver.java       |  129 +
 .../asterix/installer/driver/InstallerUtil.java |   76 +
 .../installer/error/InstallerException.java     |   25 +
 .../asterix/installer/error/OutputHandler.java  |   89 +
 .../test/AsterixClusterLifeCycleIT.java         |  187 --
 .../test/AsterixExternalLibraryIT.java          |   78 -
 .../test/AsterixInstallerIntegrationUtil.java   |  218 --
 .../installer/test/AsterixLifecycleIT.java      |  144 -
 .../installer/transaction/DmlRecoveryIT.java    |  130 -
 .../installer/transaction/RecoveryIT.java       |  112 -
 .../test/AsterixClusterLifeCycleIT.java         |  187 ++
 .../test/AsterixExternalLibraryIT.java          |   78 +
 .../test/AsterixInstallerIntegrationUtil.java   |  218 ++
 .../installer/test/AsterixLifecycleIT.java      |  144 +
 .../installer/transaction/DmlRecoveryIT.java    |  130 +
 .../installer/transaction/RecoveryIT.java       |  112 +
 .../asterix/lexergenerator/LexerGenerator.java  |  202 --
 .../lexergenerator/LexerGeneratorMojo.java      |   92 -
 .../ics/asterix/lexergenerator/LexerNode.java   |  248 --
 .../lexergenerator/NodeChainFactory.java        |   43 -
 .../uci/ics/asterix/lexergenerator/Token.java   |   70 -
 .../ics/asterix/lexergenerator/TokenAux.java    |   25 -
 .../rulegenerators/RuleGenerator.java           |   21 -
 .../RuleGeneratorAnythingUntil.java             |   31 -
 .../RuleGeneratorCaseInsensitiveChar.java       |   34 -
 .../rulegenerators/RuleGeneratorChar.java       |   31 -
 .../RuleGeneratorCharOrNothing.java             |   33 -
 .../RuleGeneratorDigitSequence.java             |   29 -
 .../rulegenerators/RuleGeneratorNothing.java    |   29 -
 .../rulegenerators/RuleGeneratorSign.java       |   30 -
 .../RuleGeneratorSignOrNothing.java             |   32 -
 .../rulegenerators/RuleGeneratorString.java     |   33 -
 .../rulegenerators/RuleGeneratorToken.java      |   31 -
 .../ics/asterix/lexergenerator/rules/Rule.java  |   29 -
 .../lexergenerator/rules/RuleAnythingUntil.java |   64 -
 .../asterix/lexergenerator/rules/RuleChar.java  |   70 -
 .../lexergenerator/rules/RuleDigitSequence.java |   57 -
 .../lexergenerator/rules/RuleEpsilon.java       |   54 -
 .../lexergenerator/rules/RulePartial.java       |   71 -
 .../asterix/lexergenerator/LexerGenerator.java  |  202 ++
 .../lexergenerator/LexerGeneratorMojo.java      |   92 +
 .../asterix/lexergenerator/LexerNode.java       |  248 ++
 .../lexergenerator/NodeChainFactory.java        |   43 +
 .../apache/asterix/lexergenerator/Token.java    |   70 +
 .../apache/asterix/lexergenerator/TokenAux.java |   25 +
 .../rulegenerators/RuleGenerator.java           |   21 +
 .../RuleGeneratorAnythingUntil.java             |   31 +
 .../RuleGeneratorCaseInsensitiveChar.java       |   34 +
 .../rulegenerators/RuleGeneratorChar.java       |   31 +
 .../RuleGeneratorCharOrNothing.java             |   33 +
 .../RuleGeneratorDigitSequence.java             |   29 +
 .../rulegenerators/RuleGeneratorNothing.java    |   29 +
 .../rulegenerators/RuleGeneratorSign.java       |   30 +
 .../RuleGeneratorSignOrNothing.java             |   32 +
 .../rulegenerators/RuleGeneratorString.java     |   33 +
 .../rulegenerators/RuleGeneratorToken.java      |   31 +
 .../asterix/lexergenerator/rules/Rule.java      |   29 +
 .../lexergenerator/rules/RuleAnythingUntil.java |   64 +
 .../asterix/lexergenerator/rules/RuleChar.java  |   70 +
 .../lexergenerator/rules/RuleDigitSequence.java |   57 +
 .../lexergenerator/rules/RuleEpsilon.java       |   54 +
 .../lexergenerator/rules/RulePartial.java       |   71 +
 .../ics/asterix/lexergenerator/Fixtures.java    |  114 -
 .../lexergenerator/LexerNodeAddRuleTest.java    |   65 -
 .../lexergenerator/LexerNodeAppendNodeTest.java |   95 -
 .../lexergenerator/LexerNodeAppendRuleTest.java |   61 -
 .../LexerNodeAuxFunctionsTest.java              |  125 -
 .../lexergenerator/LexerNodeCloneTest.java      |   72 -
 .../lexergenerator/LexerNodeMergeNodeTest.java  |  102 -
 .../apache/asterix/lexergenerator/Fixtures.java |  114 +
 .../lexergenerator/LexerNodeAddRuleTest.java    |   65 +
 .../lexergenerator/LexerNodeAppendNodeTest.java |   95 +
 .../lexergenerator/LexerNodeAppendRuleTest.java |   61 +
 .../LexerNodeAuxFunctionsTest.java              |  125 +
 .../lexergenerator/LexerNodeCloneTest.java      |   72 +
 .../lexergenerator/LexerNodeMergeNodeTest.java  |  102 +
 .../recordmanagergenerator/Generator.java       |  164 -
 .../RecordManagerGeneratorMojo.java             |  145 -
 .../recordmanagergenerator/RecordType.java      |  423 ---
 .../recordmanagergenerator/Generator.java       |  164 +
 .../RecordManagerGeneratorMojo.java             |  145 +
 .../recordmanagergenerator/RecordType.java      |  423 +++
 .../ics/asterix/metadata/GarbageCollector.java  |   39 -
 .../ics/asterix/metadata/IDatasetDetails.java   |   49 -
 .../uci/ics/asterix/metadata/MetadataCache.java |  588 ----
 .../ics/asterix/metadata/MetadataException.java |   38 -
 .../ics/asterix/metadata/MetadataManager.java   |  948 ------
 .../uci/ics/asterix/metadata/MetadataNode.java  | 1596 ---------
 .../metadata/MetadataTransactionContext.java    |  255 --
 .../metadata/api/IAsterixStateProxy.java        |   29 -
 .../asterix/metadata/api/IClusterManager.java   |   53 -
 .../asterix/metadata/api/IMetadataEntity.java   |   31 -
 .../api/IMetadataEntityTupleTranslator.java     |   54 -
 .../asterix/metadata/api/IMetadataIndex.java    |   83 -
 .../asterix/metadata/api/IMetadataManager.java  |  653 ----
 .../ics/asterix/metadata/api/IMetadataNode.java |  745 -----
 .../asterix/metadata/api/IValueExtractor.java   |   46 -
 .../metadata/bootstrap/AsterixStateProxy.java   |   55 -
 .../metadata/bootstrap/MetadataBootstrap.java   |  543 ----
 .../metadata/bootstrap/MetadataConstants.java   |   29 -
 .../metadata/bootstrap/MetadataIndex.java       |  281 --
 .../bootstrap/MetadataPrimaryIndexes.java       |  137 -
 .../metadata/bootstrap/MetadataRecordTypes.java |  568 ----
 .../bootstrap/MetadataSecondaryIndexes.java     |   64 -
 .../cluster/AbstractClusterManagementWork.java  |   54 -
 .../asterix/metadata/cluster/AddNodeWork.java   |   49 -
 .../metadata/cluster/AddNodeWorkResponse.java   |   41 -
 .../cluster/ClusterManagementWorkResponse.java  |   47 -
 .../metadata/cluster/ClusterManager.java        |  169 -
 .../metadata/cluster/RemoveNodeWork.java        |   50 -
 .../cluster/RemoveNodeWorkResponse.java         |   34 -
 .../metadata/dataset/hints/DatasetHints.java    |  128 -
 .../asterix/metadata/dataset/hints/IHint.java   |   42 -
 .../AqlCompiledMetadataDeclarations.java        |  272 --
 .../metadata/declared/AqlDataSource.java        |  205 --
 .../ics/asterix/metadata/declared/AqlIndex.java |   54 -
 .../declared/AqlLogicalPlanAndMetadataImpl.java |   60 -
 .../declared/AqlMetadataImplConfig.java         |   28 -
 .../metadata/declared/AqlMetadataProvider.java  | 2367 --------------
 .../asterix/metadata/declared/AqlSourceId.java  |   42 -
 .../metadata/declared/DatasetDataSource.java    |  120 -
 .../metadata/declared/FeedDataSource.java       |  124 -
 .../declared/FieldExtractingAdapter.java        |  166 -
 .../declared/FieldExtractingAdapterFactory.java |   82 -
 .../metadata/declared/FileSplitDataSink.java    |   46 -
 .../metadata/declared/FileSplitSinkId.java      |   37 -
 .../metadata/declared/LoadableDataSource.java   |  138 -
 .../metadata/declared/PKGeneratingAdapter.java  |  180 -
 .../declared/PKGeneratingAdapterFactory.java    |   82 -
 .../metadata/declared/ResultSetDataSink.java    |   47 -
 .../metadata/declared/ResultSetSinkId.java      |   35 -
 .../entities/AsterixBuiltinTypeMap.java         |   83 -
 .../metadata/entities/CompactionPolicy.java     |   78 -
 .../ics/asterix/metadata/entities/Dataset.java  |  144 -
 .../metadata/entities/DatasourceAdapter.java    |   62 -
 .../ics/asterix/metadata/entities/Datatype.java |   67 -
 .../asterix/metadata/entities/Dataverse.java    |   59 -
 .../entities/ExternalDatasetDetails.java        |  156 -
 .../asterix/metadata/entities/ExternalFile.java |  140 -
 .../uci/ics/asterix/metadata/entities/Feed.java |  112 -
 .../asterix/metadata/entities/FeedPolicy.java   |   95 -
 .../ics/asterix/metadata/entities/Function.java |   93 -
 .../ics/asterix/metadata/entities/Index.java    |  247 --
 .../entities/InternalDatasetDetails.java        |  258 --
 .../ics/asterix/metadata/entities/Library.java  |   50 -
 .../uci/ics/asterix/metadata/entities/Node.java |   48 -
 .../asterix/metadata/entities/NodeGroup.java    |   56 -
 .../asterix/metadata/entities/PrimaryFeed.java  |   76 -
 .../metadata/entities/SecondaryFeed.java        |   60 -
 .../AbstractTupleTranslator.java                |   66 -
 .../CompactionPolicyTupleTranslator.java        |  124 -
 .../DatasetTupleTranslator.java                 |  403 ---
 .../DatasourceAdapterTupleTranslator.java       |  138 -
 .../DatatypeTupleTranslator.java                |  383 ---
 .../DataverseTupleTranslator.java               |  119 -
 .../ExternalFileTupleTranslator.java            |  179 -
 .../FeedPolicyTupleTranslator.java              |  200 --
 .../FeedTupleTranslator.java                    |  313 --
 .../FunctionTupleTranslator.java                |  193 --
 .../IndexTupleTranslator.java                   |  304 --
 .../LibraryTupleTranslator.java                 |  120 -
 .../NodeGroupTupleTranslator.java               |  129 -
 .../NodeTupleTranslator.java                    |  135 -
 .../external/ExternalFileIndexAccessor.java     |  143 -
 .../ExternalLoopkupOperatorDiscriptor.java      |  126 -
 .../external/FilesIndexDescription.java         |   96 -
 .../metadata/external/IAdapterFactory.java      |   94 -
 .../metadata/external/IControlledAdapter.java   |   61 -
 .../external/IControlledAdapterFactory.java     |   30 -
 .../metadata/external/IndexingConstants.java    |  203 --
 .../feeds/AbstractDatasourceAdapter.java        |   39 -
 .../feeds/AbstractFeedDatasourceAdapter.java    |   34 -
 .../asterix/metadata/feeds/AdapterExecutor.java |   70 -
 .../metadata/feeds/AdapterIdentifier.java       |   62 -
 .../metadata/feeds/AdapterRuntimeManager.java   |  124 -
 .../metadata/feeds/BuiltinFeedPolicies.java     |  188 --
 .../feeds/CollectTransformFeedFrameWriter.java  |  116 -
 .../asterix/metadata/feeds/EndFeedMessage.java  |   38 -
 .../ExternalDataScanOperatorDescriptor.java     |   70 -
 .../metadata/feeds/FeedActivityIdFactory.java   |   36 -
 .../feeds/FeedCollectOperatorDescriptor.java    |  166 -
 .../feeds/FeedCollectOperatorNodePushable.java  |  204 --
 .../metadata/feeds/FeedConnectionManager.java   |  105 -
 .../metadata/feeds/FeedFrameTupleDecorator.java |  104 -
 .../feeds/FeedIntakeOperatorDescriptor.java     |  131 -
 .../feeds/FeedIntakeOperatorNodePushable.java   |  209 --
 .../feeds/FeedLifecycleEventSubscriber.java     |   62 -
 .../metadata/feeds/FeedManagerElectMessage.java |   61 -
 .../ics/asterix/metadata/feeds/FeedMessage.java |   43 -
 .../feeds/FeedMessageOperatorDescriptor.java    |   49 -
 .../feeds/FeedMessageOperatorNodePushable.java  |  294 --
 .../feeds/FeedMetaComputeNodePushable.java      |  221 --
 .../metadata/feeds/FeedMetaNodePushable.java    |  184 --
 .../feeds/FeedMetaOperatorDescriptor.java       |  128 -
 .../feeds/FeedMetaStoreNodePushable.java        |  212 --
 .../metadata/feeds/FeedPolicyEnforcer.java      |   46 -
 .../metadata/feeds/FeedSubscriptionManager.java |   72 -
 .../ics/asterix/metadata/feeds/FeedUtil.java    |  586 ----
 .../asterix/metadata/feeds/FeedWorkManager.java |   46 -
 .../metadata/feeds/IAdapterExecutor.java        |   36 -
 .../metadata/feeds/IFeedAdapterFactory.java     |   26 -
 .../asterix/metadata/feeds/IFeedMessage.java    |   28 -
 .../metadata/feeds/IPullBasedFeedAdapter.java   |   30 -
 .../metadata/feeds/ITypedAdapterFactory.java    |   27 -
 .../asterix/metadata/feeds/MessageListener.java |  134 -
 .../metadata/feeds/PrepareStallMessage.java     |   66 -
 .../feeds/RemoteSocketMessageListener.java      |  178 -
 .../metadata/feeds/SocketMessageListener.java   |  158 -
 .../feeds/TerminateDataFlowMessage.java         |   50 -
 .../asterix/metadata/feeds/XAQLFeedMessage.java |   64 -
 .../AsterixExternalScalarFunctionInfo.java      |   38 -
 .../functions/ExternalFunctionCompilerUtil.java |  223 --
 .../functions/ExternalLibraryManager.java       |   54 -
 .../functions/MetadataBuiltinFunctions.java     |  187 --
 .../ics/asterix/metadata/utils/DatasetLock.java |  102 -
 .../asterix/metadata/utils/DatasetUtils.java    |  281 --
 .../utils/ExternalDatasetAccessManager.java     |  118 -
 .../utils/ExternalDatasetsRegistry.java         |  134 -
 .../metadata/utils/MetadataLockManager.java     |  595 ----
 .../DatasetNameValueExtractor.java              |   44 -
 .../DatatypeNameValueExtractor.java             |   64 -
 .../MetadataEntityValueExtractor.java           |   41 -
 .../NestedDatatypeNameValueExtractor.java       |   60 -
 .../TupleCopyValueExtractor.java                |   57 -
 .../asterix/metadata/GarbageCollector.java      |   39 +
 .../asterix/metadata/IDatasetDetails.java       |   49 +
 .../apache/asterix/metadata/MetadataCache.java  |  588 ++++
 .../asterix/metadata/MetadataException.java     |   38 +
 .../asterix/metadata/MetadataManager.java       |  948 ++++++
 .../apache/asterix/metadata/MetadataNode.java   | 1596 +++++++++
 .../metadata/MetadataTransactionContext.java    |  255 ++
 .../metadata/api/IAsterixStateProxy.java        |   29 +
 .../asterix/metadata/api/IClusterManager.java   |   53 +
 .../asterix/metadata/api/IMetadataEntity.java   |   31 +
 .../api/IMetadataEntityTupleTranslator.java     |   54 +
 .../asterix/metadata/api/IMetadataIndex.java    |   83 +
 .../asterix/metadata/api/IMetadataManager.java  |  653 ++++
 .../asterix/metadata/api/IMetadataNode.java     |  745 +++++
 .../asterix/metadata/api/IValueExtractor.java   |   46 +
 .../metadata/bootstrap/AsterixStateProxy.java   |   55 +
 .../metadata/bootstrap/MetadataBootstrap.java   |  543 ++++
 .../metadata/bootstrap/MetadataConstants.java   |   29 +
 .../metadata/bootstrap/MetadataIndex.java       |  281 ++
 .../bootstrap/MetadataPrimaryIndexes.java       |  137 +
 .../metadata/bootstrap/MetadataRecordTypes.java |  568 ++++
 .../bootstrap/MetadataSecondaryIndexes.java     |   64 +
 .../cluster/AbstractClusterManagementWork.java  |   54 +
 .../asterix/metadata/cluster/AddNodeWork.java   |   49 +
 .../metadata/cluster/AddNodeWorkResponse.java   |   41 +
 .../cluster/ClusterManagementWorkResponse.java  |   47 +
 .../metadata/cluster/ClusterManager.java        |  169 +
 .../metadata/cluster/RemoveNodeWork.java        |   50 +
 .../cluster/RemoveNodeWorkResponse.java         |   34 +
 .../metadata/dataset/hints/DatasetHints.java    |  128 +
 .../asterix/metadata/dataset/hints/IHint.java   |   42 +
 .../AqlCompiledMetadataDeclarations.java        |  272 ++
 .../metadata/declared/AqlDataSource.java        |  205 ++
 .../asterix/metadata/declared/AqlIndex.java     |   54 +
 .../declared/AqlLogicalPlanAndMetadataImpl.java |   60 +
 .../declared/AqlMetadataImplConfig.java         |   28 +
 .../metadata/declared/AqlMetadataProvider.java  | 2367 ++++++++++++++
 .../asterix/metadata/declared/AqlSourceId.java  |   42 +
 .../metadata/declared/DatasetDataSource.java    |  120 +
 .../metadata/declared/FeedDataSource.java       |  124 +
 .../declared/FieldExtractingAdapter.java        |  166 +
 .../declared/FieldExtractingAdapterFactory.java |   82 +
 .../metadata/declared/FileSplitDataSink.java    |   46 +
 .../metadata/declared/FileSplitSinkId.java      |   37 +
 .../metadata/declared/LoadableDataSource.java   |  138 +
 .../metadata/declared/PKGeneratingAdapter.java  |  180 +
 .../declared/PKGeneratingAdapterFactory.java    |   82 +
 .../metadata/declared/ResultSetDataSink.java    |   47 +
 .../metadata/declared/ResultSetSinkId.java      |   35 +
 .../entities/AsterixBuiltinTypeMap.java         |   83 +
 .../metadata/entities/CompactionPolicy.java     |   78 +
 .../asterix/metadata/entities/Dataset.java      |  144 +
 .../metadata/entities/DatasourceAdapter.java    |   62 +
 .../asterix/metadata/entities/Datatype.java     |   67 +
 .../asterix/metadata/entities/Dataverse.java    |   59 +
 .../entities/ExternalDatasetDetails.java        |  156 +
 .../asterix/metadata/entities/ExternalFile.java |  140 +
 .../apache/asterix/metadata/entities/Feed.java  |  112 +
 .../asterix/metadata/entities/FeedPolicy.java   |   95 +
 .../asterix/metadata/entities/Function.java     |   93 +
 .../apache/asterix/metadata/entities/Index.java |  247 ++
 .../entities/InternalDatasetDetails.java        |  258 ++
 .../asterix/metadata/entities/Library.java      |   50 +
 .../apache/asterix/metadata/entities/Node.java  |   48 +
 .../asterix/metadata/entities/NodeGroup.java    |   56 +
 .../asterix/metadata/entities/PrimaryFeed.java  |   76 +
 .../metadata/entities/SecondaryFeed.java        |   60 +
 .../AbstractTupleTranslator.java                |   66 +
 .../CompactionPolicyTupleTranslator.java        |  124 +
 .../DatasetTupleTranslator.java                 |  403 +++
 .../DatasourceAdapterTupleTranslator.java       |  138 +
 .../DatatypeTupleTranslator.java                |  383 +++
 .../DataverseTupleTranslator.java               |  119 +
 .../ExternalFileTupleTranslator.java            |  179 +
 .../FeedPolicyTupleTranslator.java              |  200 ++
 .../FeedTupleTranslator.java                    |  313 ++
 .../FunctionTupleTranslator.java                |  193 ++
 .../IndexTupleTranslator.java                   |  304 ++
 .../LibraryTupleTranslator.java                 |  120 +
 .../NodeGroupTupleTranslator.java               |  129 +
 .../NodeTupleTranslator.java                    |  135 +
 .../external/ExternalFileIndexAccessor.java     |  143 +
 .../ExternalLoopkupOperatorDiscriptor.java      |  126 +
 .../external/FilesIndexDescription.java         |   96 +
 .../metadata/external/IAdapterFactory.java      |   94 +
 .../metadata/external/IControlledAdapter.java   |   61 +
 .../external/IControlledAdapterFactory.java     |   30 +
 .../metadata/external/IndexingConstants.java    |  203 ++
 .../feeds/AbstractDatasourceAdapter.java        |   39 +
 .../feeds/AbstractFeedDatasourceAdapter.java    |   34 +
 .../asterix/metadata/feeds/AdapterExecutor.java |   70 +
 .../metadata/feeds/AdapterIdentifier.java       |   62 +
 .../metadata/feeds/AdapterRuntimeManager.java   |  124 +
 .../metadata/feeds/BuiltinFeedPolicies.java     |  188 ++
 .../feeds/CollectTransformFeedFrameWriter.java  |  116 +
 .../asterix/metadata/feeds/EndFeedMessage.java  |   38 +
 .../ExternalDataScanOperatorDescriptor.java     |   70 +
 .../metadata/feeds/FeedActivityIdFactory.java   |   36 +
 .../feeds/FeedCollectOperatorDescriptor.java    |  166 +
 .../feeds/FeedCollectOperatorNodePushable.java  |  204 ++
 .../metadata/feeds/FeedConnectionManager.java   |  105 +
 .../metadata/feeds/FeedFrameTupleDecorator.java |  104 +
 .../feeds/FeedIntakeOperatorDescriptor.java     |  131 +
 .../feeds/FeedIntakeOperatorNodePushable.java   |  209 ++
 .../feeds/FeedLifecycleEventSubscriber.java     |   62 +
 .../metadata/feeds/FeedManagerElectMessage.java |   61 +
 .../asterix/metadata/feeds/FeedMessage.java     |   43 +
 .../feeds/FeedMessageOperatorDescriptor.java    |   49 +
 .../feeds/FeedMessageOperatorNodePushable.java  |  294 ++
 .../feeds/FeedMetaComputeNodePushable.java      |  221 ++
 .../metadata/feeds/FeedMetaNodePushable.java    |  184 ++
 .../feeds/FeedMetaOperatorDescriptor.java       |  128 +
 .../feeds/FeedMetaStoreNodePushable.java        |  212 ++
 .../metadata/feeds/FeedPolicyEnforcer.java      |   46 +
 .../metadata/feeds/FeedSubscriptionManager.java |   72 +
 .../apache/asterix/metadata/feeds/FeedUtil.java |  586 ++++
 .../asterix/metadata/feeds/FeedWorkManager.java |   46 +
 .../metadata/feeds/IAdapterExecutor.java        |   36 +
 .../metadata/feeds/IFeedAdapterFactory.java     |   26 +
 .../asterix/metadata/feeds/IFeedMessage.java    |   28 +
 .../metadata/feeds/IPullBasedFeedAdapter.java   |   30 +
 .../metadata/feeds/ITypedAdapterFactory.java    |   27 +
 .../asterix/metadata/feeds/MessageListener.java |  134 +
 .../metadata/feeds/PrepareStallMessage.java     |   66 +
 .../feeds/RemoteSocketMessageListener.java      |  178 +
 .../metadata/feeds/SocketMessageListener.java   |  158 +
 .../feeds/TerminateDataFlowMessage.java         |   50 +
 .../asterix/metadata/feeds/XAQLFeedMessage.java |   64 +
 .../AsterixExternalScalarFunctionInfo.java      |   38 +
 .../functions/ExternalFunctionCompilerUtil.java |  223 ++
 .../functions/ExternalLibraryManager.java       |   54 +
 .../functions/MetadataBuiltinFunctions.java     |  187 ++
 .../asterix/metadata/utils/DatasetLock.java     |  102 +
 .../asterix/metadata/utils/DatasetUtils.java    |  281 ++
 .../utils/ExternalDatasetAccessManager.java     |  118 +
 .../utils/ExternalDatasetsRegistry.java         |  134 +
 .../metadata/utils/MetadataLockManager.java     |  595 ++++
 .../DatasetNameValueExtractor.java              |   44 +
 .../DatatypeNameValueExtractor.java             |   64 +
 .../MetadataEntityValueExtractor.java           |   41 +
 .../NestedDatatypeNameValueExtractor.java       |   60 +
 .../TupleCopyValueExtractor.java                |   57 +
 .../asterix/builders/AbstractListBuilder.java   |  124 -
 .../asterix/builders/AbvsBuilderFactory.java    |   28 -
 .../ics/asterix/builders/IARecordBuilder.java   |   72 -
 .../asterix/builders/IAsterixListBuilder.java   |   43 -
 .../asterix/builders/ListBuilderFactory.java    |   30 -
 .../asterix/builders/OrderedListBuilder.java    |   35 -
 .../uci/ics/asterix/builders/RecordBuilder.java |  289 --
 .../asterix/builders/RecordBuilderFactory.java  |   26 -
 .../asterix/builders/UnorderedListBuilder.java  |   24 -
 .../dataflow/data/common/AListElementToken.java |   70 -
 .../data/common/AListElementTokenFactory.java   |   28 -
 .../common/AOrderedListBinaryTokenizer.java     |   88 -
 .../AOrderedListBinaryTokenizerFactory.java     |   34 -
 .../common/AUnorderedListBinaryTokenizer.java   |   36 -
 .../AUnorderedListBinaryTokenizerFactory.java   |   34 -
 .../data/common/AqlExpressionTypeComputer.java  |   97 -
 .../AqlMergeAggregationExpressionFactory.java   |   58 -
 .../data/common/AqlNullableTypeComputer.java    |   51 -
 .../AqlPartialAggregationTypeComputer.java      |   55 -
 .../common/IBinaryTokenizerFactoryProvider.java |   25 -
 .../dataflow/data/common/SerializationUtil.java |   31 -
 .../data/nontagged/AqlNullWriterFactory.java    |   49 -
 .../dataflow/data/nontagged/Coordinate.java     |   21 -
 .../comparators/ABinaryComparator.java          |   50 -
 .../ACirclePartialBinaryComparatorFactory.java  |   82 -
 ...ADurationPartialBinaryComparatorFactory.java |   59 -
 ...AIntervalPartialBinaryComparatorFactory.java |   61 -
 .../ALinePartialBinaryComparatorFactory.java    |   88 -
 .../AObjectAscBinaryComparatorFactory.java      |  313 --
 .../AObjectDescBinaryComparatorFactory.java     |   42 -
 .../APoint3DPartialBinaryComparatorFactory.java |   71 -
 .../APointPartialBinaryComparatorFactory.java   |   61 -
 .../APolygonPartialBinaryComparatorFactory.java |   84 -
 ...RectanglePartialBinaryComparatorFactory.java |   61 -
 .../AUUIDPartialBinaryComparatorFactory.java    |   46 -
 .../BooleanBinaryComparatorFactory.java         |   47 -
 .../ListItemBinaryComparatorFactory.java        |  176 -
 .../LongBinaryComparatorFactory.java            |   43 -
 .../comparators/RawBinaryComparatorFactory.java |   47 -
 .../AMurmurHash3BinaryHashFunctionFamily.java   |  104 -
 .../hash/AObjectBinaryHashFunctionFactory.java  |   43 -
 .../hash/BooleanBinaryHashFunctionFactory.java  |   42 -
 .../hash/DoubleBinaryHashFunctionFactory.java   |   42 -
 .../hash/ListItemBinaryHashFunctionFactory.java |   95 -
 .../hash/LongBinaryHashFunctionFactory.java     |   42 -
 ...AWrappedAscNormalizedKeyComputerFactory.java |   47 -
 ...WrappedDescNormalizedKeyComputerFactory.java |   47 -
 .../printers/ABinaryBase64Printer.java          |  147 -
 .../nontagged/printers/ABinaryHexPrinter.java   |   55 -
 .../printers/ABinaryPrinterFactory.java         |   31 -
 .../nontagged/printers/ABooleanPrinter.java     |   36 -
 .../printers/ABooleanPrinterFactory.java        |   30 -
 .../data/nontagged/printers/ACirclePrinter.java |   42 -
 .../printers/ACirclePrinterFactory.java         |   30 -
 .../data/nontagged/printers/ADatePrinter.java   |   53 -
 .../nontagged/printers/ADatePrinterFactory.java |   30 -
 .../nontagged/printers/ADateTimePrinter.java    |   53 -
 .../printers/ADateTimePrinterFactory.java       |   30 -
 .../printers/ADayTimeDurationPrinter.java       |   90 -
 .../ADayTimeDurationPrinterFactory.java         |   30 -
 .../data/nontagged/printers/ADoublePrinter.java |   36 -
 .../printers/ADoublePrinterFactory.java         |   30 -
 .../nontagged/printers/ADurationPrinter.java    |  102 -
 .../printers/ADurationPrinterFactory.java       |   30 -
 .../data/nontagged/printers/AFloatPrinter.java  |   36 -
 .../printers/AFloatPrinterFactory.java          |   30 -
 .../data/nontagged/printers/AInt16Printer.java  |   63 -
 .../printers/AInt16PrinterFactory.java          |   30 -
 .../data/nontagged/printers/AInt32Printer.java  |   62 -
 .../printers/AInt32PrinterFactory.java          |   30 -
 .../data/nontagged/printers/AInt64Printer.java  |   43 -
 .../printers/AInt64PrinterFactory.java          |   30 -
 .../data/nontagged/printers/AInt8Printer.java   |   63 -
 .../nontagged/printers/AInt8PrinterFactory.java |   30 -
 .../nontagged/printers/AIntervalPrinter.java    |   70 -
 .../printers/AIntervalPrinterFactory.java       |   30 -
 .../data/nontagged/printers/ALinePrinter.java   |   44 -
 .../nontagged/printers/ALinePrinterFactory.java |   30 -
 .../data/nontagged/printers/ANullPrinter.java   |   35 -
 .../nontagged/printers/ANullPrinterFactory.java |   30 -
 .../printers/ANullableFieldPrinterFactory.java  |   62 -
 .../data/nontagged/printers/AObjectPrinter.java |  173 -
 .../printers/AObjectPrinterFactory.java         |   30 -
 .../printers/AOrderedlistPrinterFactory.java    |   69 -
 .../nontagged/printers/APoint3DPrinter.java     |   42 -
 .../printers/APoint3DPrinterFactory.java        |   30 -
 .../data/nontagged/printers/APointPrinter.java  |   40 -
 .../printers/APointPrinterFactory.java          |   30 -
 .../nontagged/printers/APolygonPrinter.java     |   49 -
 .../printers/APolygonPrinterFactory.java        |   30 -
 .../printers/ARecordPrinterFactory.java         |   69 -
 .../nontagged/printers/ARectanglePrinter.java   |   44 -
 .../printers/ARectanglePrinterFactory.java      |   30 -
 .../data/nontagged/printers/AStringPrinter.java |   41 -
 .../printers/AStringPrinterFactory.java         |   30 -
 .../data/nontagged/printers/ATimePrinter.java   |   53 -
 .../nontagged/printers/ATimePrinterFactory.java |   30 -
 .../data/nontagged/printers/AUUIDPrinter.java   |   44 -
 .../nontagged/printers/AUUIDPrinterFactory.java |   32 -
 .../printers/AUnionPrinterFactory.java          |   72 -
 .../printers/AUnorderedlistPrinterFactory.java  |   69 -
 .../printers/AYearMonthDurationPrinter.java     |   75 -
 .../AYearMonthDurationPrinterFactory.java       |   33 -
 .../data/nontagged/printers/PrintTools.java     |  155 -
 .../printers/ShortWithoutTypeInfoPrinter.java   |   43 -
 .../ShortWithoutTypeInfoPrinterFactory.java     |   30 -
 .../nontagged/printers/csv/ABooleanPrinter.java |   36 -
 .../printers/csv/ABooleanPrinterFactory.java    |   30 -
 .../nontagged/printers/csv/ACirclePrinter.java  |   36 -
 .../printers/csv/ACirclePrinterFactory.java     |   30 -
 .../nontagged/printers/csv/ADatePrinter.java    |   36 -
 .../printers/csv/ADatePrinterFactory.java       |   30 -
 .../printers/csv/ADateTimePrinter.java          |   36 -
 .../printers/csv/ADateTimePrinterFactory.java   |   30 -
 .../printers/csv/ADayTimeDurationPrinter.java   |   39 -
 .../csv/ADayTimeDurationPrinterFactory.java     |   30 -
 .../nontagged/printers/csv/ADoublePrinter.java  |   36 -
 .../printers/csv/ADoublePrinterFactory.java     |   30 -
 .../printers/csv/ADurationPrinter.java          |   38 -
 .../printers/csv/ADurationPrinterFactory.java   |   30 -
 .../nontagged/printers/csv/AFloatPrinter.java   |   36 -
 .../printers/csv/AFloatPrinterFactory.java      |   30 -
 .../nontagged/printers/csv/AInt16Printer.java   |   36 -
 .../printers/csv/AInt16PrinterFactory.java      |   30 -
 .../nontagged/printers/csv/AInt32Printer.java   |   37 -
 .../printers/csv/AInt32PrinterFactory.java      |   30 -
 .../nontagged/printers/csv/AInt64Printer.java   |   35 -
 .../printers/csv/AInt64PrinterFactory.java      |   30 -
 .../nontagged/printers/csv/AInt8Printer.java    |   37 -
 .../printers/csv/AInt8PrinterFactory.java       |   30 -
 .../printers/csv/AIntervalPrinter.java          |   40 -
 .../printers/csv/AIntervalPrinterFactory.java   |   30 -
 .../nontagged/printers/csv/ALinePrinter.java    |   36 -
 .../printers/csv/ALinePrinterFactory.java       |   30 -
 .../nontagged/printers/csv/ANullPrinter.java    |   35 -
 .../printers/csv/ANullPrinterFactory.java       |   30 -
 .../csv/ANullableFieldPrinterFactory.java       |   63 -
 .../nontagged/printers/csv/AObjectPrinter.java  |  151 -
 .../printers/csv/AObjectPrinterFactory.java     |   30 -
 .../nontagged/printers/csv/APoint3DPrinter.java |   36 -
 .../printers/csv/APoint3DPrinterFactory.java    |   30 -
 .../nontagged/printers/csv/APointPrinter.java   |   36 -
 .../printers/csv/APointPrinterFactory.java      |   30 -
 .../nontagged/printers/csv/APolygonPrinter.java |   37 -
 .../printers/csv/APolygonPrinterFactory.java    |   30 -
 .../printers/csv/ARecordPrinterFactory.java     |   69 -
 .../printers/csv/ARectanglePrinter.java         |   36 -
 .../printers/csv/ARectanglePrinterFactory.java  |   30 -
 .../nontagged/printers/csv/AStringPrinter.java  |   41 -
 .../printers/csv/AStringPrinterFactory.java     |   30 -
 .../nontagged/printers/csv/ATimePrinter.java    |   38 -
 .../printers/csv/ATimePrinterFactory.java       |   30 -
 .../printers/csv/AUnionPrinterFactory.java      |   72 -
 .../printers/csv/AYearMonthDurationPrinter.java |   44 -
 .../csv/AYearMonthDurationPrinterFactory.java   |   33 -
 .../printers/json/ABooleanPrinter.java          |   36 -
 .../printers/json/ABooleanPrinterFactory.java   |   30 -
 .../nontagged/printers/json/ACirclePrinter.java |   42 -
 .../printers/json/ACirclePrinterFactory.java    |   30 -
 .../nontagged/printers/json/ADatePrinter.java   |   41 -
 .../printers/json/ADatePrinterFactory.java      |   30 -
 .../printers/json/ADateTimePrinter.java         |   40 -
 .../printers/json/ADateTimePrinterFactory.java  |   30 -
 .../printers/json/ADayTimeDurationPrinter.java  |   41 -
 .../json/ADayTimeDurationPrinterFactory.java    |   30 -
 .../nontagged/printers/json/ADoublePrinter.java |   36 -
 .../printers/json/ADoublePrinterFactory.java    |   30 -
 .../printers/json/ADurationPrinter.java         |   45 -
 .../printers/json/ADurationPrinterFactory.java  |   30 -
 .../nontagged/printers/json/AFloatPrinter.java  |   36 -
 .../printers/json/AFloatPrinterFactory.java     |   30 -
 .../nontagged/printers/json/AInt16Printer.java  |   39 -
 .../printers/json/AInt16PrinterFactory.java     |   30 -
 .../nontagged/printers/json/AInt32Printer.java  |   40 -
 .../printers/json/AInt32PrinterFactory.java     |   30 -
 .../nontagged/printers/json/AInt64Printer.java  |   37 -
 .../printers/json/AInt64PrinterFactory.java     |   30 -
 .../nontagged/printers/json/AInt8Printer.java   |   40 -
 .../printers/json/AInt8PrinterFactory.java      |   30 -
 .../printers/json/AIntervalPrinter.java         |   67 -
 .../printers/json/AIntervalPrinterFactory.java  |   30 -
 .../nontagged/printers/json/ALinePrinter.java   |   45 -
 .../printers/json/ALinePrinterFactory.java      |   30 -
 .../nontagged/printers/json/ANullPrinter.java   |   35 -
 .../printers/json/ANullPrinterFactory.java      |   30 -
 .../json/ANullableFieldPrinterFactory.java      |   63 -
 .../nontagged/printers/json/AObjectPrinter.java |  167 -
 .../printers/json/AObjectPrinterFactory.java    |   30 -
 .../json/AOrderedlistPrinterFactory.java        |   69 -
 .../printers/json/APoint3DPrinter.java          |   42 -
 .../printers/json/APoint3DPrinterFactory.java   |   30 -
 .../nontagged/printers/json/APointPrinter.java  |   40 -
 .../printers/json/APointPrinterFactory.java     |   30 -
 .../printers/json/APolygonPrinter.java          |   56 -
 .../printers/json/APolygonPrinterFactory.java   |   30 -
 .../printers/json/ARecordPrinterFactory.java    |   69 -
 .../printers/json/ARectanglePrinter.java        |   45 -
 .../printers/json/ARectanglePrinterFactory.java |   30 -
 .../nontagged/printers/json/AStringPrinter.java |   41 -
 .../printers/json/AStringPrinterFactory.java    |   30 -
 .../nontagged/printers/json/ATimePrinter.java   |   41 -
 .../printers/json/ATimePrinterFactory.java      |   30 -
 .../printers/json/AUnionPrinterFactory.java     |   72 -
 .../json/AUnorderedlistPrinterFactory.java      |   69 -
 .../json/AYearMonthDurationPrinter.java         |   47 -
 .../json/AYearMonthDurationPrinterFactory.java  |   33 -
 .../serde/ABinarySerializerDeserializer.java    |   55 -
 .../serde/ABooleanSerializerDeserializer.java   |   60 -
 .../serde/ACircleSerializerDeserializer.java    |   93 -
 .../serde/ADateSerializerDeserializer.java      |   82 -
 .../serde/ADateTimeSerializerDeserializer.java  |   95 -
 .../ADayTimeDurationSerializerDeserializer.java |   72 -
 .../serde/ADoubleSerializerDeserializer.java    |   60 -
 .../serde/ADurationSerializerDeserializer.java  |  100 -
 .../serde/AFloatSerializerDeserializer.java     |   54 -
 .../serde/AInt16SerializerDeserializer.java     |   57 -
 .../serde/AInt32SerializerDeserializer.java     |   49 -
 .../serde/AInt64SerializerDeserializer.java     |   60 -
 .../serde/AInt8SerializerDeserializer.java      |   53 -
 .../serde/AIntervalSerializerDeserializer.java  |  263 --
 .../serde/ALineSerializerDeserializer.java      |  106 -
 .../serde/ANullSerializerDeserializer.java      |   42 -
 .../serde/AObjectSerializerDeserializer.java    |  264 --
 .../AOrderedListSerializerDeserializer.java     |  172 -
 .../serde/APoint3DSerializerDeserializer.java   |  101 -
 .../serde/APointSerializerDeserializer.java     |   93 -
 .../serde/APolygonSerializerDeserializer.java   |   94 -
 .../serde/ARecordSerializerDeserializer.java    |  341 --
 .../serde/ARectangleSerializerDeserializer.java |  117 -
 .../serde/AStringSerializerDeserializer.java    |   52 -
 .../serde/ATimeSerializerDeserializer.java      |   81 -
 .../serde/ATypeSerializerDeserializer.java      |   43 -
 .../serde/AUUIDSerializerDeserializer.java      |   51 -
 .../AUUIDStringSerializerDeserializer.java      |   56 -
 .../AUnorderedListSerializerDeserializer.java   |  160 -
 ...YearMonthDurationSerializerDeserializer.java |   73 -
 .../serde/SerializerDeserializerUtil.java       |   76 -
 .../AqlPrimitiveValueProviderFactory.java       |   66 -
 .../ics/asterix/formats/base/IDataFormat.java   |   93 -
 .../AqlBinaryBooleanInspectorImpl.java          |   50 -
 .../AqlBinaryComparatorFactoryProvider.java     |  222 --
 .../AqlBinaryHashFunctionFactoryProvider.java   |   62 -
 .../AqlBinaryHashFunctionFamilyProvider.java    |   44 -
 .../nontagged/AqlBinaryIntegerInspector.java    |   41 -
 .../AqlBinaryTokenizerFactoryProvider.java      |   89 -
 .../nontagged/AqlCSVPrinterFactoryProvider.java |  145 -
 .../AqlJSONPrinterFactoryProvider.java          |  150 -
 .../AqlLinearizeComparatorFactoryProvider.java  |   99 -
 ...AqlNormalizedKeyComputerFactoryProvider.java |   99 -
 .../AqlPredicateEvaluatorFactoryProvider.java   |   78 -
 .../nontagged/AqlPrinterFactoryProvider.java    |  151 -
 .../AqlSerializerDeserializerProvider.java      |  215 --
 .../formats/nontagged/AqlTypeTraitProvider.java |  101 -
 .../NGramUTF8StringBinaryTokenizerFactory.java  |   47 -
 .../nontagged/UTF8StringLowercasePointable.java |  102 -
 .../edu/uci/ics/asterix/om/base/ABinary.java    |  137 -
 .../edu/uci/ics/asterix/om/base/ABitArray.java  |  149 -
 .../edu/uci/ics/asterix/om/base/ABoolean.java   |   86 -
 .../edu/uci/ics/asterix/om/base/ACircle.java    |   91 -
 .../ics/asterix/om/base/ACollectionCursor.java  |   70 -
 .../java/edu/uci/ics/asterix/om/base/ADate.java |  102 -
 .../edu/uci/ics/asterix/om/base/ADateTime.java  |  133 -
 .../ics/asterix/om/base/ADayTimeDuration.java   |   94 -
 .../edu/uci/ics/asterix/om/base/ADouble.java    |   84 -
 .../edu/uci/ics/asterix/om/base/ADuration.java  |  126 -
 .../edu/uci/ics/asterix/om/base/AFloat.java     |   84 -
 .../edu/uci/ics/asterix/om/base/AInt16.java     |   74 -
 .../edu/uci/ics/asterix/om/base/AInt32.java     |   99 -
 .../edu/uci/ics/asterix/om/base/AInt64.java     |   73 -
 .../java/edu/uci/ics/asterix/om/base/AInt8.java |   73 -
 .../edu/uci/ics/asterix/om/base/AInterval.java  |  159 -
 .../java/edu/uci/ics/asterix/om/base/ALine.java |   84 -
 .../uci/ics/asterix/om/base/AMutableBinary.java |   30 -
 .../uci/ics/asterix/om/base/AMutableCircle.java |   28 -
 .../uci/ics/asterix/om/base/AMutableDate.java   |   27 -
 .../ics/asterix/om/base/AMutableDateTime.java   |   27 -
 .../om/base/AMutableDayTimeDuration.java        |   27 -
 .../uci/ics/asterix/om/base/AMutableDouble.java |   27 -
 .../ics/asterix/om/base/AMutableDuration.java   |   28 -
 .../uci/ics/asterix/om/base/AMutableFloat.java  |   27 -
 .../uci/ics/asterix/om/base/AMutableInt16.java  |   27 -
 .../uci/ics/asterix/om/base/AMutableInt32.java  |   27 -
 .../uci/ics/asterix/om/base/AMutableInt64.java  |   27 -
 .../uci/ics/asterix/om/base/AMutableInt8.java   |   27 -
 .../ics/asterix/om/base/AMutableInterval.java   |   34 -
 .../uci/ics/asterix/om/base/AMutableLine.java   |   28 -
 .../asterix/om/base/AMutableOrderedList.java    |   39 -
 .../uci/ics/asterix/om/base/AMutablePoint.java  |   28 -
 .../ics/asterix/om/base/AMutablePoint3D.java    |   29 -
 .../ics/asterix/om/base/AMutablePolygon.java    |   27 -
 .../uci/ics/asterix/om/base/AMutableRecord.java |   29 -
 .../ics/asterix/om/base/AMutableRectangle.java  |   28 -
 .../uci/ics/asterix/om/base/AMutableString.java |   26 -
 .../uci/ics/asterix/om/base/AMutableTime.java   |   27 -
 .../uci/ics/asterix/om/base/AMutableUUID.java   |   60 -
 .../asterix/om/base/AMutableUnorderedList.java  |   38 -
 .../om/base/AMutableYearMonthDuration.java      |   27 -
 .../java/edu/uci/ics/asterix/om/base/ANull.java |   67 -
 .../uci/ics/asterix/om/base/AOrderedList.java   |  137 -
 .../edu/uci/ics/asterix/om/base/APoint.java     |   84 -
 .../edu/uci/ics/asterix/om/base/APoint3D.java   |   91 -
 .../edu/uci/ics/asterix/om/base/APolygon.java   |  105 -
 .../edu/uci/ics/asterix/om/base/ARecord.java    |  109 -
 .../edu/uci/ics/asterix/om/base/ARectangle.java |   84 -
 .../edu/uci/ics/asterix/om/base/AString.java    |   82 -
 .../java/edu/uci/ics/asterix/om/base/ATime.java |  118 -
 .../java/edu/uci/ics/asterix/om/base/AUUID.java |  170 -
 .../uci/ics/asterix/om/base/AUnorderedList.java |  127 -
 .../ics/asterix/om/base/AYearMonthDuration.java |  102 -
 .../uci/ics/asterix/om/base/IACollection.java   |   22 -
 .../edu/uci/ics/asterix/om/base/IACursor.java   |   33 -
 .../edu/uci/ics/asterix/om/base/IAObject.java   |   33 -
 .../edu/uci/ics/asterix/om/base/InMemUtils.java |   61 -
 .../asterix/om/base/ShortWithoutTypeInfo.java   |   97 -
 .../om/base/temporal/ADateParserFactory.java    |  340 --
 .../base/temporal/ADateTimeParserFactory.java   |   70 -
 .../base/temporal/ADurationParserFactory.java   |  277 --
 .../om/base/temporal/ATimeParserFactory.java    |  513 ---
 .../AsterixTemporalTypeParseException.java      |   35 -
 .../om/base/temporal/DateTimeFormatUtils.java   |  971 ------
 .../temporal/DurationArithmeticOperations.java  |   90 -
 .../base/temporal/GregorianCalendarSystem.java  |  729 -----
 .../om/base/temporal/ICalendarSystem.java       |   61 -
 .../om/constants/AsterixConstantValue.java      |   71 -
 .../functions/AbstractFunctionDescriptor.java   |   66 -
 .../om/functions/AsterixBuiltinFunctions.java   | 1398 --------
 .../functions/AsterixExternalFunctionInfo.java  |   89 -
 .../asterix/om/functions/AsterixFunction.java   |   58 -
 .../om/functions/AsterixFunctionIdentifier.java |   44 -
 .../om/functions/AsterixFunctionInfo.java       |   75 -
 .../om/functions/FunctionInfoRepository.java    |   44 -
 .../om/functions/FunctionManagerHolder.java     |   27 -
 .../om/functions/IExternalFunctionInfo.java     |   39 -
 .../om/functions/IFunctionDescriptor.java       |   47 -
 .../functions/IFunctionDescriptorFactory.java   |   26 -
 .../asterix/om/functions/IFunctionManager.java  |   27 -
 .../ics/asterix/om/io/AsterixIOException.java   |   26 -
 .../edu/uci/ics/asterix/om/io/IALocation.java   |   19 -
 .../edu/uci/ics/asterix/om/io/IAOMReader.java   |   29 -
 .../asterix/om/io/IAOMReaderWriterFactory.java  |   21 -
 .../edu/uci/ics/asterix/om/io/IAOMWriter.java   |   30 -
 .../om/pointables/AFlatValuePointable.java      |   81 -
 .../om/pointables/AListVisitablePointable.java  |  181 --
 .../pointables/ARecordVisitablePointable.java   |  285 --
 .../pointables/AbstractVisitablePointable.java  |   58 -
 .../om/pointables/PointableAllocator.java       |  128 -
 .../pointables/base/DefaultOpenFieldType.java   |   66 -
 .../om/pointables/base/IVisitablePointable.java |   29 -
 .../om/pointables/cast/ACastVisitor.java        |  140 -
 .../asterix/om/pointables/cast/AListCaster.java |  106 -
 .../om/pointables/cast/ARecordCaster.java       |  367 ---
 .../pointables/nonvisitor/AListPointable.java   |  195 --
 .../pointables/nonvisitor/ARecordPointable.java |  336 --
 .../om/pointables/printer/AListPrinter.java     |   85 -
 .../om/pointables/printer/APrintVisitor.java    |  215 --
 .../om/pointables/printer/ARecordPrinter.java   |   88 -
 .../pointables/printer/csv/APrintVisitor.java   |  202 --
 .../pointables/printer/csv/ARecordPrinter.java  |   77 -
 .../pointables/printer/json/AListPrinter.java   |   82 -
 .../pointables/printer/json/APrintVisitor.java  |  210 --
 .../pointables/printer/json/ARecordPrinter.java |   88 -
 .../visitor/IVisitablePointableVisitor.java     |   35 -
 .../typecomputer/base/IResultTypeComputer.java  |   28 -
 .../base/TypeComputerUtilities.java             |  120 -
 .../typecomputer/impl/ABinaryTypeComputer.java  |   37 -
 .../typecomputer/impl/ABooleanTypeComputer.java |   38 -
 .../typecomputer/impl/ACircleTypeComputer.java  |   38 -
 .../impl/ADateTimeTypeComputer.java             |   38 -
 .../om/typecomputer/impl/ADateTypeComputer.java |   38 -
 .../typecomputer/impl/ADoubleTypeComputer.java  |   38 -
 .../typecomputer/impl/AFloatTypeComputer.java   |   37 -
 .../typecomputer/impl/AInt32TypeComputer.java   |   38 -
 .../typecomputer/impl/AInt64TypeComputer.java   |   38 -
 .../impl/AIntervalTypeComputer.java             |   41 -
 .../om/typecomputer/impl/ALineTypeComputer.java |   38 -
 .../om/typecomputer/impl/ANullTypeComputer.java |   38 -
 .../typecomputer/impl/APoint3DTypeComputer.java |   38 -
 .../typecomputer/impl/APointTypeComputer.java   |   38 -
 .../typecomputer/impl/APolygonTypeComputer.java |   38 -
 .../impl/ARectangleTypeComputer.java            |   38 -
 .../typecomputer/impl/AStringTypeComputer.java  |   38 -
 .../om/typecomputer/impl/ATimeTypeComputer.java |   38 -
 .../om/typecomputer/impl/AUUIDTypeComputer.java |   41 -
 .../impl/AbstractBinaryStringTypeComputer.java  |   55 -
 .../impl/AbstractQuadStringTypeComputer.java    |   62 -
 .../impl/AbstractTripleStringTypeComputer.java  |   58 -
 .../om/typecomputer/impl/AnyTypeComputer.java   |   38 -
 ...BinaryBooleanOrNullFunctionTypeComputer.java |   57 -
 .../BinaryStringBoolOrNullTypeComputer.java     |   38 -
 .../BinaryStringStringOrNullTypeComputer.java   |   40 -
 .../impl/CastListResultTypeComputer.java        |   42 -
 .../impl/CastRecordResultTypeComputer.java      |   36 -
 .../impl/ClosedRecordConstructorResultType.java |   79 -
 .../impl/CollectionToSequenceTypeComputer.java  |   65 -
 .../impl/ConcatNonNullTypeComputer.java         |   58 -
 .../impl/FieldAccessByIndexResultType.java      |   74 -
 .../impl/FieldAccessNestedResultType.java       |   91 -
 .../impl/FlowRecordResultTypeComputer.java      |   41 -
 .../om/typecomputer/impl/GetItemResultType.java |   50 -
 .../GetOverlappingInvervalTypeComputer.java     |   39 -
 .../impl/InjectFailureTypeComputer.java         |   53 -
 .../NonTaggedCollectionMemberResultType.java    |   51 -
 .../NonTaggedFieldAccessByNameResultType.java   |  100 -
 .../impl/NonTaggedGetItemResultType.java        |   53 -
 .../impl/NonTaggedLocalAvgTypeComputer.java     |   44 -
 .../impl/NonTaggedMinMaxAggTypeComputer.java    |  102 -
 ...onTaggedNumericAddSubMulDivTypeComputer.java |  339 --
 .../impl/NonTaggedNumericAggTypeComputer.java   |   84 -
 ...ggedNumericRoundHalfToEven2TypeComputer.java |  111 -
 ...nTaggedNumericUnaryFunctionTypeComputer.java |   88 -
 .../impl/NonTaggedSwitchCaseComputer.java       |   63 -
 .../impl/NonTaggedUnaryMinusTypeComputer.java   |   44 -
 .../typecomputer/impl/NotNullTypeComputer.java  |   72 -
 .../impl/NumericAddSubMulTypeDescriptor.java    |   79 -
 .../impl/OpenRecordConstructorResultType.java   |   83 -
 .../impl/OptionalABinaryTypeComputer.java       |   44 -
 .../impl/OptionalABooleanTypeComputer.java      |   44 -
 .../impl/OptionalACircleTypeComputer.java       |   44 -
 .../impl/OptionalADateTimeTypeComputer.java     |   44 -
 .../impl/OptionalADateTypeComputer.java         |   44 -
 .../OptionalADayTimeDurationTypeComputer.java   |   48 -
 .../impl/OptionalADoubleTypeComputer.java       |   43 -
 .../impl/OptionalADurationTypeComputer.java     |   44 -
 .../impl/OptionalAFloatTypeComputer.java        |   44 -
 .../impl/OptionalAInt16TypeComputer.java        |   44 -
 .../impl/OptionalAInt32TypeComputer.java        |   44 -
 .../impl/OptionalAInt64TypeComputer.java        |   44 -
 .../impl/OptionalAInt8TypeComputer.java         |   44 -
 .../impl/OptionalAIntervalTypeComputer.java     |   44 -
 .../impl/OptionalALineTypeComputer.java         |   44 -
 .../impl/OptionalAPoint3DTypeComputer.java      |   44 -
 .../impl/OptionalAPointTypeComputer.java        |   44 -
 .../impl/OptionalAPolygonTypeComputer.java      |   44 -
 .../impl/OptionalARectangleTypeComputer.java    |   44 -
 .../impl/OptionalAStringTypeComputer.java       |   44 -
 .../OptionalATemporalInstanceTypeComputer.java  |   50 -
 .../impl/OptionalATimeTypeComputer.java         |   44 -
 .../OptionalAYearMonthDurationTypeComputer.java |   48 -
 .../impl/OrderedListConstructorResultType.java  |   65 -
 .../impl/OrderedListOfAInt32TypeComputer.java   |   38 -
 .../impl/OrderedListOfAInt64TypeComputer.java   |   38 -
 .../OrderedListOfAIntervalTypeComputer.java     |   39 -
 .../impl/OrderedListOfAPointTypeComputer.java   |   38 -
 .../impl/OrderedListOfAStringTypeComputer.java  |   38 -
 .../impl/OrderedListOfAnyTypeComputer.java      |   38 -
 .../QuadStringStringOrNullTypeComputer.java     |   40 -
 .../impl/RecordConstructorResultType.java       |   84 -
 .../impl/RecordMergeTypeComputer.java           |  168 -
 .../ScalarVersionOfAggregateResultType.java     |   57 -
 .../impl/SubsetCollectionTypeComputer.java      |   71 -
 .../impl/Substring2TypeComputer.java            |   70 -
 .../impl/SubstringTypeComputer.java             |   85 -
 .../TripleStringBoolOrNullTypeComputer.java     |   39 -
 .../TripleStringStringOrNullTypeComputer.java   |   38 -
 .../impl/TypeCompatibilityChecker.java          |   76 -
 .../UnaryBinaryInt64OrNullTypeComputer.java     |   63 -
 .../UnaryBooleanOrNullFunctionTypeComputer.java |   56 -
 .../UnaryStringInt64OrNullTypeComputer.java     |   74 -
 .../impl/UnaryStringOrNullTypeComputer.java     |   65 -
 .../UnorderedListConstructorResultType.java     |   65 -
 .../ics/asterix/om/types/AOrderedListType.java  |   80 -
 .../uci/ics/asterix/om/types/ARecordType.java   |  625 ----
 .../edu/uci/ics/asterix/om/types/ATypeTag.java  |  103 -
 .../uci/ics/asterix/om/types/AUnionType.java    |  161 -
 .../asterix/om/types/AUnorderedListType.java    |   80 -
 .../om/types/AbstractCollectionType.java        |   65 -
 .../asterix/om/types/AbstractComplexType.java   |   44 -
 .../uci/ics/asterix/om/types/BuiltinType.java   |  904 ------
 .../ics/asterix/om/types/EnumDeserializer.java  |   36 -
 .../edu/uci/ics/asterix/om/types/IAType.java    |   29 -
 .../ics/asterix/om/types/IEnumSerializer.java   |   23 -
 .../uci/ics/asterix/om/types/TypeHelper.java    |   83 -
 .../uci/ics/asterix/om/types/TypeHierarchy.java |   40 -
 .../uci/ics/asterix/om/types/TypeSignature.java |   56 -
 .../uci/ics/asterix/om/types/TypeTagUtil.java   |   83 -
 .../om/types/hierachy/ATypeHierarchy.java       |  804 -----
 .../AbstractIntegerTypeConvertComputer.java     |  141 -
 .../DoubleToFloatTypeConvertComputer.java       |   44 -
 .../DoubleToInt16TypeConvertComputer.java       |   46 -
 .../DoubleToInt32TypeConvertComputer.java       |   46 -
 .../DoubleToInt64TypeConvertComputer.java       |   46 -
 .../DoubleToInt8TypeConvertComputer.java        |   46 -
 .../FloatToDoubleTypeConvertComputer.java       |   39 -
 .../FloatToInt16TypeConvertComputer.java        |   46 -
 .../FloatToInt32TypeConvertComputer.java        |   46 -
 .../FloatToInt64TypeConvertComputer.java        |   46 -
 .../FloatToInt8TypeConvertComputer.java         |   46 -
 .../om/types/hierachy/ITypeConvertComputer.java |   25 -
 .../IntegerToDoubleTypeConvertComputer.java     |   71 -
 .../IntegerToFloatTypeConvertComputer.java      |   71 -
 .../IntegerToInt16TypeConvertComputer.java      |   35 -
 .../IntegerToInt32TypeConvertComputer.java      |   34 -
 .../IntegerToInt64TypeConvertComputer.java      |   34 -
 .../IntegerToInt8TypeConvertComputer.java       |   35 -
 .../asterix/om/util/AsterixAppContextInfo.java  |  126 -
 .../om/util/AsterixClusterProperties.java       |  182 --
 .../ics/asterix/om/util/AsterixRuntimeUtil.java |   52 -
 .../om/util/JSONDeserializerForTypes.java       |   91 -
 .../asterix/om/util/NonTaggedFormatUtil.java    |  235 --
 .../util/ResettableByteArrayOutputStream.java   |   28 -
 .../om/util/container/IObjectFactory.java       |   30 -
 .../asterix/om/util/container/IObjectPool.java  |   36 -
 .../om/util/container/ListObjectPool.java       |   81 -
 .../uci/ics/asterix/om/visitors/IOMVisitor.java |  110 -
 .../om/visitors/OMPrintToStringVisitor.java     |  296 --
 .../asterix/builders/AbstractListBuilder.java   |  124 +
 .../asterix/builders/AbvsBuilderFactory.java    |   28 +
 .../asterix/builders/IARecordBuilder.java       |   72 +
 .../asterix/builders/IAsterixListBuilder.java   |   43 +
 .../asterix/builders/ListBuilderFactory.java    |   30 +
 .../asterix/builders/OrderedListBuilder.java    |   35 +
 .../apache/asterix/builders/RecordBuilder.java  |  289 ++
 .../asterix/builders/RecordBuilderFactory.java  |   26 +
 .../asterix/builders/UnorderedListBuilder.java  |   24 +
 .../dataflow/data/common/AListElementToken.java |   70 +
 .../data/common/AListElementTokenFactory.java   |   28 +
 .../common/AOrderedListBinaryTokenizer.java     |   88 +
 .../AOrderedListBinaryTokenizerFactory.java     |   34 +
 .../common/AUnorderedListBinaryTokenizer.java   |   36 +
 .../AUnorderedListBinaryTokenizerFactory.java   |   34 +
 .../data/common/AqlExpressionTypeComputer.java  |   97 +
 .../AqlMergeAggregationExpressionFactory.java   |   58 +
 .../data/common/AqlNullableTypeComputer.java    |   51 +
 .../AqlPartialAggregationTypeComputer.java      |   55 +
 .../common/IBinaryTokenizerFactoryProvider.java |   25 +
 .../dataflow/data/common/SerializationUtil.java |   31 +
 .../data/nontagged/AqlNullWriterFactory.java    |   49 +
 .../dataflow/data/nontagged/Coordinate.java     |   21 +
 .../comparators/ABinaryComparator.java          |   50 +
 .../ACirclePartialBinaryComparatorFactory.java  |   82 +
 ...ADurationPartialBinaryComparatorFactory.java |   59 +
 ...AIntervalPartialBinaryComparatorFactory.java |   61 +
 .../ALinePartialBinaryComparatorFactory.java    |   88 +
 .../AObjectAscBinaryComparatorFactory.java      |  313 ++
 .../AObjectDescBinaryComparatorFactory.java     |   42 +
 .../APoint3DPartialBinaryComparatorFactory.java |   71 +
 .../APointPartialBinaryComparatorFactory.java   |   61 +
 .../APolygonPartialBinaryComparatorFactory.java |   84 +
 ...RectanglePartialBinaryComparatorFactory.java |   61 +
 .../AUUIDPartialBinaryComparatorFactory.java    |   46 +
 .../BooleanBinaryComparatorFactory.java         |   47 +
 .../ListItemBinaryComparatorFactory.java        |  176 +
 .../LongBinaryComparatorFactory.java            |   43 +
 .../comparators/RawBinaryComparatorFactory.java |   47 +
 .../AMurmurHash3BinaryHashFunctionFamily.java   |  104 +
 .../hash/AObjectBinaryHashFunctionFactory.java  |   43 +
 .../hash/BooleanBinaryHashFunctionFactory.java  |   42 +
 .../hash/DoubleBinaryHashFunctionFactory.java   |   42 +
 .../hash/ListItemBinaryHashFunctionFactory.java |   95 +
 .../hash/LongBinaryHashFunctionFactory.java     |   42 +
 ...AWrappedAscNormalizedKeyComputerFactory.java |   47 +
 ...WrappedDescNormalizedKeyComputerFactory.java |   47 +
 .../printers/ABinaryBase64Printer.java          |  147 +
 .../nontagged/printers/ABinaryHexPrinter.java   |   55 +
 .../printers/ABinaryPrinterFactory.java         |   31 +
 .../nontagged/printers/ABooleanPrinter.java     |   36 +
 .../printers/ABooleanPrinterFactory.java        |   30 +
 .../data/nontagged/printers/ACirclePrinter.java |   42 +
 .../printers/ACirclePrinterFactory.java         |   30 +
 .../data/nontagged/printers/ADatePrinter.java   |   53 +
 .../nontagged/printers/ADatePrinterFactory.java |   30 +
 .../nontagged/printers/ADateTimePrinter.java    |   53 +
 .../printers/ADateTimePrinterFactory.java       |   30 +
 .../printers/ADayTimeDurationPrinter.java       |   90 +
 .../ADayTimeDurationPrinterFactory.java         |   30 +
 .../data/nontagged/printers/ADoublePrinter.java |   36 +
 .../printers/ADoublePrinterFactory.java         |   30 +
 .../nontagged/printers/ADurationPrinter.java    |  102 +
 .../printers/ADurationPrinterFactory.java       |   30 +
 .../data/nontagged/printers/AFloatPrinter.java  |   36 +
 .../printers/AFloatPrinterFactory.java          |   30 +
 .../data/nontagged/printers/AInt16Printer.java  |   63 +
 .../printers/AInt16PrinterFactory.java          |   30 +
 .../data/nontagged/printers/AInt32Printer.java  |   62 +
 .../printers/AInt32PrinterFactory.java          |   30 +
 .../data/nontagged/printers/AInt64Printer.java  |   43 +
 .../printers/AInt64PrinterFactory.java          |   30 +
 .../data/nontagged/printers/AInt8Printer.java   |   63 +
 .../nontagged/printers/AInt8PrinterFactory.java |   30 +
 .../nontagged/printers/AIntervalPrinter.java    |   70 +
 .../printers/AIntervalPrinterFactory.java       |   30 +
 .../data/nontagged/printers/ALinePrinter.java   |   44 +
 .../nontagged/printers/ALinePrinterFactory.java |   30 +
 .../data/nontagged/printers/ANullPrinter.java   |   35 +
 .../nontagged/printers/ANullPrinterFactory.java |   30 +
 .../printers/ANullableFieldPrinterFactory.java  |   62 +
 .../data/nontagged/printers/AObjectPrinter.java |  173 +
 .../printers/AObjectPrinterFactory.java         |   30 +
 .../printers/AOrderedlistPrinterFactory.java    |   69 +
 .../nontagged/printers/APoint3DPrinter.java     |   42 +
 .../printers/APoint3DPrinterFactory.java        |   30 +
 .../data/nontagged/printers/APointPrinter.java  |   40 +
 .../printers/APointPrinterFactory.java          |   30 +
 .../nontagged/printers/APolygonPrinter.java     |   49 +
 .../printers/APolygonPrinterFactory.java        |   30 +
 .../printers/ARecordPrinterFactory.java         |   69 +
 .../nontagged/printers/ARectanglePrinter.java   |   44 +
 .../printers/ARectanglePrinterFactory.java      |   30 +
 .../data/nontagged/printers/AStringPrinter.java |   41 +
 .../printers/AStringPrinterFactory.java         |   30 +
 .../data/nontagged/printers/ATimePrinter.java   |   53 +
 .../nontagged/printers/ATimePrinterFactory.java |   30 +
 .../data/nontagged/printers/AUUIDPrinter.java   |   44 +
 .../nontagged/printers/AUUIDPrinterFactory.java |   32 +
 .../printers/AUnionPrinterFactory.java          |   72 +
 .../printers/AUnorderedlistPrinterFactory.java  |   69 +
 .../printers/AYearMonthDurationPrinter.java     |   75 +
 .../AYearMonthDurationPrinterFactory.java       |   33 +
 .../data/nontagged/printers/PrintTools.java     |  155 +
 .../printers/ShortWithoutTypeInfoPrinter.java   |   43 +
 .../ShortWithoutTypeInfoPrinterFactory.java     |   30 +
 .../nontagged/printers/csv/ABooleanPrinter.java |   36 +
 .../printers/csv/ABooleanPrinterFactory.java    |   30 +
 .../nontagged/printers/csv/ACirclePrinter.java  |   36 +
 .../printers/csv/ACirclePrinterFactory.java     |   30 +
 .../nontagged/printers/csv/ADatePrinter.java    |   36 +
 .../printers/csv/ADatePrinterFactory.java       |   30 +
 .../printers/csv/ADateTimePrinter.java          |   36 +
 .../printers/csv/ADateTimePrinterFactory.java   |   30 +
 .../printers/csv/ADayTimeDurationPrinter.java   |   39 +
 .../csv/ADayTimeDurationPrinterFactory.java     |   30 +
 .../nontagged/printers/csv/ADoublePrinter.java  |   36 +
 .../printers/csv/ADoublePrinterFactory.java     |   30 +
 .../printers/csv/ADurationPrinter.java          |   38 +
 .../printers/csv/ADurationPrinterFactory.java   |   30 +
 .../nontagged/printers/csv/AFloatPrinter.java   |   36 +
 .../printers/csv/AFloatPrinterFactory.java      |   30 +
 .../nontagged/printers/csv/AInt16Printer.java   |   36 +
 .../printers/csv/AInt16PrinterFactory.java      |   30 +
 .../nontagged/printers/csv/AInt32Printer.java   |   37 +
 .../printers/csv/AInt32PrinterFactory.java      |   30 +
 .../nontagged/printers/csv/AInt64Printer.java   |   35 +
 .../printers/csv/AInt64PrinterFactory.java      |   30 +
 .../nontagged/printers/csv/AInt8Printer.java    |   37 +
 .../printers/csv/AInt8PrinterFactory.java       |   30 +
 .../printers/csv/AIntervalPrinter.java          |   40 +
 .../printers/csv/AIntervalPrinterFactory.java   |   30 +
 .../nontagged/printers/csv/ALinePrinter.java    |   36 +
 .../printers/csv/ALinePrinterFactory.java       |   30 +
 .../nontagged/printers/csv/ANullPrinter.java    |   35 +
 .../printers/csv/ANullPrinterFactory.java       |   30 +
 .../csv/ANullableFieldPrinterFactory.java       |   63 +
 .../nontagged/printers/csv/AObjectPrinter.java  |  151 +
 .../printers/csv/AObjectPrinterFactory.java     |   30 +
 .../nontagged/printers/csv/APoint3DPrinter.java |   36 +
 .../printers/csv/APoint3DPrinterFactory.java    |   30 +
 .../nontagged/printers/csv/APointPrinter.java   |   36 +
 .../printers/csv/APointPrinterFactory.java      |   30 +
 .../nontagged/printers/csv/APolygonPrinter.java |   37 +
 .../printers/csv/APolygonPrinterFactory.java    |   30 +
 .../printers/csv/ARecordPrinterFactory.java     |   69 +
 .../printers/csv/ARectanglePrinter.java         |   36 +
 .../printers/csv/ARectanglePrinterFactory.java  |   30 +
 .../nontagged/printers/csv/AStringPrinter.java  |   41 +
 .../printers/csv/AStringPrinterFactory.java     |   30 +
 .../nontagged/printers/csv/ATimePrinter.java    |   38 +
 .../printers/csv/ATimePrinterFactory.java       |   30 +
 .../printers/csv/AUnionPrinterFactory.java      |   72 +
 .../printers/csv/AYearMonthDurationPrinter.java |   44 +
 .../csv/AYearMonthDurationPrinterFactory.java   |   33 +
 .../printers/json/ABooleanPrinter.java          |   36 +
 .../printers/json/ABooleanPrinterFactory.java   |   30 +
 .../nontagged/printers/json/ACirclePrinter.java |   42 +
 .../printers/json/ACirclePrinterFactory.java    |   30 +
 .../nontagged/printers/json/ADatePrinter.java   |   41 +
 .../printers/json/ADatePrinterFactory.java      |   30 +
 .../printers/json/ADateTimePrinter.java         |   40 +
 .../printers/json/ADateTimePrinterFactory.java  |   30 +
 .../printers/json/ADayTimeDurationPrinter.java  |   41 +
 .../json/ADayTimeDurationPrinterFactory.java    |   30 +
 .../nontagged/printers/json/ADoublePrinter.java |   36 +
 .../printers/json/ADoublePrinterFactory.java    |   30 +
 .../printers/json/ADurationPrinter.java         |   45 +
 .../printers/json/ADurationPrinterFactory.java  |   30 +
 .../nontagged/printers/json/AFloatPrinter.java  |   36 +
 .../printers/json/AFloatPrinterFactory.java     |   30 +
 .../nontagged/printers/json/AInt16Printer.java  |   39 +
 .../printers/json/AInt16PrinterFactory.java     |   30 +
 .../nontagged/printers/json/AInt32Printer.java  |   40 +
 .../printers/json/AInt32PrinterFactory.java     |   30 +
 .../nontagged/printers/json/AInt64Printer.java  |   37 +
 .../printers/json/AInt64PrinterFactory.java     |   30 +
 .../nontagged/printers/json/AInt8Printer.java   |   40 +
 .../printers/json/AInt8PrinterFactory.java      |   30 +
 .../printers/json/AIntervalPrinter.java         |   67 +
 .../printers/json/AIntervalPrinterFactory.java  |   30 +
 .../nontagged/printers/json/ALinePrinter.java   |   45 +
 .../printers/json/ALinePrinterFactory.java      |   30 +
 .../nontagged/printers/json/ANullPrinter.java   |   35 +
 .../printers/json/ANullPrinterFactory.java      |   30 +
 .../json/ANullableFieldPrinterFactory.java      |   63 +
 .../nontagged/printers/json/AObjectPrinter.java |  167 +
 .../printers/json/AObjectPrinterFactory.java    |   30 +
 .../json/AOrderedlistPrinterFactory.java        |   69 +
 .../printers/json/APoint3DPrinter.java          |   42 +
 .../printers/json/APoint3DPrinterFactory.java   |   30 +
 .../nontagged/printers/json/APointPrinter.java  |   40 +
 .../printers/json/APointPrinterFactory.java     |   30 +
 .../printers/json/APolygonPrinter.java          |   56 +
 .../printers/json/APolygonPrinterFactory.java   |   30 +
 .../printers/json/ARecordPrinterFactory.java    |   69 +
 .../printers/json/ARectanglePrinter.java        |   45 +
 .../printers/json/ARectanglePrinterFactory.java |   30 +
 .../nontagged/printers/json/AStringPrinter.java |   41 +
 .../printers/json/AStringPrinterFactory.java    |   30 +
 .../nontagged/printers/json/ATimePrinter.java   |   41 +
 .../printers/json/ATimePrinterFactory.java      |   30 +
 .../printers/json/AUnionPrinterFactory.java     |   72 +
 .../json/AUnorderedlistPrinterFactory.java      |   69 +
 .../json/AYearMonthDurationPrinter.java         |   47 +
 .../json/AYearMonthDurationPrinterFactory.java  |   33 +
 .../serde/ABinarySerializerDeserializer.java    |   55 +
 .../serde/ABooleanSerializerDeserializer.java   |   60 +
 .../serde/ACircleSerializerDeserializer.java    |   93 +
 .../serde/ADateSerializerDeserializer.java      |   82 +
 .../serde/ADateTimeSerializerDeserializer.java  |   95 +
 .../ADayTimeDurationSerializerDeserializer.java |   72 +
 .../serde/ADoubleSerializerDeserializer.java    |   60 +
 .../serde/ADurationSerializerDeserializer.java  |  100 +
 .../serde/AFloatSerializerDeserializer.java     |   54 +
 .../serde/AInt16SerializerDeserializer.java     |   57 +
 .../serde/AInt32SerializerDeserializer.java     |   49 +
 .../serde/AInt64SerializerDeserializer.java     |   60 +
 .../serde/AInt8SerializerDeserializer.java      |   53 +
 .../serde/AIntervalSerializerDeserializer.java  |  263 ++
 .../serde/ALineSerializerDeserializer.java      |  106 +
 .../serde/ANullSerializerDeserializer.java      |   42 +
 .../serde/AObjectSerializerDeserializer.java    |  264 ++
 .../AOrderedListSerializerDeserializer.java     |  172 +
 .../serde/APoint3DSerializerDeserializer.java   |  101 +
 .../serde/APointSerializerDeserializer.java     |   93 +
 .../serde/APolygonSerializerDeserializer.java   |   94 +
 .../serde/ARecordSerializerDeserializer.java    |  341 ++
 .../serde/ARectangleSerializerDeserializer.java |  117 +
 .../serde/AStringSerializerDeserializer.java    |   52 +
 .../serde/ATimeSerializerDeserializer.java      |   81 +
 .../serde/ATypeSerializerDeserializer.java      |   43 +
 .../serde/AUUIDSerializerDeserializer.java      |   51 +
 .../AUUIDStringSerializerDeserializer.java      |   56 +
 .../AUnorderedListSerializerDeserializer.java   |  160 +
 ...YearMonthDurationSerializerDeserializer.java |   73 +
 .../serde/SerializerDeserializerUtil.java       |   76 +
 .../AqlPrimitiveValueProviderFactory.java       |   66 +
 .../asterix/formats/base/IDataFormat.java       |   93 +
 .../AqlBinaryBooleanInspectorImpl.java          |   50 +
 .../AqlBinaryComparatorFactoryProvider.java     |  222 ++
 .../AqlBinaryHashFunctionFactoryProvider.java   |   62 +
 .../AqlBinaryHashFunctionFamilyProvider.java    |   44 +
 .../nontagged/AqlBinaryIntegerInspector.java    |   41 +
 .../AqlBinaryTokenizerFactoryProvider.java      |   89 +
 .../nontagged/AqlCSVPrinterFactoryProvider.java |  145 +
 .../AqlJSONPrinterFactoryProvider.java          |  150 +
 .../AqlLinearizeComparatorFactoryProvider.java  |   99 +
 ...AqlNormalizedKeyComputerFactoryProvider.java |   99 +
 .../AqlPredicateEvaluatorFactoryProvider.java   |   78 +
 .../nontagged/AqlPrinterFactoryProvider.java    |  151 +
 .../AqlSerializerDeserializerProvider.java      |  215 ++
 .../formats/nontagged/AqlTypeTraitProvider.java |  101 +
 .../NGramUTF8StringBinaryTokenizerFactory.java  |   47 +
 .../nontagged/UTF8StringLowercasePointable.java |  102 +
 .../org/apache/asterix/om/base/ABinary.java     |  137 +
 .../org/apache/asterix/om/base/ABitArray.java   |  149 +
 .../org/apache/asterix/om/base/ABoolean.java    |   86 +
 .../org/apache/asterix/om/base/ACircle.java     |   91 +
 .../asterix/om/base/ACollectionCursor.java      |   70 +
 .../java/org/apache/asterix/om/base/ADate.java  |  102 +
 .../org/apache/asterix/om/base/ADateTime.java   |  133 +
 .../asterix/om/base/ADayTimeDuration.java       |   94 +
 .../org/apache/asterix/om/base/ADouble.java     |   84 +
 .../org/apache/asterix/om/base/ADuration.java   |  126 +
 .../java/org/apache/asterix/om/base/AFloat.java |   84 +
 .../java/org/apache/asterix/om/base/AInt16.java |   74 +
 .../java/org/apache/asterix/om/base/AInt32.java |   99 +
 .../java/org/apache/asterix/om/base/AInt64.java |   73 +
 .../java/org/apache/asterix/om/base/AInt8.java  |   73 +
 .../org/apache/asterix/om/base/AInterval.java   |  159 +
 .../java/org/apache/asterix/om/base/ALine.java  |   84 +
 .../apache/asterix/om/base/AMutableBinary.java  |   30 +
 .../apache/asterix/om/base/AMutableCircle.java  |   28 +
 .../apache/asterix/om/base/AMutableDate.java    |   27 +
 .../asterix/om/base/AMutableDateTime.java       |   27 +
 .../om/base/AMutableDayTimeDuration.java        |   27 +
 .../apache/asterix/om/base/AMutableDouble.java  |   27 +
 .../asterix/om/base/AMutableDuration.java       |   28 +
 .../apache/asterix/om/base/AMutableFloat.java   |   27 +
 .../apache/asterix/om/base/AMutableInt16.java   |   27 +
 .../apache/asterix/om/base/AMutableInt32.java   |   27 +
 .../apache/asterix/om/base/AMutableInt64.java   |   27 +
 .../apache/asterix/om/base/AMutableInt8.java    |   27 +
 .../asterix/om/base/AMutableInterval.java       |   34 +
 .../apache/asterix/om/base/AMutableLine.java    |   28 +
 .../asterix/om/base/AMutableOrderedList.java    |   39 +
 .../apache/asterix/om/base/AMutablePoint.java   |   28 +
 .../apache/asterix/om/base/AMutablePoint3D.java |   29 +
 .../apache/asterix/om/base/AMutablePolygon.java |   27 +
 .../apache/asterix/om/base/AMutableRecord.java  |   29 +
 .../asterix/om/base/AMutableRectangle.java      |   28 +
 .../apache/asterix/om/base/AMutableString.java  |   26 +
 .../apache/asterix/om/base/AMutableTime.java    |   27 +
 .../apache/asterix/om/base/AMutableUUID.java    |   60 +
 .../asterix/om/base/AMutableUnorderedList.java  |   38 +
 .../om/base/AMutableYearMonthDuration.java      |   27 +
 .../java/org/apache/asterix/om/base/ANull.java  |   67 +
 .../apache/asterix/om/base/AOrderedList.java    |  137 +
 .../java/org/apache/asterix/om/base/APoint.java |   84 +
 .../org/apache/asterix/om/base/APoint3D.java    |   91 +
 .../org/apache/asterix/om/base/APolygon.java    |  105 +
 .../org/apache/asterix/om/base/ARecord.java     |  109 +
 .../org/apache/asterix/om/base/ARectangle.java  |   84 +
 .../org/apache/asterix/om/base/AString.java     |   82 +
 .../java/org/apache/asterix/om/base/ATime.java  |  118 +
 .../java/org/apache/asterix/om/base/AUUID.java  |  170 +
 .../apache/asterix/om/base/AUnorderedList.java  |  127 +
 .../asterix/om/base/AYearMonthDuration.java     |  102 +
 .../apache/asterix/om/base/IACollection.java    |   22 +
 .../org/apache/asterix/om/base/IACursor.java    |   33 +
 .../org/apache/asterix/om/base/IAObject.java    |   33 +
 .../org/apache/asterix/om/base/InMemUtils.java  |   61 +
 .../asterix/om/base/ShortWithoutTypeInfo.java   |   97 +
 .../om/base/temporal/ADateParserFactory.java    |  340 ++
 .../base/temporal/ADateTimeParserFactory.java   |   70 +
 .../base/temporal/ADurationParserFactory.java   |  277 ++
 .../om/base/temporal/ATimeParserFactory.java    |  513 +++
 .../AsterixTemporalTypeParseException.java      |   35 +
 .../om/base/temporal/DateTimeFormatUtils.java   |  971 ++++++
 .../temporal/DurationArithmeticOperations.java  |   90 +
 .../base/temporal/GregorianCalendarSystem.java  |  729 +++++
 .../om/base/temporal/ICalendarSystem.java       |   61 +
 .../om/constants/AsterixConstantValue.java      |   71 +
 .../functions/AbstractFunctionDescriptor.java   |   66 +
 .../om/functions/AsterixBuiltinFunctions.java   | 1398 ++++++++
 .../functions/AsterixExternalFunctionInfo.java  |   89 +
 .../asterix/om/functions/AsterixFunction.java   |   58 +
 .../om/functions/AsterixFunctionIdentifier.java |   44 +
 .../om/functions/AsterixFunctionInfo.java       |   75 +
 .../om/functions/FunctionInfoRepository.java    |   44 +
 .../om/functions/FunctionManagerHolder.java     |   27 +
 .../om/functions/IExternalFunctionInfo.java     |   39 +
 .../om/functions/IFunctionDescriptor.java       |   47 +
 .../functions/IFunctionDescriptorFactory.java   |   26 +
 .../asterix/om/functions/IFunctionManager.java  |   27 +
 .../asterix/om/io/AsterixIOException.java       |   26 +
 .../org/apache/asterix/om/io/IALocation.java    |   19 +
 .../org/apache/asterix/om/io/IAOMReader.java    |   29 +
 .../asterix/om/io/IAOMReaderWriterFactory.java  |   21 +
 .../org/apache/asterix/om/io/IAOMWriter.java    |   30 +
 .../om/pointables/AFlatValuePointable.java      |   81 +
 .../om/pointables/AListVisitablePointable.java  |  181 ++
 .../pointables/ARecordVisitablePointable.java   |  285 ++
 .../pointables/AbstractVisitablePointable.java  |   58 +
 .../om/pointables/PointableAllocator.java       |  128 +
 .../pointables/base/DefaultOpenFieldType.java   |   66 +
 .../om/pointables/base/IVisitablePointable.java |   29 +
 .../om/pointables/cast/ACastVisitor.java        |  140 +
 .../asterix/om/pointables/cast/AListCaster.java |  106 +
 .../om/pointables/cast/ARecordCaster.java       |  367 +++
 .../pointables/nonvisitor/AListPointable.java   |  195 ++
 .../pointables/nonvisitor/ARecordPointable.java |  336 ++
 .../om/pointables/printer/AListPrinter.java     |   85 +
 .../om/pointables/printer/APrintVisitor.java    |  215 ++
 .../om/pointables/printer/ARecordPrinter.java   |   88 +
 .../pointables/printer/csv/APrintVisitor.java   |  202 ++
 .../pointables/printer/csv/ARecordPrinter.java  |   77 +
 .../pointables/printer/json/AListPrinter.java   |   82 +
 .../pointables/printer/json/APrintVisitor.java  |  210 ++
 .../pointables/printer/json/ARecordPrinter.java |   88 +
 .../visitor/IVisitablePointableVisitor.java     |   35 +
 .../typecomputer/base/IResultTypeComputer.java  |   28 +
 .../base/TypeComputerUtilities.java             |  120 +
 .../typecomputer/impl/ABinaryTypeComputer.java  |   37 +
 .../typecomputer/impl/ABooleanTypeComputer.java |   38 +
 .../typecomputer/impl/ACircleTypeComputer.java  |   38 +
 .../impl/ADateTimeTypeComputer.java             |   38 +
 .../om/typecomputer/impl/ADateTypeComputer.java |   38 +
 .../typecomputer/impl/ADoubleTypeComputer.java  |   38 +
 .../typecomputer/impl/AFloatTypeComputer.java   |   37 +
 .../typecomputer/impl/AInt32TypeComputer.java   |   38 +
 .../typecomputer/impl/AInt64TypeComputer.java   |   38 +
 .../impl/AIntervalTypeComputer.java             |   41 +
 .../om/typecomputer/impl/ALineTypeComputer.java |   38 +
 .../om/typecomputer/impl/ANullTypeComputer.java |   38 +
 .../typecomputer/impl/APoint3DTypeComputer.java |   38 +
 .../typecomputer/impl/APointTypeComputer.java   |   38 +
 .../typecomputer/impl/APolygonTypeComputer.java |   38 +
 .../impl/ARectangleTypeComputer.java            |   38 +
 .../typecomputer/impl/AStringTypeComputer.java  |   38 +
 .../om/typecomputer/impl/ATimeTypeComputer.java |   38 +
 .../om/typecomputer/impl/AUUIDTypeComputer.java |   41 +
 .../impl/AbstractBinaryStringTypeComputer.java  |   55 +
 .../impl/AbstractQuadStringTypeComputer.java    |   62 +
 .../impl/AbstractTripleStringTypeComputer.java  |   58 +
 .../om/typecomputer/impl/AnyTypeComputer.java   |   38 +
 ...BinaryBooleanOrNullFunctionTypeComputer.java |   57 +
 .../BinaryStringBoolOrNullTypeComputer.java     |   38 +
 .../BinaryStringStringOrNullTypeComputer.java   |   40 +
 .../impl/CastListResultTypeComputer.java        |   42 +
 .../impl/CastRecordResultTypeComputer.java      |   36 +
 .../impl/ClosedRecordConstructorResultType.java |   79 +
 .../impl/CollectionToSequenceTypeComputer.java  |   65 +
 .../impl/ConcatNonNullTypeComputer.java         |   58 +
 .../impl/FieldAccessByIndexResultType.java      |   74 +
 .../impl/FieldAccessNestedResultType.java       |   91 +
 .../impl/FlowRecordResultTypeComputer.java      |   41 +
 .../om/typecomputer/impl/GetItemResultType.java |   50 +
 .../GetOverlappingInvervalTypeComputer.java     |   39 +
 .../impl/InjectFailureTypeComputer.java         |   53 +
 .../NonTaggedCollectionMemberResultType.java    |   51 +
 .../NonTaggedFieldAccessByNameResultType.java   |  100 +
 .../impl/NonTaggedGetItemResultType.java        |   53 +
 .../impl/NonTaggedLocalAvgTypeComputer.java     |   44 +
 .../impl/NonTaggedMinMaxAggTypeComputer.java    |  102 +
 ...onTaggedNumericAddSubMulDivTypeComputer.java |  339 ++
 .../impl/NonTaggedNumericAggTypeComputer.java   |   84 +
 ...ggedNumericRoundHalfToEven2TypeComputer.java |  111 +
 ...nTaggedNumericUnaryFunctionTypeComputer.java |   88 +
 .../impl/NonTaggedSwitchCaseComputer.java       |   63 +
 .../impl/NonTaggedUnaryMinusTypeComputer.java   |   44 +
 .../typecomputer/impl/NotNullTypeComputer.java  |   72 +
 .../impl/NumericAddSubMulTypeDescriptor.java    |   79 +
 .../impl/OpenRecordConstructorResultType.java   |   83 +
 .../impl/OptionalABinaryTypeComputer.java       |   44 +
 .../impl/OptionalABooleanTypeComputer.java      |   44 +
 .../impl/OptionalACircleTypeComputer.java       |   44 +
 .../impl/OptionalADateTimeTypeComputer.java     |   44 +
 .../impl/OptionalADateTypeComputer.java         |   44 +
 .../OptionalADayTimeDurationTypeComputer.java   |   48 +
 .../impl/OptionalADoubleTypeComputer.java       |   43 +
 .../impl/OptionalADurationTypeComputer.java     |   44 +
 .../impl/OptionalAFloatTypeComputer.java        |   44 +
 .../impl/OptionalAInt16TypeComputer.java        |   44 +
 .../impl/OptionalAInt32TypeComputer.java        |   44 +
 .../impl/OptionalAInt64TypeComputer.java        |   44 +
 .../impl/OptionalAInt8TypeComputer.java         |   44 +
 .../impl/OptionalAIntervalTypeComputer.java     |   44 +
 .../impl/OptionalALineTypeComputer.java         |   44 +
 .../impl/OptionalAPoint3DTypeComputer.java      |   44 +
 .../impl/OptionalAPointTypeComputer.java        |   44 +
 .../impl/OptionalAPolygonTypeComputer.java      |   44 +
 .../impl/OptionalARectangleTypeComputer.java    |   44 +
 .../impl/OptionalAStringTypeComputer.java       |   44 +
 .../OptionalATemporalInstanceTypeComputer.java  |   50 +
 .../impl/OptionalATimeTypeComputer.java         |   44 +
 .../OptionalAYearMonthDurationTypeComputer.java |   48 +
 .../impl/OrderedListConstructorResultType.java  |   65 +
 .../impl/OrderedListOfAInt32TypeComputer.java   |   38 +
 .../impl/OrderedListOfAInt64TypeComputer.java   |   38 +
 .../OrderedListOfAIntervalTypeComputer.java     |   39 +
 .../impl/OrderedListOfAPointTypeComputer.java   |   38 +
 .../impl/OrderedListOfAStringTypeComputer.java  |   38 +
 .../impl/OrderedListOfAnyTypeComputer.java      |   38 +
 .../QuadStringStringOrNullTypeComputer.java     |   40 +
 .../impl/RecordConstructorResultType.java       |   84 +
 .../impl/RecordMergeTypeComputer.java           |  168 +
 .../ScalarVersionOfAggregateResultType.java     |   57 +
 .../impl/SubsetCollectionTypeComputer.java      |   71 +
 .../impl/Substring2TypeComputer.java            |   70 +
 .../impl/SubstringTypeComputer.java             |   85 +
 .../TripleStringBoolOrNullTypeComputer.java     |   39 +
 .../TripleStringStringOrNullTypeComputer.java   |   38 +
 .../impl/TypeCompatibilityChecker.java          |   76 +
 .../UnaryBinaryInt64OrNullTypeComputer.java     |   63 +
 .../UnaryBooleanOrNullFunctionTypeComputer.java |   56 +
 .../UnaryStringInt64OrNullTypeComputer.java     |   74 +
 .../impl/UnaryStringOrNullTypeComputer.java     |   65 +
 .../UnorderedListConstructorResultType.java     |   65 +
 .../asterix/om/types/AOrderedListType.java      |   80 +
 .../apache/asterix/om/types/ARecordType.java    |  625 ++++
 .../org/apache/asterix/om/types/ATypeTag.java   |  103 +
 .../org/apache/asterix/om/types/AUnionType.java |  161 +
 .../asterix/om/types/AUnorderedListType.java    |   80 +
 .../om/types/AbstractCollectionType.java        |   65 +
 .../asterix/om/types/AbstractComplexType.java   |   44 +
 .../apache/asterix/om/types/BuiltinType.java    |  904 ++++++
 .../asterix/om/types/EnumDeserializer.java      |   36 +
 .../org/apache/asterix/om/types/IAType.java     |   29 +
 .../asterix/om/types/IEnumSerializer.java       |   23 +
 .../org/apache/asterix/om/types/TypeHelper.java |   83 +
 .../apache/asterix/om/types/TypeHierarchy.java  |   40 +
 .../apache/asterix/om/types/TypeSignature.java  |   56 +
 .../apache/asterix/om/types/TypeTagUtil.java    |   83 +
 .../om/types/hierachy/ATypeHierarchy.java       |  804 +++++
 .../AbstractIntegerTypeConvertComputer.java     |  141 +
 .../DoubleToFloatTypeConvertComputer.java       |   44 +
 .../DoubleToInt16TypeConvertComputer.java       |   46 +
 .../DoubleToInt32TypeConvertComputer.java       |   46 +
 .../DoubleToInt64TypeConvertComputer.java       |   46 +
 .../DoubleToInt8TypeConvertComputer.java        |   46 +
 .../FloatToDoubleTypeConvertComputer.java       |   39 +
 .../FloatToInt16TypeConvertComputer.java        |   46 +
 .../FloatToInt32TypeConvertComputer.java        |   46 +
 .../FloatToInt64TypeConvertComputer.java        |   46 +
 .../FloatToInt8TypeConvertComputer.java         |   46 +
 .../om/types/hierachy/ITypeConvertComputer.java |   25 +
 .../IntegerToDoubleTypeConvertComputer.java     |   71 +
 .../IntegerToFloatTypeConvertComputer.java      |   71 +
 .../IntegerToInt16TypeConvertComputer.java      |   35 +
 .../IntegerToInt32TypeConvertComputer.java      |   34 +
 .../IntegerToInt64TypeConvertComputer.java      |   34 +
 .../IntegerToInt8TypeConvertComputer.java       |   35 +
 .../asterix/om/util/AsterixAppContextInfo.java  |  126 +
 .../om/util/AsterixClusterProperties.java       |  182 ++
 .../asterix/om/util/AsterixRuntimeUtil.java     |   52 +
 .../om/util/JSONDeserializerForTypes.java       |   91 +
 .../asterix/om/util/NonTaggedFormatUtil.java    |  235 ++
 .../util/ResettableByteArrayOutputStream.java   |   28 +
 .../om/util/container/IObjectFactory.java       |   30 +
 .../asterix/om/util/container/IObjectPool.java  |   36 +
 .../om/util/container/ListObjectPool.java       |   81 +
 .../apache/asterix/om/visitors/IOMVisitor.java  |  110 +
 .../om/visitors/OMPrintToStringVisitor.java     |  296 ++
 .../nontagged/printers/ABinaryPrinterTest.java  |   66 -
 .../om/util/JSONDeserializerForTypesTest.java   |   72 -
 .../nontagged/printers/ABinaryPrinterTest.java  |   66 +
 .../om/util/JSONDeserializerForTypesTest.java   |   72 +
 ...tractAggregateFunctionDynamicDescriptor.java |   27 -
 ...zableAggregateFunctionDynamicDescriptor.java |   28 -
 .../base/SingleFieldFrameTupleReference.java    |   61 -
 .../collections/ListifyAggregateDescriptor.java |   53 -
 .../ListifyAggregateFunctionEvalFactory.java    |   86 -
 .../AbstractScalarAggregateDescriptor.java      |   62 -
 .../scalar/GenericScalarAggregateFunction.java  |   56 -
 .../scalar/ScalarAvgAggregateDescriptor.java    |   36 -
 .../scalar/ScalarCountAggregateDescriptor.java  |   36 -
 .../scalar/ScalarMaxAggregateDescriptor.java    |   36 -
 .../scalar/ScalarMinAggregateDescriptor.java    |   36 -
 .../scalar/ScalarSqlAvgAggregateDescriptor.java |   36 -
 .../ScalarSqlCountAggregateDescriptor.java      |   36 -
 .../scalar/ScalarSqlMaxAggregateDescriptor.java |   36 -
 .../scalar/ScalarSqlMinAggregateDescriptor.java |   36 -
 .../scalar/ScalarSqlSumAggregateDescriptor.java |   36 -
 .../scalar/ScalarSumAggregateDescriptor.java    |   36 -
 ...bstractSerializableAvgAggregateFunction.java |  278 --
 ...tractSerializableCountAggregateFunction.java |  106 -
 ...bstractSerializableSumAggregateFunction.java |  221 --
 .../serializable/std/BufferSerDeUtil.java       |   81 -
 .../std/SerializableAvgAggregateDescriptor.java |   54 -
 .../std/SerializableAvgAggregateFunction.java   |   56 -
 .../SerializableCountAggregateDescriptor.java   |   57 -
 .../std/SerializableCountAggregateFunction.java |   27 -
 ...erializableGlobalAvgAggregateDescriptor.java |   55 -
 .../SerializableGlobalAvgAggregateFunction.java |   57 -
 ...alizableGlobalSqlAvgAggregateDescriptor.java |   55 -
 ...rializableGlobalSqlAvgAggregateFunction.java |   49 -
 ...zableIntermediateAvgAggregateDescriptor.java |   55 -
 ...lizableIntermediateAvgAggregateFunction.java |   57 -
 ...leIntermediateSqlAvgAggregateDescriptor.java |   55 -
 ...ableIntermediateSqlAvgAggregateFunction.java |   49 -
 ...SerializableLocalAvgAggregateDescriptor.java |   54 -
 .../SerializableLocalAvgAggregateFunction.java  |   57 -
 ...ializableLocalSqlAvgAggregateDescriptor.java |   54 -
 ...erializableLocalSqlAvgAggregateFunction.java |   49 -
 ...ializableLocalSqlSumAggregateDescriptor.java |   53 -
 ...SerializableLocalSumAggregateDescriptor.java |   53 -
 .../SerializableSqlAvgAggregateDescriptor.java  |   52 -
 .../SerializableSqlAvgAggregateFunction.java    |   48 -
 ...SerializableSqlCountAggregateDescriptor.java |   57 -
 .../SerializableSqlCountAggregateFunction.java  |   31 -
 .../SerializableSqlSumAggregateDescriptor.java  |   53 -
 .../SerializableSqlSumAggregateFunction.java    |   62 -
 .../std/SerializableSumAggregateDescriptor.java |   53 -
 .../std/SerializableSumAggregateFunction.java   |   70 -
 .../std/AbstractAvgAggregateFunction.java       |  257 --
 .../std/AbstractCountAggregateFunction.java     |   88 -
 .../std/AbstractMinMaxAggregateFunction.java    |  191 --
 .../std/AbstractSumAggregateFunction.java       |  211 --
 .../aggregates/std/AvgAggregateDescriptor.java  |   57 -
 .../aggregates/std/AvgAggregateFunction.java    |   55 -
 .../std/CountAggregateDescriptor.java           |   59 -
 .../aggregates/std/CountAggregateFunction.java  |   34 -
 .../std/GlobalAvgAggregateDescriptor.java       |   58 -
 .../std/GlobalAvgAggregateFunction.java         |   56 -
 .../std/GlobalSqlAvgAggregateDescriptor.java    |   58 -
 .../std/GlobalSqlAvgAggregateFunction.java      |   48 -
 .../std/IntermediateAvgAggregateDescriptor.java |   58 -
 .../std/IntermediateAvgAggregateFunction.java   |   56 -
 .../IntermediateSqlAvgAggregateDescriptor.java  |   57 -
 .../IntermediateSqlAvgAggregateFunction.java    |   47 -
 .../std/LocalAvgAggregateDescriptor.java        |   56 -
 .../std/LocalAvgAggregateFunction.java          |   56 -
 .../std/LocalMaxAggregateDescriptor.java        |   57 -
 .../std/LocalMinAggregateDescriptor.java        |   56 -
 .../std/LocalSqlAvgAggregateDescriptor.java     |   56 -
 .../std/LocalSqlAvgAggregateFunction.java       |   49 -
 .../std/LocalSqlMaxAggregateDescriptor.java     |   57 -
 .../std/LocalSqlMinAggregateDescriptor.java     |   56 -
 .../std/LocalSqlSumAggregateDescriptor.java     |   56 -
 .../std/LocalSumAggregateDescriptor.java        |   56 -
 .../aggregates/std/MaxAggregateDescriptor.java  |   55 -
 .../aggregates/std/MinAggregateDescriptor.java  |   55 -
 .../aggregates/std/MinMaxAggregateFunction.java |   59 -
 .../std/SqlAvgAggregateDescriptor.java          |   57 -
 .../aggregates/std/SqlAvgAggregateFunction.java |   47 -
 .../std/SqlCountAggregateDescriptor.java        |   59 -
 .../std/SqlCountAggregateFunction.java          |   34 -
 .../std/SqlMaxAggregateDescriptor.java          |   55 -
 .../std/SqlMinAggregateDescriptor.java          |   55 -
 .../std/SqlMinMaxAggregateFunction.java         |   53 -
 .../std/SqlSumAggregateDescriptor.java          |   55 -
 .../aggregates/std/SqlSumAggregateFunction.java |   61 -
 .../aggregates/std/SumAggregateDescriptor.java  |   55 -
 .../aggregates/std/SumAggregateFunction.java    |   67 -
 .../stream/EmptyStreamAggregateDescriptor.java  |  103 -
 .../NonEmptyStreamAggregateDescriptor.java      |  101 -
 .../runtime/base/AsterixTupleFilter.java        |   44 -
 .../runtime/base/AsterixTupleFilterFactory.java |   43 -
 .../accessors/CircleCenterAccessor.java         |  117 -
 .../accessors/CircleRadiusAccessor.java         |  112 -
 .../accessors/LineRectanglePolygonAccessor.java |  181 --
 .../accessors/PointXCoordinateAccessor.java     |  113 -
 .../accessors/PointYCoordinateAccessor.java     |  113 -
 .../accessors/TemporalDayAccessor.java          |  154 -
 .../accessors/TemporalHourAccessor.java         |  152 -
 .../accessors/TemporalIntervalEndAccessor.java  |  142 -
 .../TemporalIntervalEndDateAccessor.java        |  128 -
 .../TemporalIntervalEndDatetimeAccessor.java    |  127 -
 .../TemporalIntervalEndTimeAccessor.java        |  127 -
 .../TemporalIntervalStartAccessor.java          |  142 -
 .../TemporalIntervalStartDateAccessor.java      |  127 -
 .../TemporalIntervalStartDatetimeAccessor.java  |  127 -
 .../TemporalIntervalStartTimeAccessor.java      |  126 -
 .../accessors/TemporalMillisecondAccessor.java  |  153 -
 .../accessors/TemporalMinuteAccessor.java       |  153 -
 .../accessors/TemporalMonthAccessor.java        |  155 -
 .../accessors/TemporalSecondAccessor.java       |  153 -
 .../accessors/TemporalYearAccessor.java         |  166 -
 ...AbstractScalarFunctionDynamicDescriptor.java |   29 -
 .../common/AbstractAsterixListIterator.java     |  157 -
 .../common/AccessibleByteArrayEval.java         |   68 -
 .../evaluators/common/AsterixListAccessor.java  |  109 -
 .../common/AsterixOrderedListIterator.java      |   36 -
 .../common/AsterixUnorderedListIterator.java    |   36 -
 .../ClosedRecordConstructorEvalFactory.java     |   97 -
 .../evaluators/common/CreateMBREvalFactory.java |  332 --
 .../runtime/evaluators/common/DoubleArray.java  |   75 -
 .../common/EditDistanceCheckEvaluator.java      |  107 -
 .../common/EditDistanceContainsEvaluator.java   |   58 -
 .../common/EditDistanceEvaluator.java           |  161 -
 .../evaluators/common/FunctionManagerImpl.java  |   65 -
 .../evaluators/common/GramTokensEvaluator.java  |   94 -
 .../common/SimilarityFiltersCache.java          |   53 -
 .../common/SimilarityJaccardCheckEvaluator.java |  129 -
 .../common/SimilarityJaccardEvaluator.java      |  282 --
 .../SimilarityJaccardPrefixEvaluator.java       |  284 --
 .../SimilarityJaccardSortedCheckEvaluator.java  |   42 -
 .../SimilarityJaccardSortedEvaluator.java       |   42 -
 .../runtime/evaluators/common/SpatialUtils.java |  175 -
 .../evaluators/common/WordTokensEvaluator.java  |   65 -
 .../AbstractComparisonEvaluator.java            |  557 ----
 .../comparisons/ComparisonEvalFactory.java      |  379 ---
 ...BinaryBase64StringConstructorDescriptor.java |   49 -
 .../ABinaryHexStringConstructorDescriptor.java  |  110 -
 .../ABooleanConstructorDescriptor.java          |  111 -
 .../ACircleConstructorDescriptor.java           |  107 -
 .../ADateConstructorDescriptor.java             |  133 -
 .../ADateTimeConstructorDescriptor.java         |  129 -
 .../ADayTimeDurationConstructorDescriptor.java  |  114 -
 .../ADoubleConstructorDescriptor.java           |  217 --
 .../ADurationConstructorDescriptor.java         |  109 -
 .../AFloatConstructorDescriptor.java            |  219 --
 .../AInt16ConstructorDescriptor.java            |  126 -
 .../AInt32ConstructorDescriptor.java            |  125 -
 .../AInt64ConstructorDescriptor.java            |  127 -
 .../AInt8ConstructorDescriptor.java             |  126 -
 .../AIntervalFromDateConstructorDescriptor.java |  150 -
 ...tervalFromDateTimeConstructorDescriptor.java |  173 -
 .../AIntervalFromTimeConstructorDescriptor.java |  163 -
 ...ervalStartFromDateConstructorDescriptor.java |  180 -
 ...lStartFromDateTimeConstructorDescriptor.java |  193 --
 ...ervalStartFromTimeConstructorDescriptor.java |  191 --
 .../ALineConstructorDescriptor.java             |  110 -
 .../ANullConstructorDescriptor.java             |   98 -
 .../APoint3DConstructorDescriptor.java          |  106 -
 .../APointConstructorDescriptor.java            |  102 -
 .../APolygonConstructorDescriptor.java          |  103 -
 .../ARectangleConstructorDescriptor.java        |  117 -
 .../AStringConstructorDescriptor.java           |  176 -
 .../ATimeConstructorDescriptor.java             |  121 -
 .../AUUIDFromStringConstructorDescriptor.java   |  217 --
 ...AYearMonthDurationConstructorDescriptor.java |  114 -
 .../ClosedRecordConstructorDescriptor.java      |   52 -
 .../OpenRecordConstructorDescriptor.java        |  113 -
 .../functions/AbstractBinaryStringBoolEval.java |  132 -
 .../AbstractNumericArithmeticEval.java          |  526 ---
 .../functions/AbstractQuadStringStringEval.java |  157 -
 .../functions/AbstractStringContainsEval.java   |   99 -
 .../functions/AbstractTripleStringBoolEval.java |  139 -
 .../AbstractTripleStringStringEval.java         |  171 -
 .../evaluators/functions/AndDescriptor.java     |  110 -
 .../AnyCollectionMemberDescriptor.java          |  149 -
 .../evaluators/functions/BinaryHashMap.java     |  288 --
 .../functions/CastListDescriptor.java           |  104 -
 .../functions/CastRecordDescriptor.java         |   98 -
 .../functions/CodePointToStringDescriptor.java  |  177 -
 .../evaluators/functions/CodepointIterator.java |  101 -
 .../functions/ContainsDescriptor.java           |   95 -
 .../CountHashedGramTokensDescriptor.java        |   61 -
 .../CountHashedWordTokensDescriptor.java        |   61 -
 .../functions/CreateCircleDescriptor.java       |  117 -
 .../functions/CreateLineDescriptor.java         |  120 -
 .../functions/CreateMBRDescriptor.java          |   45 -
 .../functions/CreatePointDescriptor.java        |  117 -
 .../functions/CreatePolygonDescriptor.java      |  140 -
 .../functions/CreateRectangleDescriptor.java    |  125 -
 .../functions/CreateUUIDDescriptor.java         |   78 -
 .../functions/EditDistanceCheckDescriptor.java  |   53 -
 .../EditDistanceContainsDescriptor.java         |   53 -
 .../functions/EditDistanceDescriptor.java       |   54 -
 .../functions/EditDistanceListIsFilterable.java |  144 -
 .../EditDistanceStringIsFilterable.java         |  167 -
 .../functions/EmbedTypeDescriptor.java          |   71 -
 .../functions/EndsWithDescriptor.java           |   91 -
 .../functions/FlowRecordDescriptor.java         |   89 -
 .../evaluators/functions/FuzzyEqDescriptor.java |   45 -
 .../evaluators/functions/GetItemDescriptor.java |  159 -
 .../functions/GramTokensDescriptor.java         |   61 -
 .../functions/HashedGramTokensDescriptor.java   |   61 -
 .../functions/HashedWordTokensDescriptor.java   |   61 -
 .../functions/InjectFailureDescriptor.java      |   93 -
 .../evaluators/functions/IsNullDescriptor.java  |   82 -
 .../functions/IsSystemNullDescriptor.java       |   82 -
 .../evaluators/functions/LenDescriptor.java     |  124 -
 .../evaluators/functions/LikeDescriptor.java    |  200 --
 .../evaluators/functions/NotDescriptor.java     |  101 -
 .../evaluators/functions/NotNullDescriptor.java |   88 -
 .../functions/NumericAbsDescriptor.java         |  159 -
 .../functions/NumericAddDescriptor.java         |   64 -
 .../functions/NumericCaretDescriptor.java       |   76 -
 .../functions/NumericCeilingDescriptor.java     |  160 -
 .../functions/NumericDivideDescriptor.java      |   60 -
 .../functions/NumericFloorDescriptor.java       |  159 -
 .../functions/NumericModuloDescriptor.java      |  195 --
 .../functions/NumericMultiplyDescriptor.java    |   72 -
 .../functions/NumericRoundDescriptor.java       |  161 -
 .../NumericRoundHalfToEven2Descriptor.java      |  198 --
 .../NumericRoundHalfToEvenDescriptor.java       |  162 -
 .../functions/NumericSubDescriptor.java         |   77 -
 .../functions/NumericSubtractDescriptor.java    |  193 --
 .../functions/NumericUnaryMinusDescriptor.java  |  149 -
 .../evaluators/functions/OrDescriptor.java      |  113 -
 .../OrderedListConstructorDescriptor.java       |  139 -
 .../functions/PrefixLenDescriptor.java          |  138 -
 .../functions/PrefixLenJaccardDescriptor.java   |  127 -
 .../evaluators/functions/RegExpDescriptor.java  |  168 -
 .../functions/SimilarityDescriptor.java         |  282 --
 .../SimilarityJaccardCheckDescriptor.java       |   53 -
 .../functions/SimilarityJaccardDescriptor.java  |   54 -
 .../SimilarityJaccardPrefixCheckDescriptor.java |  107 -
 .../SimilarityJaccardPrefixDescriptor.java      |   54 -
 .../SimilarityJaccardSortedCheckDescriptor.java |   54 -
 .../SimilarityJaccardSortedDescriptor.java      |   55 -
 .../functions/SpatialAreaDescriptor.java        |  143 -
 .../functions/SpatialCellDescriptor.java        |  156 -
 .../functions/SpatialDistanceDescriptor.java    |  126 -
 .../functions/SpatialIntersectDescriptor.java   | 1108 -------
 .../functions/StartsWithDescriptor.java         |   84 -
 .../functions/StringConcatDescriptor.java       |  139 -
 .../functions/StringEndWithDescrtiptor.java     |   88 -
 .../functions/StringEqualDescriptor.java        |   83 -
 .../functions/StringEvaluatorUtils.java         |   49 -
 .../functions/StringJoinDescriptor.java         |  138 -
 .../functions/StringLengthDescriptor.java       |  103 -
 .../functions/StringLowerCaseDescriptor.java    |  112 -
 .../functions/StringMatchesDescriptor.java      |  122 -
 .../StringMatchesWithFlagDescriptor.java        |  150 -
 .../functions/StringReplaceDescriptor.java      |  152 -
 .../StringReplaceWithFlagsDescriptor.java       |  174 -
 .../functions/StringStartWithDescrtiptor.java   |   83 -
 .../functions/StringToCodePointDescriptor.java  |  146 -
 .../functions/StringUpperCaseDescriptor.java    |  112 -
 .../functions/Substring2Descriptor.java         |  122 -
 .../functions/SubstringAfterDescriptor.java     |  128 -
 .../functions/SubstringBeforeDescriptor.java    |  125 -
 .../functions/SubstringDescriptor.java          |  182 --
 .../functions/SwitchCaseDescriptor.java         |  120 -
 .../UnorderedListConstructorDescriptor.java     |  138 -
 .../functions/WordTokensDescriptor.java         |   61 -
 .../functions/binary/AbstractCopyEvaluator.java |  105 -
 .../binary/BinaryConcatDescriptor.java          |  107 -
 .../binary/BinaryLengthDescriptor.java          |   82 -
 .../functions/binary/FindBinaryDescriptor.java  |  147 -
 .../binary/FindBinaryFromDescriptor.java        |   70 -
 .../functions/binary/ParseBinaryDescriptor.java |  124 -
 .../functions/binary/PrintBinaryDescriptor.java |  106 -
 .../binary/SubBinaryFromDescriptor.java         |   57 -
 .../binary/SubBinaryFromToDescriptor.java       |  128 -
 .../records/FieldAccessByIndexDescriptor.java   |   50 -
 .../records/FieldAccessByIndexEvalFactory.java  |  137 -
 .../records/FieldAccessByNameDescriptor.java    |   43 -
 .../records/FieldAccessByNameEvalFactory.java   |  111 -
 .../records/FieldAccessNestedDescriptor.java    |   54 -
 .../records/FieldAccessNestedEvalFactory.java   |   68 -
 .../functions/records/FieldAccessUtil.java      |  193 --
 .../records/GetRecordFieldValueDescriptor.java  |   50 -
 .../records/GetRecordFieldValueEvalFactory.java |   98 -
 .../records/GetRecordFieldsDescriptor.java      |   50 -
 .../records/GetRecordFieldsEvalFactory.java     |  101 -
 .../functions/records/RecordFieldsUtil.java     |  314 --
 .../records/RecordMergeDescriptor.java          |  257 --
 .../AbstractIntervalLogicFuncDescriptor.java    |  122 -
 .../AdjustDateTimeForTimeZoneDescriptor.java    |  146 -
 .../AdjustTimeForTimeZoneDescriptor.java        |  146 -
 .../CalendarDuartionFromDateDescriptor.java     |  236 --
 .../CalendarDurationFromDateTimeDescriptor.java |  250 --
 .../temporal/CurrentDateDescriptor.java         |   87 -
 .../temporal/CurrentDateTimeDescriptor.java     |   91 -
 .../temporal/CurrentTimeDescriptor.java         |   93 -
 .../temporal/DateFromDatetimeDescriptor.java    |  123 -
 .../DateFromUnixTimeInDaysDescriptor.java       |  105 -
 .../DatetimeFromDateAndTimeDescriptor.java      |  141 -
 .../DatetimeFromUnixTimeInMsDescriptor.java     |  135 -
 .../DatetimeFromUnixTimeInSecsDescriptor.java   |  136 -
 .../functions/temporal/DayOfWeekDescriptor.java |  150 -
 .../DayTimeDurationComparatorDescriptor.java    |  149 -
 .../temporal/DurationEqualDescriptor.java       |  130 -
 .../DurationFromIntervalDescriptor.java         |  126 -
 .../DurationFromMillisecondsDescriptor.java     |  135 -
 .../temporal/DurationFromMonthsDescriptor.java  |  112 -
 .../temporal/GetDayTimeDurationDescriptor.java  |  121 -
 .../GetOverlappingIntervalDescriptor.java       |  143 -
 .../GetYearMonthDurationDescriptor.java         |  121 -
 .../temporal/IntervalAfterDescriptor.java       |   51 -
 .../temporal/IntervalBeforeDescriptor.java      |   48 -
 .../temporal/IntervalBinDescriptor.java         |  282 --
 .../temporal/IntervalCoveredByDescriptor.java   |   51 -
 .../temporal/IntervalCoversDescriptor.java      |   51 -
 .../temporal/IntervalEndedByDescriptor.java     |   51 -
 .../temporal/IntervalEndsDecriptor.java         |   51 -
 .../functions/temporal/IntervalLogic.java       |  161 -
 .../temporal/IntervalMeetsDescriptor.java       |   51 -
 .../temporal/IntervalMetByDescriptor.java       |   51 -
 .../IntervalOverlappedByDescriptor.java         |   51 -
 .../temporal/IntervalOverlapsDescriptor.java    |   51 -
 .../temporal/IntervalStartedByDescriptor.java   |   51 -
 .../temporal/IntervalStartsDescriptor.java      |   51 -
 ...llisecondsFromDayTimeDurationDescriptor.java |  121 -
 .../MonthsFromYearMonthDurationDescriptor.java  |  122 -
 .../temporal/OverlapBinsDescriptor.java         |  338 --
 .../functions/temporal/OverlapDescriptor.java   |   51 -
 .../functions/temporal/ParseDateDescriptor.java |  168 -
 .../temporal/ParseDateTimeDescriptor.java       |  162 -
 .../functions/temporal/ParseTimeDescriptor.java |  162 -
 .../functions/temporal/PrintDateDescriptor.java |  135 -
 .../temporal/PrintDateTimeDescriptor.java       |  132 -
 .../functions/temporal/PrintTimeDescriptor.java |  133 -
 .../temporal/TimeFromDatetimeDescriptor.java    |  128 -
 .../TimeFromUnixTimeInMsDescriptor.java         |  110 -
 .../YearMonthDurationComparatorDecriptor.java   |  149 -
 .../ExternalBTreeSearchOperatorDescriptor.java  |   56 -
 ...ExternalBTreeSearchOperatorNodePushable.java |   87 -
 .../ExternalRTreeSearchOperatorDescriptor.java  |   53 -
 ...ExternalRTreeSearchOperatorNodePushable.java |   86 -
 .../asterix/runtime/formats/FormatUtils.java    |   23 -
 .../runtime/formats/NonTaggedDataFormat.java    | 1201 -------
 .../job/listener/JobEventListenerFactory.java   |   73 -
 .../runtime/operators/file/ADMDataParser.java   | 1099 -------
 .../operators/file/AbstractDataParser.java      |  351 --
 .../operators/file/AbstractTupleParser.java     |   77 -
 .../file/AsterixTupleParserFactory.java         |  268 --
 .../file/CounterTimerTupleForwardPolicy.java    |  154 -
 .../operators/file/DelimitedDataParser.java     |  172 -
 .../file/FrameFullTupleForwardPolicy.java       |   72 -
 .../runtime/operators/file/IDataParser.java     |   55 -
 .../file/RateContolledParserPolicy.java         |   93 -
 .../file/RateControlledTupleForwardPolicy.java  |   84 -
 .../std/FlushDatasetOperatorDescriptor.java     |   89 -
 .../std/NoTupleSourceRuntimeFactory.java        |   45 -
 ...nningAggregateFunctionDynamicDescriptor.java |   26 -
 .../std/TidRunningAggregateDescriptor.java      |   95 -
 ...tractUnnestingFunctionDynamicDescriptor.java |   26 -
 .../unnestingfunctions/std/RangeDescriptor.java |  116 -
 .../std/ScanCollectionDescriptor.java           |  120 -
 .../std/SubsetCollectionDescriptor.java         |  162 -
 ...tractAggregateFunctionDynamicDescriptor.java |   27 +
 ...zableAggregateFunctionDynamicDescriptor.java |   28 +
 .../base/SingleFieldFrameTupleReference.java    |   61 +
 .../collections/ListifyAggregateDescriptor.java |   53 +
 .../ListifyAggregateFunctionEvalFactory.java    |   86 +
 .../AbstractScalarAggregateDescriptor.java      |   62 +
 .../scalar/GenericScalarAggregateFunction.java  |   56 +
 .../scalar/ScalarAvgAggregateDescriptor.java    |   36 +
 .../scalar/ScalarCountAggregateDescriptor.java  |   36 +
 .../scalar/ScalarMaxAggregateDescriptor.java    |   36 +
 .../scalar/ScalarMinAggregateDescriptor.java    |   36 +
 .../scalar/ScalarSqlAvgAggregateDescriptor.java |   36 +
 .../ScalarSqlCountAggregateDescriptor.java      |   36 +
 .../scalar/ScalarSqlMaxAggregateDescriptor.java |   36 +
 .../scalar/ScalarSqlMinAggregateDescriptor.java |   36 +
 .../scalar/ScalarSqlSumAggregateDescriptor.java |   36 +
 .../scalar/ScalarSumAggregateDescriptor.java    |   36 +
 ...bstractSerializableAvgAggregateFunction.java |  278 ++
 ...tractSerializableCountAggregateFunction.java |  106 +
 ...bstractSerializableSumAggregateFunction.java |  221 ++
 .../serializable/std/BufferSerDeUtil.java       |   81 +
 .../std/SerializableAvgAggregateDescriptor.java |   54 +
 .../std/SerializableAvgAggregateFunction.java   |   56 +
 .../SerializableCountAggregateDescriptor.java   |   57 +
 .../std/SerializableCountAggregateFunction.java |   27 +
 ...erializableGlobalAvgAggregateDescriptor.java |   55 +
 .../SerializableGlobalAvgAggregateFunction.java |   57 +
 ...alizableGlobalSqlAvgAggregateDescriptor.java |   55 +
 ...rializableGlobalSqlAvgAggregateFunction.java |   49 +
 ...zableIntermediateAvgAggregateDescriptor.java |   55 +
 ...lizableIntermediateAvgAggregateFunction.java |   57 +
 ...leIntermediateSqlAvgAggregateDescriptor.java |   55 +
 ...ableIntermediateSqlAvgAggregateFunction.java |   49 +
 ...SerializableLocalAvgAggregateDescriptor.java |   54 +
 .../SerializableLocalAvgAggregateFunction.java  |   57 +
 ...ializableLocalSqlAvgAggregateDescriptor.java |   54 +
 ...erializableLocalSqlAvgAggregateFunction.java |   49 +
 ...ializableLocalSqlSumAggregateDescriptor.java |   53 +
 ...SerializableLocalSumAggregateDescriptor.java |   53 +
 .../SerializableSqlAvgAggregateDescriptor.java  |   52 +
 .../SerializableSqlAvgAggregateFunction.java    |   48 +
 ...SerializableSqlCountAggregateDescriptor.java |   57 +
 .../SerializableSqlCountAggregateFunction.java  |   31 +
 .../SerializableSqlSumAggregateDescriptor.java  |   53 +
 .../SerializableSqlSumAggregateFunction.java    |   62 +
 .../std/SerializableSumAggregateDescriptor.java |   53 +
 .../std/SerializableSumAggregateFunction.java   |   70 +
 .../std/AbstractAvgAggregateFunction.java       |  257 ++
 .../std/AbstractCountAggregateFunction.java     |   88 +
 .../std/AbstractMinMaxAggregateFunction.java    |  191 ++
 .../std/AbstractSumAggregateFunction.java       |  211 ++
 .../aggregates/std/AvgAggregateDescriptor.java  |   57 +
 .../aggregates/std/AvgAggregateFunction.java    |   55 +
 .../std/CountAggregateDescriptor.java           |   59 +
 .../aggregates/std/CountAggregateFunction.java  |   34 +
 .../std/GlobalAvgAggregateDescriptor.java       |   58 +
 .../std/GlobalAvgAggregateFunction.java         |   56 +
 .../std/GlobalSqlAvgAggregateDescriptor.java    |   58 +
 .../std/GlobalSqlAvgAggregateFunction.java      |   48 +
 .../std/IntermediateAvgAggregateDescriptor.java |   58 +
 .../std/IntermediateAvgAggregateFunction.java   |   56 +
 .../IntermediateSqlAvgAggregateDescriptor.java  |   57 +
 .../IntermediateSqlAvgAggregateFunction.java    |   47 +
 .../std/LocalAvgAggregateDescriptor.java        |   56 +
 .../std/LocalAvgAggregateFunction.java          |   56 +
 .../std/LocalMaxAggregateDescriptor.java        |   57 +
 .../std/LocalMinAggregateDescriptor.java        |   56 +
 .../std/LocalSqlAvgAggregateDescriptor.java     |   56 +
 .../std/LocalSqlAvgAggregateFunction.java       |   49 +
 .../std/LocalSqlMaxAggregateDescriptor.java     |   57 +
 .../std/LocalSqlMinAggregateDescriptor.java     |   56 +
 .../std/LocalSqlSumAggregateDescriptor.java     |   56 +
 .../std/LocalSumAggregateDescriptor.java        |   56 +
 .../aggregates/std/MaxAggregateDescriptor.java  |   55 +
 .../aggregates/std/MinAggregateDescriptor.java  |   55 +
 .../aggregates/std/MinMaxAggregateFunction.java |   59 +
 .../std/SqlAvgAggregateDescriptor.java          |   57 +
 .../aggregates/std/SqlAvgAggregateFunction.java |   47 +
 .../std/SqlCountAggregateDescriptor.java        |   59 +
 .../std/SqlCountAggregateFunction.java          |   34 +
 .../std/SqlMaxAggregateDescriptor.java          |   55 +
 .../std/SqlMinAggregateDescriptor.java          |   55 +
 .../std/SqlMinMaxAggregateFunction.java         |   53 +
 .../std/SqlSumAggregateDescriptor.java          |   55 +
 .../aggregates/std/SqlSumAggregateFunction.java |   61 +
 .../aggregates/std/SumAggregateDescriptor.java  |   55 +
 .../aggregates/std/SumAggregateFunction.java    |   67 +
 .../stream/EmptyStreamAggregateDescriptor.java  |  103 +
 .../NonEmptyStreamAggregateDescriptor.java      |  101 +
 .../runtime/base/AsterixTupleFilter.java        |   44 +
 .../runtime/base/AsterixTupleFilterFactory.java |   43 +
 .../accessors/CircleCenterAccessor.java         |  117 +
 .../accessors/CircleRadiusAccessor.java         |  112 +
 .../accessors/LineRectanglePolygonAccessor.java |  181 ++
 .../accessors/PointXCoordinateAccessor.java     |  113 +
 .../accessors/PointYCoordinateAccessor.java     |  113 +
 .../accessors/TemporalDayAccessor.java          |  154 +
 .../accessors/TemporalHourAccessor.java         |  152 +
 .../accessors/TemporalIntervalEndAccessor.java  |  142 +
 .../TemporalIntervalEndDateAccessor.java        |  128 +
 .../TemporalIntervalEndDatetimeAccessor.java    |  127 +
 .../TemporalIntervalEndTimeAccessor.java        |  127 +
 .../TemporalIntervalStartAccessor.java          |  142 +
 .../TemporalIntervalStartDateAccessor.java      |  127 +
 .../TemporalIntervalStartDatetimeAccessor.java  |  127 +
 .../TemporalIntervalStartTimeAccessor.java      |  126 +
 .../accessors/TemporalMillisecondAccessor.java  |  153 +
 .../accessors/TemporalMinuteAccessor.java       |  153 +
 .../accessors/TemporalMonthAccessor.java        |  155 +
 .../accessors/TemporalSecondAccessor.java       |  153 +
 .../accessors/TemporalYearAccessor.java         |  166 +
 ...AbstractScalarFunctionDynamicDescriptor.java |   29 +
 .../common/AbstractAsterixListIterator.java     |  157 +
 .../common/AccessibleByteArrayEval.java         |   68 +
 .../evaluators/common/AsterixListAccessor.java  |  109 +
 .../common/AsterixOrderedListIterator.java      |   36 +
 .../common/AsterixUnorderedListIterator.java    |   36 +
 .../ClosedRecordConstructorEvalFactory.java     |   97 +
 .../evaluators/common/CreateMBREvalFactory.java |  332 ++
 .../runtime/evaluators/common/DoubleArray.java  |   75 +
 .../common/EditDistanceCheckEvaluator.java      |  107 +
 .../common/EditDistanceContainsEvaluator.java   |   58 +
 .../common/EditDistanceEvaluator.java           |  161 +
 .../evaluators/common/FunctionManagerImpl.java  |   65 +
 .../evaluators/common/GramTokensEvaluator.java  |   94 +
 .../common/SimilarityFiltersCache.java          |   53 +
 .../common/SimilarityJaccardCheckEvaluator.java |  129 +
 .../common/SimilarityJaccardEvaluator.java      |  282 ++
 .../SimilarityJaccardPrefixEvaluator.java       |  284 ++
 .../SimilarityJaccardSortedCheckEvaluator.java  |   42 +
 .../SimilarityJaccardSortedEvaluator.java       |   42 +
 .../runtime/evaluators/common/SpatialUtils.java |  175 +
 .../evaluators/common/WordTokensEvaluator.java  |   65 +
 .../AbstractComparisonEvaluator.java            |  557 ++++
 .../comparisons/ComparisonEvalFactory.java      |  379 +++
 ...BinaryBase64StringConstructorDescriptor.java |   49 +
 .../ABinaryHexStringConstructorDescriptor.java  |  110 +
 .../ABooleanConstructorDescriptor.java          |  111 +
 .../ACircleConstructorDescriptor.java           |  107 +
 .../ADateConstructorDescriptor.java             |  133 +
 .../ADateTimeConstructorDescriptor.java         |  129 +
 .../ADayTimeDurationConstructorDescriptor.java  |  114 +
 .../ADoubleConstructorDescriptor.java           |  217 ++
 .../ADurationConstructorDescriptor.java         |  109 +
 .../AFloatConstructorDescriptor.java            |  219 ++
 .../AInt16ConstructorDescriptor.java            |  126 +
 .../AInt32ConstructorDescriptor.java            |  125 +
 .../AInt64ConstructorDescriptor.java            |  127 +
 .../AInt8ConstructorDescriptor.java             |  126 +
 .../AIntervalFromDateConstructorDescriptor.java |  150 +
 ...tervalFromDateTimeConstructorDescriptor.java |  173 +
 .../AIntervalFromTimeConstructorDescriptor.java |  163 +
 ...ervalStartFromDateConstructorDescriptor.java |  180 +
 ...lStartFromDateTimeConstructorDescriptor.java |  193 ++
 ...ervalStartFromTimeConstructorDescriptor.java |  191 ++
 .../ALineConstructorDescriptor.java             |  110 +
 .../ANullConstructorDescriptor.java             |   98 +
 .../APoint3DConstructorDescriptor.java          |  106 +
 .../APointConstructorDescriptor.java            |  102 +
 .../APolygonConstructorDescriptor.java          |  103 +
 .../ARectangleConstructorDescriptor.java        |  117 +
 .../AStringConstructorDescriptor.java           |  176 +
 .../ATimeConstructorDescriptor.java             |  121 +
 .../AUUIDFromStringConstructorDescriptor.java   |  217 ++
 ...AYearMonthDurationConstructorDescriptor.java |  114 +
 .../ClosedRecordConstructorDescriptor.java      |   52 +
 .../OpenRecordConstructorDescriptor.java        |  113 +
 .../functions/AbstractBinaryStringBoolEval.java |  132 +
 .../AbstractNumericArithmeticEval.java          |  526 +++
 .../functions/AbstractQuadStringStringEval.java |  157 +
 .../functions/AbstractStringContainsEval.java   |   99 +
 .../functions/AbstractTripleStringBoolEval.java |  139 +
 .../AbstractTripleStringStringEval.java         |  171 +
 .../evaluators/functions/AndDescriptor.java     |  110 +
 .../AnyCollectionMemberDescriptor.java          |  149 +
 .../evaluators/functions/BinaryHashMap.java     |  288 ++
 .../functions/CastListDescriptor.java           |  104 +
 .../functions/CastRecordDescriptor.java         |   98 +
 .../functions/CodePointToStringDescriptor.java  |  177 +
 .../evaluators/functions/CodepointIterator.java |  101 +
 .../functions/ContainsDescriptor.java           |   95 +
 .../CountHashedGramTokensDescriptor.java        |   61 +
 .../CountHashedWordTokensDescriptor.java        |   61 +
 .../functions/CreateCircleDescriptor.java       |  117 +
 .../functions/CreateLineDescriptor.java         |  120 +
 .../functions/CreateMBRDescriptor.java          |   45 +
 .../functions/CreatePointDescriptor.java        |  117 +
 .../functions/CreatePolygonDescriptor.java      |  140 +
 .../functions/CreateRectangleDescriptor.java    |  125 +
 .../functions/CreateUUIDDescriptor.java         |   78 +
 .../functions/EditDistanceCheckDescriptor.java  |   53 +
 .../EditDistanceContainsDescriptor.java         |   53 +
 .../functions/EditDistanceDescriptor.java       |   54 +
 .../functions/EditDistanceListIsFilterable.java |  144 +
 .../EditDistanceStringIsFilterable.java         |  167 +
 .../functions/EmbedTypeDescriptor.java          |   71 +
 .../functions/EndsWithDescriptor.java           |   91 +
 .../functions/FlowRecordDescriptor.java         |   89 +
 .../evaluators/functions/FuzzyEqDescriptor.java |   45 +
 .../evaluators/functions/GetItemDescriptor.java |  159 +
 .../functions/GramTokensDescriptor.java         |   61 +
 .../functions/HashedGramTokensDescriptor.java   |   61 +
 .../functions/HashedWordTokensDescriptor.java   |   61 +
 .../functions/InjectFailureDescriptor.java      |   93 +
 .../evaluators/functions/IsNullDescriptor.java  |   82 +
 .../functions/IsSystemNullDescriptor.java       |   82 +
 .../evaluators/functions/LenDescriptor.java     |  124 +
 .../evaluators/functions/LikeDescriptor.java    |  200 ++
 .../evaluators/functions/NotDescriptor.java     |  101 +
 .../evaluators/functions/NotNullDescriptor.java |   88 +
 .../functions/NumericAbsDescriptor.java         |  159 +
 .../functions/NumericAddDescriptor.java         |   64 +
 .../functions/NumericCaretDescriptor.java       |   76 +
 .../functions/NumericCeilingDescriptor.java     |  160 +
 .../functions/NumericDivideDescriptor.java      |   60 +
 .../functions/NumericFloorDescriptor.java       |  159 +
 .../functions/NumericModuloDescriptor.java      |  195 ++
 .../functions/NumericMultiplyDescriptor.java    |   72 +
 .../functions/NumericRoundDescriptor.java       |  161 +
 .../NumericRoundHalfToEven2Descriptor.java      |  198 ++
 .../NumericRoundHalfToEvenDescriptor.java       |  162 +
 .../functions/NumericSubDescriptor.java         |   77 +
 .../functions/NumericSubtractDescriptor.java    |  193 ++
 .../functions/NumericUnaryMinusDescriptor.java  |  149 +
 .../evaluators/functions/OrDescriptor.java      |  113 +
 .../OrderedListConstructorDescriptor.java       |  139 +
 .../functions/PrefixLenDescriptor.java          |  138 +
 .../functions/PrefixLenJaccardDescriptor.java   |  127 +
 .../evaluators/functions/RegExpDescriptor.java  |  168 +
 .../functions/SimilarityDescriptor.java         |  282 ++
 .../SimilarityJaccardCheckDescriptor.java       |   53 +
 .../functions/SimilarityJaccardDescriptor.java  |   54 +
 .../SimilarityJaccardPrefixCheckDescriptor.java |  107 +
 .../SimilarityJaccardPrefixDescriptor.java      |   54 +
 .../SimilarityJaccardSortedCheckDescriptor.java |   54 +
 .../SimilarityJaccardSortedDescriptor.java      |   55 +
 .../functions/SpatialAreaDescriptor.java        |  143 +
 .../functions/SpatialCellDescriptor.java        |  156 +
 .../functions/SpatialDistanceDescriptor.java    |  126 +
 .../functions/SpatialIntersectDescriptor.java   | 1108 +++++++
 .../functions/StartsWithDescriptor.java         |   84 +
 .../functions/StringConcatDescriptor.java       |  139 +
 .../functions/StringEndWithDescrtiptor.java     |   88 +
 .../functions/StringEqualDescriptor.java        |   83 +
 .../functions/StringEvaluatorUtils.java         |   49 +
 .../functions/StringJoinDescriptor.java         |  138 +
 .../functions/StringLengthDescriptor.java       |  103 +
 .../functions/StringLowerCaseDescriptor.java    |  112 +
 .../functions/StringMatchesDescriptor.java      |  122 +
 .../StringMatchesWithFlagDescriptor.java        |  150 +
 .../functions/StringReplaceDescriptor.java      |  152 +
 .../StringReplaceWithFlagsDescriptor.java       |  174 +
 .../functions/StringStartWithDescrtiptor.java   |   83 +
 .../functions/StringToCodePointDescriptor.java  |  146 +
 .../functions/StringUpperCaseDescriptor.java    |  112 +
 .../functions/Substring2Descriptor.java         |  122 +
 .../functions/SubstringAfterDescriptor.java     |  128 +
 .../functions/SubstringBeforeDescriptor.java    |  125 +
 .../functions/SubstringDescriptor.java          |  182 ++
 .../functions/SwitchCaseDescriptor.java         |  120 +
 .../UnorderedListConstructorDescriptor.java     |  138 +
 .../functions/WordTokensDescriptor.java         |   61 +
 .../functions/binary/AbstractCopyEvaluator.java |  105 +
 .../binary/BinaryConcatDescriptor.java          |  107 +
 .../binary/BinaryLengthDescriptor.java          |   82 +
 .../functions/binary/FindBinaryDescriptor.java  |  147 +
 .../binary/FindBinaryFromDescriptor.java        |   70 +
 .../functions/binary/ParseBinaryDescriptor.java |  124 +
 .../functions/binary/PrintBinaryDescriptor.java |  106 +
 .../binary/SubBinaryFromDescriptor.java         |   57 +
 .../binary/SubBinaryFromToDescriptor.java       |  128 +
 .../records/FieldAccessByIndexDescriptor.java   |   50 +
 .../records/FieldAccessByIndexEvalFactory.java  |  137 +
 .../records/FieldAccessByNameDescriptor.java    |   43 +
 .../records/FieldAccessByNameEvalFactory.java   |  111 +
 .../records/FieldAccessNestedDescriptor.java    |   54 +
 .../records/FieldAccessNestedEvalFactory.java   |   68 +
 .../functions/records/FieldAccessUtil.java      |  193 ++
 .../records/GetRecordFieldValueDescriptor.java  |   50 +
 .../records/GetRecordFieldValueEvalFactory.java |   98 +
 .../records/GetRecordFieldsDescriptor.java      |   50 +
 .../records/GetRecordFieldsEvalFactory.java     |  101 +
 .../functions/records/RecordFieldsUtil.java     |  314 ++
 .../records/RecordMergeDescriptor.java          |  257 ++
 .../AbstractIntervalLogicFuncDescriptor.java    |  122 +
 .../AdjustDateTimeForTimeZoneDescriptor.java    |  146 +
 .../AdjustTimeForTimeZoneDescriptor.java        |  146 +
 .../CalendarDuartionFromDateDescriptor.java     |  236 ++
 .../CalendarDurationFromDateTimeDescriptor.java |  250 ++
 .../temporal/CurrentDateDescriptor.java         |   87 +
 .../temporal/CurrentDateTimeDescriptor.java     |   91 +
 .../temporal/CurrentTimeDescriptor.java         |   93 +
 .../temporal/DateFromDatetimeDescriptor.java    |  123 +
 .../DateFromUnixTimeInDaysDescriptor.java       |  105 +
 .../DatetimeFromDateAndTimeDescriptor.java      |  141 +
 .../DatetimeFromUnixTimeInMsDescriptor.java     |  135 +
 .../DatetimeFromUnixTimeInSecsDescriptor.java   |  136 +
 .../functions/temporal/DayOfWeekDescriptor.java |  150 +
 .../DayTimeDurationComparatorDescriptor.java    |  149 +
 .../temporal/DurationEqualDescriptor.java       |  130 +
 .../DurationFromIntervalDescriptor.java         |  126 +
 .../DurationFromMillisecondsDescriptor.java     |  135 +
 .../temporal/DurationFromMonthsDescriptor.java  |  112 +
 .../temporal/GetDayTimeDurationDescriptor.java  |  121 +
 .../GetOverlappingIntervalDescriptor.java       |  143 +
 .../GetYearMonthDurationDescriptor.java         |  121 +
 .../temporal/IntervalAfterDescriptor.java       |   51 +
 .../temporal/IntervalBeforeDescriptor.java      |   48 +
 .../temporal/IntervalBinDescriptor.java         |  282 ++
 .../temporal/IntervalCoveredByDescriptor.java   |   51 +
 .../temporal/IntervalCoversDescriptor.java      |   51 +
 .../temporal/IntervalEndedByDescriptor.java     |   51 +
 .../temporal/IntervalEndsDecriptor.java         |   51 +
 .../functions/temporal/IntervalLogic.java       |  161 +
 .../temporal/IntervalMeetsDescriptor.java       |   51 +
 .../temporal/IntervalMetByDescriptor.java       |   51 +
 .../IntervalOverlappedByDescriptor.java         |   51 +
 .../temporal/IntervalOverlapsDescriptor.java    |   51 +
 .../temporal/IntervalStartedByDescriptor.java   |   51 +
 .../temporal/IntervalStartsDescriptor.java      |   51 +
 ...llisecondsFromDayTimeDurationDescriptor.java |  121 +
 .../MonthsFromYearMonthDurationDescriptor.java  |  122 +
 .../temporal/OverlapBinsDescriptor.java         |  338 ++
 .../functions/temporal/OverlapDescriptor.java   |   51 +
 .../functions/temporal/ParseDateDescriptor.java |  168 +
 .../temporal/ParseDateTimeDescriptor.java       |  162 +
 .../functions/temporal/ParseTimeDescriptor.java |  162 +
 .../functions/temporal/PrintDateDescriptor.java |  135 +
 .../temporal/PrintDateTimeDescriptor.java       |  132 +
 .../functions/temporal/PrintTimeDescriptor.java |  133 +
 .../temporal/TimeFromDatetimeDescriptor.java    |  128 +
 .../TimeFromUnixTimeInMsDescriptor.java         |  110 +
 .../YearMonthDurationComparatorDecriptor.java   |  149 +
 .../ExternalBTreeSearchOperatorDescriptor.java  |   56 +
 ...ExternalBTreeSearchOperatorNodePushable.java |   87 +
 .../ExternalRTreeSearchOperatorDescriptor.java  |   53 +
 ...ExternalRTreeSearchOperatorNodePushable.java |   86 +
 .../asterix/runtime/formats/FormatUtils.java    |   23 +
 .../runtime/formats/NonTaggedDataFormat.java    | 1201 +++++++
 .../job/listener/JobEventListenerFactory.java   |   73 +
 .../runtime/operators/file/ADMDataParser.java   | 1099 +++++++
 .../operators/file/AbstractDataParser.java      |  351 ++
 .../operators/file/AbstractTupleParser.java     |   77 +
 .../file/AsterixTupleParserFactory.java         |  268 ++
 .../file/CounterTimerTupleForwardPolicy.java    |  154 +
 .../operators/file/DelimitedDataParser.java     |  172 +
 .../file/FrameFullTupleForwardPolicy.java       |   72 +
 .../runtime/operators/file/IDataParser.java     |   55 +
 .../file/RateContolledParserPolicy.java         |   93 +
 .../file/RateControlledTupleForwardPolicy.java  |   84 +
 .../std/FlushDatasetOperatorDescriptor.java     |   89 +
 .../std/NoTupleSourceRuntimeFactory.java        |   45 +
 ...nningAggregateFunctionDynamicDescriptor.java |   26 +
 .../std/TidRunningAggregateDescriptor.java      |   95 +
 ...tractUnnestingFunctionDynamicDescriptor.java |   26 +
 .../unnestingfunctions/std/RangeDescriptor.java |  116 +
 .../std/ScanCollectionDescriptor.java           |  120 +
 .../std/SubsetCollectionDescriptor.java         |  162 +
 .../testframework/context/TestCaseContext.java  |  229 --
 .../testframework/context/TestFileContext.java  |   58 -
 .../testframework/xml/TestSuiteParser.java      |   29 -
 .../testframework/context/TestCaseContext.java  |  229 ++
 .../testframework/context/TestFileContext.java  |   58 +
 .../testframework/xml/TestSuiteParser.java      |   29 +
 .../asterix/tools/datagen/AdgClientDriver.java  |   48 -
 .../ics/asterix/tools/datagen/AdmDataGen.java   | 1015 ------
 .../asterix/tools/datagen/CustOrdDataGen.java   |  472 ---
 .../ics/asterix/tools/datagen/EventDataGen.java |  233 --
 .../tools/external/data/DataGenerator.java      | 1184 -------
 .../ExternalFilesIndexOperatorDescriptor.java   |  161 -
 .../tools/external/data/GULongIDGenerator.java  |   46 -
 .../external/data/GenericSocketFeedAdapter.java |  119 -
 .../data/GenericSocketFeedAdapterFactory.java   |  168 -
 .../RateControlledFileSystemBasedAdapter.java   |   65 -
 ...ControlledFileSystemBasedAdapterFactory.java |  127 -
 .../external/data/SocketClientAdapter.java      |  110 -
 .../data/SocketClientAdapterFactory.java        |   89 -
 .../tools/external/data/TweetGenerator.java     |  152 -
 .../data/TwitterFirehoseFeedAdapter.java        |  189 --
 .../data/TwitterFirehoseFeedAdapterFactory.java |  120 -
 .../ics/asterix/tools/tbltoadm/TblToAdm.java    |   93 -
 .../tools/translator/ADGenDmlTranslator.java    |   87 -
 .../asterix/tools/datagen/AdgClientDriver.java  |   48 +
 .../asterix/tools/datagen/AdmDataGen.java       | 1015 ++++++
 .../asterix/tools/datagen/CustOrdDataGen.java   |  472 +++
 .../asterix/tools/datagen/EventDataGen.java     |  233 ++
 .../tools/external/data/DataGenerator.java      | 1184 +++++++
 .../ExternalFilesIndexOperatorDescriptor.java   |  161 +
 .../tools/external/data/GULongIDGenerator.java  |   46 +
 .../external/data/GenericSocketFeedAdapter.java |  119 +
 .../data/GenericSocketFeedAdapterFactory.java   |  168 +
 .../RateControlledFileSystemBasedAdapter.java   |   65 +
 ...ControlledFileSystemBasedAdapterFactory.java |  127 +
 .../external/data/SocketClientAdapter.java      |  110 +
 .../data/SocketClientAdapterFactory.java        |   89 +
 .../tools/external/data/TweetGenerator.java     |  152 +
 .../data/TwitterFirehoseFeedAdapter.java        |  189 ++
 .../data/TwitterFirehoseFeedAdapterFactory.java |  120 +
 .../apache/asterix/tools/tbltoadm/TblToAdm.java |   93 +
 .../tools/translator/ADGenDmlTranslator.java    |   87 +
 .../ics/asterix/tools/test/AdmDataGenTest.java  |  211 --
 .../asterix/tools/test/AdmDataGenTest.java      |  211 ++
 ...tractIndexModificationOperationCallback.java |   72 -
 ...maryIndexInstantSearchOperationCallback.java |   72 -
 ...exInstantSearchOperationCallbackFactory.java |   52 -
 ...imaryIndexModificationOperationCallback.java |   60 -
 ...dexModificationOperationCallbackFactory.java |   71 -
 .../PrimaryIndexOperationTrackerProvider.java   |   41 -
 .../PrimaryIndexSearchOperationCallback.java    |   64 -
 ...maryIndexSearchOperationCallbackFactory.java |   52 -
 ...ndaryIndexModificationOperationCallback.java |   58 -
 ...dexModificationOperationCallbackFactory.java |   68 -
 .../SecondaryIndexOperationTrackerProvider.java |   41 -
 .../SecondaryIndexSearchOperationCallback.java  |   53 -
 ...daryIndexSearchOperationCallbackFactory.java |   32 -
 ...tasetIndexModificationOperationCallback.java |   51 -
 ...dexModificationOperationCallbackFactory.java |   67 -
 ...dexModificationOperationCallbackFactory.java |   67 -
 .../AbstractLSMLocalResourceMetadata.java       |   41 -
 .../ExternalBTreeLocalResourceMetadata.java     |   63 -
 ...rnalBTreeWithBuddyLocalResourceMetadata.java |   75 -
 .../ExternalRTreeLocalResourceMetadata.java     |   79 -
 .../resource/LSMBTreeLocalResourceMetadata.java |   85 -
 .../LSMInvertedIndexLocalResourceMetadata.java  |  123 -
 .../resource/LSMRTreeLocalResourceMetadata.java |  101 -
 .../PersistentLocalResourceFactory.java         |   35 -
 .../PersistentLocalResourceFactoryProvider.java |   36 -
 .../PersistentLocalResourceRepository.java      |  322 --
 ...ersistentLocalResourceRepositoryFactory.java |   33 -
 .../service/locking/ConcurrentLockManager.java  | 1231 -------
 .../service/locking/DatasetLockInfo.java        |  532 ---
 .../service/locking/DeadlockDetector.java       |  251 --
 .../service/locking/DummyLockManager.java       |   89 -
 .../service/locking/EntityInfoManager.java      |  726 -----
 .../service/locking/EntityLockInfoManager.java  |  823 -----
 .../service/locking/ILockHashTable.java         |   35 -
 .../management/service/locking/ILockMatrix.java |   44 -
 .../management/service/locking/Job.json         |   24 -
 .../management/service/locking/JobInfo.java     |  330 --
 .../management/service/locking/LockManager.java | 2285 -------------
 .../LockManagerDeterministicUnitTest.java       |  662 ----
 .../locking/LockManagerRandomUnitTest.java      |  633 ----
 .../service/locking/LockManagerStats.java       |   71 -
 .../locking/LockMgrLatchHandlerException.java   |   32 -
 .../management/service/locking/LockRequestFile  |   11 -
 .../service/locking/LockRequestTracker.java     |   71 -
 .../management/service/locking/LockWaiter.java  |  145 -
 .../service/locking/LockWaiterManager.java      |  399 ---
 .../service/locking/PrimitiveIntHashMap.java    |  591 ----
 .../management/service/locking/Request.json     |   34 -
 .../management/service/locking/Resource.json    |   37 -
 .../locking/TestRuntimeContextProvider.java     |  139 -
 .../service/locking/TimeOutDetector.java        |   97 -
 .../management/service/logging/LogManager.java  |  517 ---
 .../management/service/logging/LogPage.java     |  281 --
 .../service/logging/LogPageReader.java          |   46 -
 .../management/service/logging/LogReader.java   |  189 --
 .../service/recovery/CheckpointObject.java      |   68 -
 .../service/recovery/CheckpointThread.java      |   93 -
 .../service/recovery/RecoveryManager.java       |  851 -----
 .../AsterixRuntimeComponentsProvider.java       |   75 -
 .../service/transaction/DatasetIdFactory.java   |   39 -
 .../transaction/FieldsHashValueGenerator.java   |   37 -
 .../service/transaction/JobIdFactory.java       |   34 -
 .../service/transaction/MutableResourceId.java  |   44 -
 .../service/transaction/TransactionContext.java |  236 --
 .../TransactionManagementConstants.java         |   63 -
 .../service/transaction/TransactionManager.java |  202 --
 .../transaction/TransactionSubsystem.java       |   91 -
 .../TransactionSubsystemProvider.java           |   35 -
 ...tractIndexModificationOperationCallback.java |   72 +
 ...maryIndexInstantSearchOperationCallback.java |   72 +
 ...exInstantSearchOperationCallbackFactory.java |   52 +
 ...imaryIndexModificationOperationCallback.java |   60 +
 ...dexModificationOperationCallbackFactory.java |   71 +
 .../PrimaryIndexOperationTrackerProvider.java   |   41 +
 .../PrimaryIndexSearchOperationCallback.java    |   64 +
 ...maryIndexSearchOperationCallbackFactory.java |   52 +
 ...ndaryIndexModificationOperationCallback.java |   58 +
 ...dexModificationOperationCallbackFactory.java |   68 +
 .../SecondaryIndexOperationTrackerProvider.java |   41 +
 .../SecondaryIndexSearchOperationCallback.java  |   53 +
 ...daryIndexSearchOperationCallbackFactory.java |   32 +
 ...tasetIndexModificationOperationCallback.java |   51 +
 ...dexModificationOperationCallbackFactory.java |   67 +
 ...dexModificationOperationCallbackFactory.java |   67 +
 .../AbstractLSMLocalResourceMetadata.java       |   41 +
 .../ExternalBTreeLocalResourceMetadata.java     |   63 +
 ...rnalBTreeWithBuddyLocalResourceMetadata.java |   75 +
 .../ExternalRTreeLocalResourceMetadata.java     |   79 +
 .../resource/LSMBTreeLocalResourceMetadata.java |   85 +
 .../LSMInvertedIndexLocalResourceMetadata.java  |  123 +
 .../resource/LSMRTreeLocalResourceMetadata.java |  101 +
 .../PersistentLocalResourceFactory.java         |   35 +
 .../PersistentLocalResourceFactoryProvider.java |   36 +
 .../PersistentLocalResourceRepository.java      |  322 ++
 ...ersistentLocalResourceRepositoryFactory.java |   33 +
 .../service/locking/ConcurrentLockManager.java  | 1231 +++++++
 .../service/locking/DatasetLockInfo.java        |  532 +++
 .../service/locking/DeadlockDetector.java       |  251 ++
 .../service/locking/DummyLockManager.java       |   89 +
 .../service/locking/EntityInfoManager.java      |  726 +++++
 .../service/locking/EntityLockInfoManager.java  |  823 +++++
 .../service/locking/ILockHashTable.java         |   35 +
 .../management/service/locking/ILockMatrix.java |   44 +
 .../management/service/locking/Job.json         |   24 +
 .../management/service/locking/JobInfo.java     |  330 ++
 .../management/service/locking/LockManager.java | 2285 +++++++++++++
 .../LockManagerDeterministicUnitTest.java       |  662 ++++
 .../locking/LockManagerRandomUnitTest.java      |  633 ++++
 .../service/locking/LockManagerStats.java       |   71 +
 .../locking/LockMgrLatchHandlerException.java   |   32 +
 .../management/service/locking/LockRequestFile  |   11 +
 .../service/locking/LockRequestTracker.java     |   71 +
 .../management/service/locking/LockWaiter.java  |  145 +
 .../service/locking/LockWaiterManager.java      |  399 +++
 .../service/locking/PrimitiveIntHashMap.java    |  591 ++++
 .../management/service/locking/Request.json     |   34 +
 .../management/service/locking/Resource.json    |   37 +
 .../locking/TestRuntimeContextProvider.java     |  139 +
 .../service/locking/TimeOutDetector.java        |   97 +
 .../management/service/logging/LogManager.java  |  517 +++
 .../management/service/logging/LogPage.java     |  281 ++
 .../service/logging/LogPageReader.java          |   46 +
 .../management/service/logging/LogReader.java   |  189 ++
 .../service/recovery/CheckpointObject.java      |   68 +
 .../service/recovery/CheckpointThread.java      |   93 +
 .../service/recovery/RecoveryManager.java       |  851 +++++
 .../AsterixRuntimeComponentsProvider.java       |   75 +
 .../service/transaction/DatasetIdFactory.java   |   39 +
 .../transaction/FieldsHashValueGenerator.java   |   37 +
 .../service/transaction/JobIdFactory.java       |   34 +
 .../service/transaction/MutableResourceId.java  |   44 +
 .../service/transaction/TransactionContext.java |  236 ++
 .../TransactionManagementConstants.java         |   63 +
 .../service/transaction/TransactionManager.java |  202 ++
 .../transaction/TransactionSubsystem.java       |   91 +
 .../TransactionSubsystemProvider.java           |   35 +
 .../edu/uci/ics/asterix/aoya/AConstants.java    |  116 -
 .../asterix/aoya/AsterixApplicationMaster.java  | 1288 --------
 .../uci/ics/asterix/aoya/AsterixYARNClient.java | 1387 --------
 .../java/edu/uci/ics/asterix/aoya/Deleter.java  |   45 -
 .../edu/uci/ics/asterix/aoya/HDFSBackup.java    |  108 -
 .../java/edu/uci/ics/asterix/aoya/Utils.java    |  476 ---
 .../org/apache/asterix/aoya/AConstants.java     |  116 +
 .../asterix/aoya/AsterixApplicationMaster.java  | 1288 ++++++++
 .../apache/asterix/aoya/AsterixYARNClient.java  | 1387 ++++++++
 .../java/org/apache/asterix/aoya/Deleter.java   |   45 +
 .../org/apache/asterix/aoya/HDFSBackup.java     |  108 +
 .../java/org/apache/asterix/aoya/Utils.java     |  476 +++
 .../aoya/test/AsterixYARNInstanceUtil.java      |  121 -
 .../aoya/test/AsterixYARNLibraryTestIT.java     |  104 -
 .../aoya/test/AsterixYARNLifecycleIT.java       |  162 -
 .../uci/ics/asterix/aoya/test/YARNCluster.java  |   99 -
 .../aoya/test/AsterixYARNInstanceUtil.java      |  121 +
 .../aoya/test/AsterixYARNLibraryTestIT.java     |  104 +
 .../aoya/test/AsterixYARNLifecycleIT.java       |  162 +
 .../apache/asterix/aoya/test/YARNCluster.java   |   99 +
 3796 files changed, 219326 insertions(+), 219326 deletions(-)
----------------------------------------------------------------------



[21/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
new file mode 100644
index 0000000..bfa0331
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
@@ -0,0 +1,625 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.asterix.aql.base.Clause;
+import edu.uci.ics.asterix.aql.base.Expression;
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FieldAccessor;
+import edu.uci.ics.asterix.aql.expression.FieldBinding;
+import edu.uci.ics.asterix.aql.expression.ForClause;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aql.expression.LiteralExpr;
+import edu.uci.ics.asterix.aql.expression.Query;
+import edu.uci.ics.asterix.aql.expression.RecordConstructor;
+import edu.uci.ics.asterix.aql.expression.VariableExpr;
+import edu.uci.ics.asterix.aql.expression.WhereClause;
+import edu.uci.ics.asterix.aql.literal.StringLiteral;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+
+/**
+ * An AQL statement instance is translated into an instance of type CompileX
+ * that has additional fields for use by the AqlTranslator.
+ */
+public class CompiledStatements {
+
+    public static interface ICompiledStatement {
+
+        public Kind getKind();
+    }
+
+    public static class CompiledDatasetDropStatement implements ICompiledStatement {
+        private final String dataverseName;
+        private final String datasetName;
+
+        public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.DATASET_DROP;
+        }
+    }
+
+    // added by yasser
+    public static class CompiledCreateDataverseStatement implements ICompiledStatement {
+        private String dataverseName;
+        private String format;
+
+        public CompiledCreateDataverseStatement(String dataverseName, String format) {
+            this.dataverseName = dataverseName;
+            this.format = format;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getFormat() {
+            return format;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.CREATE_DATAVERSE;
+        }
+    }
+
+    public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
+        private String nodeGroupName;
+
+        public CompiledNodeGroupDropStatement(String nodeGroupName) {
+            this.nodeGroupName = nodeGroupName;
+        }
+
+        public String getNodeGroupName() {
+            return nodeGroupName;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.NODEGROUP_DROP;
+        }
+    }
+
+    public static class CompiledIndexDropStatement implements ICompiledStatement {
+        private String dataverseName;
+        private String datasetName;
+        private String indexName;
+
+        public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.indexName = indexName;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        public String getIndexName() {
+            return indexName;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.INDEX_DROP;
+        }
+    }
+
+    public static class CompiledDataverseDropStatement implements ICompiledStatement {
+        private String dataverseName;
+        private boolean ifExists;
+
+        public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
+            this.dataverseName = dataverseName;
+            this.ifExists = ifExists;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public boolean getIfExists() {
+            return ifExists;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.DATAVERSE_DROP;
+        }
+    }
+
+    public static class CompiledTypeDropStatement implements ICompiledStatement {
+        private String typeName;
+
+        public CompiledTypeDropStatement(String nodeGroupName) {
+            this.typeName = nodeGroupName;
+        }
+
+        public String getTypeName() {
+            return typeName;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.TYPE_DROP;
+        }
+    }
+
+    public static interface ICompiledDmlStatement extends ICompiledStatement {
+
+        public String getDataverseName();
+
+        public String getDatasetName();
+    }
+
+    public static class CompiledCreateIndexStatement implements ICompiledDmlStatement {
+        private final String indexName;
+        private final String dataverseName;
+        private final String datasetName;
+        private final List<List<String>> keyFields;
+        private final List<IAType> keyTypes;
+        private final boolean isEnforced;
+        private final IndexType indexType;
+
+        // Specific to NGram index.
+        private final int gramLength;
+
+        public CompiledCreateIndexStatement(String indexName, String dataverseName, String datasetName,
+                List<List<String>> keyFields, List<IAType> keyTypes, boolean isEnforced, int gramLength, IndexType indexType) {
+            this.indexName = indexName;
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.keyFields = keyFields;
+            this.keyTypes = keyTypes;
+            this.gramLength = gramLength;
+            this.isEnforced = isEnforced;
+            this.indexType = indexType;
+        }
+
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getIndexName() {
+            return indexName;
+        }
+
+        public List<List<String>> getKeyFields() {
+            return keyFields;
+        }
+
+        public List<IAType> getKeyFieldTypes() {
+            return keyTypes;
+        }
+
+        public IndexType getIndexType() {
+            return indexType;
+        }
+
+        public int getGramLength() {
+            return gramLength;
+        }
+
+        public boolean isEnforced() {
+            return isEnforced;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.CREATE_INDEX;
+        }
+    }
+
+    public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
+        private String dataverseName;
+        private String datasetName;
+        private boolean alreadySorted;
+        private String adapter;
+        private Map<String, String> properties;
+
+        public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
+                Map<String, String> properties, boolean alreadySorted) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.alreadySorted = alreadySorted;
+            this.adapter = adapter;
+            this.properties = properties;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        public boolean alreadySorted() {
+            return alreadySorted;
+        }
+
+        public String getAdapter() {
+            return adapter;
+        }
+
+        public Map<String, String> getProperties() {
+            return properties;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.LOAD;
+        }
+    }
+
+    public static class CompiledInsertStatement implements ICompiledDmlStatement {
+        private final String dataverseName;
+        private final String datasetName;
+        private final Query query;
+        private final int varCounter;
+
+        public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.query = query;
+            this.varCounter = varCounter;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        public int getVarCounter() {
+            return varCounter;
+        }
+
+        public Query getQuery() {
+            return query;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.INSERT;
+        }
+    }
+
+    public static class CompiledConnectFeedStatement implements ICompiledDmlStatement {
+        private String dataverseName;
+        private String feedName;
+        private String datasetName;
+        private String policyName;
+        private Query query;
+        private int varCounter;
+
+        public CompiledConnectFeedStatement(String dataverseName, String feedName, String datasetName,
+                String policyName, Query query, int varCounter) {
+            this.dataverseName = dataverseName;
+            this.feedName = feedName;
+            this.datasetName = datasetName;
+            this.policyName = policyName;
+            this.query = query;
+            this.varCounter = varCounter;
+        }
+
+        @Override
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getFeedName() {
+            return feedName;
+        }
+
+        @Override
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        public int getVarCounter() {
+            return varCounter;
+        }
+
+        public Query getQuery() {
+            return query;
+        }
+
+        public void setQuery(Query query) {
+            this.query = query;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.CONNECT_FEED;
+        }
+
+        public String getPolicyName() {
+            return policyName;
+        }
+    }
+    
+    public static class CompiledSubscribeFeedStatement implements ICompiledDmlStatement {
+
+        private final FeedConnectionRequest request;
+        private Query query;
+        private final int varCounter;
+
+        public CompiledSubscribeFeedStatement(FeedConnectionRequest request, Query query, int varCounter) {
+            this.request = request;
+            this.query = query;
+            this.varCounter = varCounter;
+        }
+
+        @Override
+        public String getDataverseName() {
+            return request.getReceivingFeedId().getDataverse();
+        }
+
+        @Override
+        public String getDatasetName() {
+            return request.getTargetDataset();
+        }
+
+        public int getVarCounter() {
+            return varCounter;
+        }
+
+        public Query getQuery() {
+            return query;
+        }
+
+        public void setQuery(Query query) {
+            this.query = query;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.SUBSCRIBE_FEED;
+        }
+
+    }
+
+
+    public static class CompiledDisconnectFeedStatement implements ICompiledDmlStatement {
+        private String dataverseName;
+        private String datasetName;
+        private String feedName;
+        private Query query;
+        private int varCounter;
+
+        public CompiledDisconnectFeedStatement(String dataverseName, String feedName, String datasetName) {
+            this.dataverseName = dataverseName;
+            this.feedName = feedName;
+            this.datasetName = datasetName;
+        }
+
+        @Override
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        @Override
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        public String getFeedName() {
+            return feedName;
+        }
+
+        public int getVarCounter() {
+            return varCounter;
+        }
+
+        public Query getQuery() {
+            return query;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.DISCONNECT_FEED;
+        }
+
+    }
+
+    public static class CompiledDeleteStatement implements ICompiledDmlStatement {
+        private VariableExpr var;
+        private String dataverseName;
+        private String datasetName;
+        private Expression condition;
+        private int varCounter;
+        private AqlMetadataProvider metadataProvider;
+
+        public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName,
+                Expression condition, int varCounter, AqlMetadataProvider metadataProvider) {
+            this.var = var;
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+            this.condition = condition;
+            this.varCounter = varCounter;
+            this.metadataProvider = metadataProvider;
+        }
+
+        @Override
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        @Override
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public int getVarCounter() {
+            return varCounter;
+        }
+
+        public Expression getCondition() {
+            return condition;
+        }
+
+        public Query getQuery() throws AlgebricksException {
+
+            List<Expression> arguments = new ArrayList<Expression>();
+            String arg = dataverseName == null ? datasetName : dataverseName + "." + datasetName;
+            LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
+            arguments.add(argumentLiteral);
+
+            CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
+                    arguments);
+            List<Clause> clauseList = new ArrayList<Clause>();
+            Clause forClause = new ForClause(var, callExpression);
+            clauseList.add(forClause);
+            Clause whereClause = null;
+            if (condition != null) {
+                whereClause = new WhereClause(condition);
+                clauseList.add(whereClause);
+            }
+
+            Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+            if (dataset == null) {
+                throw new AlgebricksException("Unknown dataset " + datasetName);
+            }
+            String itemTypeName = dataset.getItemTypeName();
+            IAType itemType = metadataProvider.findType(dataset.getDataverseName(), itemTypeName);
+            ARecordType recType = (ARecordType) itemType;
+            String[] fieldNames = recType.getFieldNames();
+            List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
+            for (int i = 0; i < fieldNames.length; i++) {
+                FieldAccessor fa = new FieldAccessor(var, new Identifier(fieldNames[i]));
+                FieldBinding fb = new FieldBinding(new LiteralExpr(new StringLiteral(fieldNames[i])), fa);
+                fieldBindings.add(fb);
+            }
+            RecordConstructor rc = new RecordConstructor(fieldBindings);
+
+            FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
+            Query query = new Query();
+            query.setBody(flowgr);
+            return query;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.DELETE;
+        }
+
+    }
+
+    public static class CompiledCompactStatement implements ICompiledStatement {
+        private final String dataverseName;
+        private final String datasetName;
+
+        public CompiledCompactStatement(String dataverseName, String datasetName) {
+            this.dataverseName = dataverseName;
+            this.datasetName = datasetName;
+        }
+
+        public String getDataverseName() {
+            return dataverseName;
+        }
+
+        public String getDatasetName() {
+            return datasetName;
+        }
+
+        @Override
+        public Kind getKind() {
+            return Kind.COMPACT;
+        }
+    }
+
+    public static class CompiledIndexCompactStatement extends CompiledCompactStatement {
+        private final String indexName;
+        private final List<List<String>> keyFields;
+        private final List<IAType> keyTypes;
+        private final IndexType indexType;
+        private final boolean isEnforced;
+
+        // Specific to NGram index.
+        private final int gramLength;
+
+        public CompiledIndexCompactStatement(String dataverseName, String datasetName, String indexName,
+                List<List<String>> keyFields, List<IAType> keyTypes, boolean isEnforced, int gramLength, IndexType indexType) {
+            super(dataverseName, datasetName);
+            this.indexName = indexName;
+            this.keyFields = keyFields;
+            this.keyTypes = keyTypes;
+            this.gramLength = gramLength;
+            this.indexType = indexType;
+            this.isEnforced = isEnforced;
+        }
+
+        public String getIndexName() {
+            return indexName;
+        }
+
+        public List<List<String>> getKeyFields() {
+            return keyFields;
+        }
+
+        public List<IAType> getKeyTypes() {
+            return keyTypes;
+        }
+
+        public IndexType getIndexType() {
+            return indexType;
+        }
+
+        public int getGramLength() {
+            return gramLength;
+        }
+
+        public boolean isEnforced() {
+            return isEnforced;
+        }
+    }
+}
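
For orientation while reading the diff above, a minimal, hypothetical driver snippet (not part of this commit) showing how one of these compiled-statement value classes is consumed; the dataverse, dataset, and index names are made up:

    import edu.uci.ics.asterix.aql.base.Statement.Kind;
    import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;

    public class CompiledIndexDropExample {
        public static void main(String[] args) {
            // Compiled form of dropping the (made-up) index "byUser" on SocialData.tweets.
            CompiledIndexDropStatement stmt =
                    new CompiledIndexDropStatement("SocialData", "tweets", "byUser");

            // Downstream translator code dispatches on the statement kind.
            if (stmt.getKind() == Kind.INDEX_DROP) {
                System.out.println("Dropping index " + stmt.getIndexName() + " on "
                        + stmt.getDataverseName() + "." + stmt.getDatasetName());
            }
        }
    }

Apart from CompiledDeleteStatement, which synthesizes a FLWOGR query over the target dataset, these classes are plain carriers keyed by their Kind with accessor methods only.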

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/ConstantHelper.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/ConstantHelper.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/ConstantHelper.java
new file mode 100644
index 0000000..dc75889
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/ConstantHelper.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import edu.uci.ics.asterix.aql.base.Literal;
+import edu.uci.ics.asterix.aql.literal.DoubleLiteral;
+import edu.uci.ics.asterix.aql.literal.FloatLiteral;
+import edu.uci.ics.asterix.aql.literal.IntegerLiteral;
+import edu.uci.ics.asterix.aql.literal.LongIntegerLiteral;
+import edu.uci.ics.asterix.aql.literal.StringLiteral;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ADouble;
+import edu.uci.ics.asterix.om.base.AFloat;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AInt64;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IAObject;
+
+public final class ConstantHelper {
+
+    public static IAObject objectFromLiteral(Literal valLiteral) {
+        switch (valLiteral.getLiteralType()) {
+            case DOUBLE: {
+                DoubleLiteral d = (DoubleLiteral) valLiteral;
+                return new ADouble(d.getValue());
+            }
+            case FALSE: {
+                return ABoolean.FALSE;
+            }
+            case FLOAT: {
+                FloatLiteral fl = (FloatLiteral) valLiteral;
+                return new AFloat(fl.getValue());
+            }
+            case INTEGER: {
+                IntegerLiteral il = (IntegerLiteral) valLiteral;
+                return new AInt32(il.getValue());
+            }
+            case LONG: {
+                LongIntegerLiteral il = (LongIntegerLiteral) valLiteral;
+                return new AInt64(il.getValue());
+            }
+            case NULL: {
+                return ANull.NULL;
+            }
+            case STRING: {
+                StringLiteral sl = (StringLiteral) valLiteral;
+                return new AString(sl.getValue());
+            }
+            case TRUE: {
+                return ABoolean.TRUE;
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+}
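
A short, hypothetical sketch (not part of the commit) of the single entry point above, using only the literal and object-model classes the file already imports:

    import edu.uci.ics.asterix.aql.literal.StringLiteral;
    import edu.uci.ics.asterix.om.base.IAObject;
    import edu.uci.ics.asterix.translator.ConstantHelper;

    public class ConstantHelperExample {
        public static void main(String[] args) {
            // A STRING literal from the AQL AST maps to an AString in the object model.
            IAObject value = ConstantHelper.objectFromLiteral(new StringLiteral("hello"));
            System.out.println(value);
        }
    }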

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java
new file mode 100644
index 0000000..986b5ca
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.util.HashMap;
+
+import edu.uci.ics.asterix.aql.expression.VariableExpr;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+
+public final class TranslationContext {
+
+    private Counter varCounter;
+    private HashMap<Integer, LogicalVariable> varEnv = new HashMap<Integer, LogicalVariable>();
+    private boolean topFlwor = true;
+
+    public TranslationContext(Counter varCounter) {
+        this.varCounter = varCounter;
+    }
+
+    public int getVarCounter() {
+        return varCounter.get();
+    }
+
+    public boolean isTopFlwor() {
+        return topFlwor;
+    }
+
+    public void setTopFlwor(boolean b) {
+        topFlwor = b;
+    }
+
+    public LogicalVariable getVar(Integer varId) {
+        return varEnv.get(varId);
+    }
+
+    public LogicalVariable getVar(VariableExpr v) {
+        return varEnv.get(v.getVar().getId());
+    }
+
+    public LogicalVariable newVar(VariableExpr v) {
+        Integer i = v.getVar().getId();
+        if (i > varCounter.get()) {
+            varCounter.set(i);
+        }
+        LogicalVariable var = new LogicalVariable(i);
+        varEnv.put(i, var);
+        return var;
+    }
+
+    public void setVar(VariableExpr v, LogicalVariable var) {
+        varEnv.put(v.getVar().getId(), var);
+    }
+
+    public LogicalVariable newVar() {
+        varCounter.inc();
+        LogicalVariable var = new LogicalVariable(varCounter.get());
+        varEnv.put(varCounter.get(), var);
+        return var;
+    }
+}
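
A hypothetical sketch (not part of the commit) of how the context is driven; it assumes the Algebricks Counter class accepts an initial value in its constructor:

    import edu.uci.ics.asterix.translator.TranslationContext;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

    public class TranslationContextExample {
        public static void main(String[] args) {
            TranslationContext ctx = new TranslationContext(new Counter(0));

            // Each fresh variable bumps the shared counter; getVarCounter() reports the high-water mark.
            LogicalVariable v1 = ctx.newVar();
            LogicalVariable v2 = ctx.newVar();
            System.out.println(v1 + ", " + v2 + ", counter = " + ctx.getVarCounter()); // counter = 2
        }
    }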

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationException.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationException.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationException.java
new file mode 100644
index 0000000..2819961
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationException.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+public class TranslationException extends AsterixException {
+    /**
+     * Serial version UID for this exception class.
+     */
+    private static final long serialVersionUID = 685960054131778068L;
+
+    public TranslationException() {
+        super();
+    }
+
+    public TranslationException(String msg) {
+        super(msg);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
new file mode 100644
index 0000000..e6f3d17
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/TypeTranslator.java
@@ -0,0 +1,392 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.translator;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.asterix.aql.expression.OrderedListTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition.RecordKind;
+import edu.uci.ics.asterix.aql.expression.TypeExpression;
+import edu.uci.ics.asterix.aql.expression.TypeReferenceExpression;
+import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
+import edu.uci.ics.asterix.common.annotations.IRecordFieldDataGen;
+import edu.uci.ics.asterix.common.annotations.RecordDataGenAnnotation;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
+import edu.uci.ics.asterix.metadata.entities.Datatype;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
+import edu.uci.ics.asterix.om.types.AbstractComplexType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.TypeSignature;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class TypeTranslator {
+
+    public static Map<TypeSignature, IAType> computeTypes(MetadataTransactionContext mdTxnCtx, TypeExpression typeExpr,
+            String typeName, String typeDataverse) throws AlgebricksException, MetadataException {
+        Map<TypeSignature, IAType> typeMap = new HashMap<TypeSignature, IAType>();
+        return computeTypes(mdTxnCtx, typeExpr, typeName, typeDataverse, typeMap);
+    }
+
+    public static Map<TypeSignature, IAType> computeTypes(MetadataTransactionContext mdTxnCtx, TypeExpression typeExpr,
+            String typeName, String typeDataverse, Map<TypeSignature, IAType> typeMap) throws AlgebricksException,
+            MetadataException {
+        Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes = new HashMap<String, Map<ARecordType, List<Integer>>>();
+        Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes = new HashMap<TypeSignature, List<AbstractCollectionType>>();
+        Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences = new HashMap<TypeSignature, List<TypeSignature>>();
+        firstPass(typeExpr, typeName, typeMap, incompleteFieldTypes, incompleteItemTypes,
+                incompleteTopLevelTypeReferences, typeDataverse);
+        secondPass(mdTxnCtx, typeMap, incompleteFieldTypes, incompleteItemTypes, incompleteTopLevelTypeReferences,
+                typeDataverse);
+
+        for (IAType type : typeMap.values())
+            if (type.getTypeTag().isDerivedType())
+                ((AbstractComplexType) type).generateNestedDerivedTypeNames();
+        return typeMap;
+    }
+
+    private static Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
+
+    private static void firstPass(TypeExpression typeExpr, String typeName, Map<TypeSignature, IAType> typeMap,
+            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
+            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
+            Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String typeDataverse)
+            throws AlgebricksException {
+
+        if (builtinTypeMap.get(typeName) != null) {
+            throw new AlgebricksException("Cannot redefine builtin type " + typeName + " .");
+        }
+        TypeSignature typeSignature = new TypeSignature(typeDataverse, typeName);
+        try {
+            switch (typeExpr.getTypeKind()) {
+                case TYPEREFERENCE: {
+                    TypeReferenceExpression tre = (TypeReferenceExpression) typeExpr;
+                    IAType t = solveTypeReference(new TypeSignature(typeDataverse, tre.getIdent().getValue()), typeMap);
+                    if (t != null) {
+                        typeMap.put(typeSignature, t);
+                    } else {
+                        addIncompleteTopLevelTypeReference(typeName, tre, incompleteTopLevelTypeReferences,
+                                typeDataverse);
+                    }
+                    break;
+                }
+                case RECORD: {
+                    RecordTypeDefinition rtd = (RecordTypeDefinition) typeExpr;
+                    ARecordType recType = computeRecordType(typeSignature, rtd, typeMap, incompleteFieldTypes,
+                            incompleteItemTypes, typeDataverse);
+                    typeMap.put(typeSignature, recType);
+                    break;
+                }
+                case ORDEREDLIST: {
+                    OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) typeExpr;
+                    AOrderedListType olType = computeOrderedListType(typeSignature, oltd, typeMap, incompleteItemTypes,
+                            incompleteFieldTypes, typeDataverse);
+                    typeMap.put(typeSignature, olType);
+                    break;
+                }
+                case UNORDEREDLIST: {
+                    UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) typeExpr;
+                    AUnorderedListType ulType = computeUnorderedListType(typeSignature, ultd, typeMap,
+                            incompleteItemTypes, incompleteFieldTypes, typeDataverse);
+                    typeMap.put(typeSignature, ulType);
+                    break;
+                }
+                default: {
+                    throw new IllegalStateException();
+                }
+            }
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
+    }
+
+    private static void secondPass(MetadataTransactionContext mdTxnCtx, Map<TypeSignature, IAType> typeMap,
+            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
+            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
+            Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String typeDataverse)
+            throws AlgebricksException, MetadataException {
+        // solve remaining top level references
+
+        for (TypeSignature typeSignature : incompleteTopLevelTypeReferences.keySet()) {
+            IAType t;
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getNamespace(),
+                    typeSignature.getName());
+            if (dt == null) {
+                throw new AlgebricksException("Could not resolve type " + typeSignature);
+            } else
+                t = dt.getDatatype();
+            for (TypeSignature sign : incompleteTopLevelTypeReferences.get(typeSignature)) {
+                typeMap.put(sign, t);
+            }
+        }
+        // solve remaining field type references
+        for (String trefName : incompleteFieldTypes.keySet()) {
+            IAType t;
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeDataverse, trefName);
+            if (dt == null) {
+                throw new AlgebricksException("Could not resolve type " + trefName);
+            } else
+                t = dt.getDatatype();
+            Map<ARecordType, List<Integer>> fieldsToFix = incompleteFieldTypes.get(trefName);
+            for (ARecordType recType : fieldsToFix.keySet()) {
+                List<Integer> positions = fieldsToFix.get(recType);
+                IAType[] fldTypes = recType.getFieldTypes();
+                for (Integer pos : positions) {
+                    if (fldTypes[pos] == null) {
+                        fldTypes[pos] = t;
+                    } else { // nullable
+                        AUnionType nullableUnion = (AUnionType) fldTypes[pos];
+                        nullableUnion.setTypeAtIndex(t, 1);
+                    }
+                }
+            }
+        }
+
+        // solve remaining item type references
+        for (TypeSignature typeSignature : incompleteItemTypes.keySet()) {
+            IAType t;
+            Datatype dt = null;
+            if (MetadataManager.INSTANCE != null) {
+                dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, typeSignature.getNamespace(),
+                        typeSignature.getName());
+                if (dt == null) {
+                    throw new AlgebricksException("Could not resolve type " + typeSignature);
+                }
+                t = dt.getDatatype();
+            } else {
+                t = typeMap.get(typeSignature);
+            }
+            for (AbstractCollectionType act : incompleteItemTypes.get(typeSignature)) {
+                act.setItemType(t);
+            }
+        }
+    }
+
+    private static AOrderedListType computeOrderedListType(TypeSignature typeSignature, OrderedListTypeDefinition oltd,
+            Map<TypeSignature, IAType> typeMap, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
+            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse)
+            throws AsterixException {
+        TypeExpression tExpr = oltd.getItemTypeExpression();
+        String typeName = typeSignature != null ? typeSignature.getName() : null;
+        AOrderedListType aolt = new AOrderedListType(null, typeName);
+        setCollectionItemType(tExpr, typeMap, incompleteItemTypes, incompleteFieldTypes, aolt, defaultDataverse);
+        return aolt;
+    }
+
+    private static AUnorderedListType computeUnorderedListType(TypeSignature typeSignature,
+            UnorderedListTypeDefinition ultd, Map<TypeSignature, IAType> typeMap,
+            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
+            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, String defaultDataverse)
+            throws AsterixException {
+        TypeExpression tExpr = ultd.getItemTypeExpression();
+        String typeName = typeSignature != null ? typeSignature.getName() : null;
+        AUnorderedListType ault = new AUnorderedListType(null, typeName);
+        setCollectionItemType(tExpr, typeMap, incompleteItemTypes, incompleteFieldTypes, ault, defaultDataverse);
+        return ault;
+    }
+
+    private static void setCollectionItemType(TypeExpression tExpr, Map<TypeSignature, IAType> typeMap,
+            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
+            Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes, AbstractCollectionType act,
+            String defaultDataverse) throws AsterixException {
+        switch (tExpr.getTypeKind()) {
+            case ORDEREDLIST: {
+                OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) tExpr;
+                IAType t = computeOrderedListType(null, oltd, typeMap, incompleteItemTypes, incompleteFieldTypes,
+                        defaultDataverse);
+                act.setItemType(t);
+                break;
+            }
+            case UNORDEREDLIST: {
+                UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) tExpr;
+                IAType t = computeUnorderedListType(null, ultd, typeMap, incompleteItemTypes, incompleteFieldTypes,
+                        defaultDataverse);
+                act.setItemType(t);
+                break;
+            }
+            case RECORD: {
+                RecordTypeDefinition rtd = (RecordTypeDefinition) tExpr;
+                IAType t = computeRecordType(null, rtd, typeMap, incompleteFieldTypes, incompleteItemTypes,
+                        defaultDataverse);
+                act.setItemType(t);
+                break;
+            }
+            case TYPEREFERENCE: {
+                TypeReferenceExpression tre = (TypeReferenceExpression) tExpr;
+                TypeSignature signature = new TypeSignature(defaultDataverse, tre.getIdent().getValue());
+                IAType tref = solveTypeReference(signature, typeMap);
+                if (tref != null) {
+                    act.setItemType(tref);
+                } else {
+                    addIncompleteCollectionTypeReference(act, tre, incompleteItemTypes, defaultDataverse);
+                }
+                break;
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    private static void addIncompleteCollectionTypeReference(AbstractCollectionType collType,
+            TypeReferenceExpression tre, Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes,
+            String defaultDataverse) {
+        String typeName = tre.getIdent().getValue();
+        TypeSignature typeSignature = new TypeSignature(defaultDataverse, typeName);
+        List<AbstractCollectionType> typeList = incompleteItemTypes.get(typeSignature);
+        if (typeList == null) {
+            typeList = new LinkedList<AbstractCollectionType>();
+            incompleteItemTypes.put(typeSignature, typeList);
+        }
+        typeList.add(collType);
+    }
+
+    private static void addIncompleteFieldTypeReference(ARecordType recType, int fldPosition,
+            TypeReferenceExpression tre, Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes) {
+        String typeName = tre.getIdent().getValue();
+        Map<ARecordType, List<Integer>> refMap = incompleteFieldTypes.get(typeName);
+        if (refMap == null) {
+            refMap = new HashMap<ARecordType, List<Integer>>();
+            incompleteFieldTypes.put(typeName, refMap);
+        }
+        List<Integer> typeList = refMap.get(recType);
+        if (typeList == null) {
+            typeList = new ArrayList<Integer>();
+            refMap.put(recType, typeList);
+        }
+        typeList.add(fldPosition);
+    }
+
+    private static void addIncompleteTopLevelTypeReference(String tdeclName, TypeReferenceExpression tre,
+            Map<TypeSignature, List<TypeSignature>> incompleteTopLevelTypeReferences, String defaultDataverse) {
+        String name = tre.getIdent().getValue();
+        TypeSignature typeSignature = new TypeSignature(defaultDataverse, name);
+        List<TypeSignature> refList = incompleteTopLevelTypeReferences.get(typeSignature); // the map is keyed by TypeSignature, not by the raw name
+        if (refList == null) {
+            refList = new LinkedList<TypeSignature>();
+            incompleteTopLevelTypeReferences.put(new TypeSignature(defaultDataverse, tre.getIdent().getValue()),
+                    refList);
+        }
+        refList.add(typeSignature);
+    }
+
+    private static IAType solveTypeReference(TypeSignature typeSignature, Map<TypeSignature, IAType> typeMap) {
+        IAType builtin = builtinTypeMap.get(typeSignature.getName());
+        if (builtin != null) {
+            return builtin;
+        } else {
+            return typeMap.get(typeSignature);
+        }
+    }
+
+    private static ARecordType computeRecordType(TypeSignature typeSignature, RecordTypeDefinition rtd,
+            Map<TypeSignature, IAType> typeMap, Map<String, Map<ARecordType, List<Integer>>> incompleteFieldTypes,
+            Map<TypeSignature, List<AbstractCollectionType>> incompleteItemTypes, String defaultDataverse)
+            throws AsterixException {
+        List<String> names = rtd.getFieldNames();
+        int n = names.size();
+        String[] fldNames = new String[n];
+        IAType[] fldTypes = new IAType[n];
+        int i = 0;
+        for (String s : names) {
+            fldNames[i++] = s;
+        }
+        boolean isOpen = rtd.getRecordKind() == RecordKind.OPEN;
+        ARecordType recType;
+        try {
+            recType = new ARecordType(typeSignature == null ? null : typeSignature.getName(), fldNames, fldTypes,
+                    isOpen);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
+
+        List<IRecordFieldDataGen> fieldDataGen = rtd.getFieldDataGen();
+        if (fieldDataGen.size() == n) {
+            IRecordFieldDataGen[] rfdg = new IRecordFieldDataGen[n];
+            rfdg = fieldDataGen.toArray(rfdg);
+            recType.getAnnotations().add(new RecordDataGenAnnotation(rfdg, rtd.getUndeclaredFieldsDataGen()));
+        }
+
+        for (int j = 0; j < n; j++) {
+            TypeExpression texpr = rtd.getFieldTypes().get(j);
+            switch (texpr.getTypeKind()) {
+                case TYPEREFERENCE: {
+                    TypeReferenceExpression tre = (TypeReferenceExpression) texpr;
+                    TypeSignature signature = new TypeSignature(defaultDataverse, tre.getIdent().getValue());
+                    IAType tref = solveTypeReference(signature, typeMap);
+                    if (tref != null) {
+                        if (!rtd.getNullableFields().get(j)) { // not nullable
+                            fldTypes[j] = tref;
+                        } else { // nullable
+                            fldTypes[j] = AUnionType.createNullableType(tref);
+                        }
+                    } else {
+                        addIncompleteFieldTypeReference(recType, j, tre, incompleteFieldTypes);
+                        if (rtd.getNullableFields().get(j)) {
+                            fldTypes[j] = AUnionType.createNullableType(null);
+                        }
+                    }
+                    break;
+                }
+                case RECORD: {
+                    RecordTypeDefinition recTypeDef2 = (RecordTypeDefinition) texpr;
+                    IAType t2 = computeRecordType(null, recTypeDef2, typeMap, incompleteFieldTypes,
+                            incompleteItemTypes, defaultDataverse);
+                    if (!rtd.getNullableFields().get(j)) { // not nullable
+                        fldTypes[j] = t2;
+                    } else { // nullable
+                        fldTypes[j] = AUnionType.createNullableType(t2);
+                    }
+                    break;
+                }
+                case ORDEREDLIST: {
+                    OrderedListTypeDefinition oltd = (OrderedListTypeDefinition) texpr;
+                    IAType t2 = computeOrderedListType(null, oltd, typeMap, incompleteItemTypes, incompleteFieldTypes,
+                            defaultDataverse);
+                    fldTypes[j] = (rtd.getNullableFields().get(j)) ? AUnionType.createNullableType(t2) : t2;
+                    break;
+                }
+                case UNORDEREDLIST: {
+                    UnorderedListTypeDefinition ultd = (UnorderedListTypeDefinition) texpr;
+                    IAType t2 = computeUnorderedListType(null, ultd, typeMap, incompleteItemTypes,
+                            incompleteFieldTypes, defaultDataverse);
+                    fldTypes[j] = (rtd.getNullableFields().get(j)) ? AUnionType.createNullableType(t2) : t2;
+                    break;
+                }
+                default: {
+                    throw new IllegalStateException();
+                }
+            }
+
+        }
+
+        return recType;
+    }
+}

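TypeTranslator above is a two-pass scheme: the first pass materializes whatever types it can and queues incomplete field, item and top-level references, and the second pass resolves those queues against the metadata and patches the placeholders in place. The same pattern, reduced to a self-contained sketch with hypothetical names and plain strings standing in for the metadata catalog:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TwoPassResolutionSketch {

    // Hypothetical stand-in for a field whose type may not be known at declaration time.
    static class TypeRef {
        final String name;
        String resolved; // patched during the second pass
        TypeRef(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        Map<String, String> catalog = new HashMap<String, String>(); // plays the role of the metadata manager
        catalog.put("int64", "builtin:int64");

        Map<String, List<TypeRef>> incomplete = new HashMap<String, List<TypeRef>>();
        List<TypeRef> fields = new ArrayList<TypeRef>();

        // First pass: resolve what we can, queue the rest under the referenced name.
        for (String refName : new String[] { "int64", "AddressType", "AddressType" }) {
            TypeRef ref = new TypeRef(refName);
            fields.add(ref);
            String known = catalog.get(refName);
            if (known != null) {
                ref.resolved = known;
            } else {
                List<TypeRef> waiting = incomplete.get(refName);
                if (waiting == null) {
                    waiting = new ArrayList<TypeRef>();
                    incomplete.put(refName, waiting);
                }
                waiting.add(ref);
            }
        }

        // Second pass: the forward-referenced type is now declared; patch every waiting reference.
        catalog.put("AddressType", "record:AddressType");
        for (Map.Entry<String, List<TypeRef>> e : incomplete.entrySet()) {
            String resolved = catalog.get(e.getKey());
            if (resolved == null) {
                throw new IllegalStateException("Could not resolve type " + e.getKey());
            }
            for (TypeRef ref : e.getValue()) {
                ref.resolved = resolved;
            }
        }

        for (TypeRef ref : fields) {
            System.out.println(ref.name + " -> " + ref.resolved);
        }
    }
}
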
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
deleted file mode 100644
index 8ccc9d5..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ /dev/null
@@ -1,411 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.io.PrintWriter;
-import java.rmi.RemoteException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.json.JSONException;
-
-import edu.uci.ics.asterix.api.common.Job.SubmissionMode;
-import edu.uci.ics.asterix.aql.base.Statement.Kind;
-import edu.uci.ics.asterix.aql.expression.FunctionDecl;
-import edu.uci.ics.asterix.aql.expression.Query;
-import edu.uci.ics.asterix.aql.expression.visitor.AQLPrintVisitor;
-import edu.uci.ics.asterix.aql.rewrites.AqlRewriter;
-import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
-import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
-import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
-import edu.uci.ics.asterix.common.exceptions.ACIDException;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
-import edu.uci.ics.asterix.dataflow.data.common.AqlMergeAggregationExpressionFactory;
-import edu.uci.ics.asterix.dataflow.data.common.AqlNullableTypeComputer;
-import edu.uci.ics.asterix.dataflow.data.common.AqlPartialAggregationTypeComputer;
-import edu.uci.ics.asterix.formats.base.IDataFormat;
-import edu.uci.ics.asterix.jobgen.AqlLogicalExpressionJobGen;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.optimizer.base.RuleCollections;
-import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
-import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
-import edu.uci.ics.asterix.translator.AqlExpressionToPlanTranslator;
-import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
-import edu.uci.ics.hyracks.algebricks.compiler.api.ICompiler;
-import edu.uci.ics.hyracks.algebricks.compiler.api.ICompilerFactory;
-import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialFixpointRuleController;
-import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialOnceRuleController;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
-import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPlotter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AlgebricksOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IOptimizationContextFactory;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-/**
- * Provides helper methods for compilation of a query into a JobSpec and submission
- * to Hyracks through the Hyracks client interface.
- */
-public class APIFramework {
-
-    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
-        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultLogicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
-        SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
-        SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
-        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                RuleCollections.buildInitialTranslationRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                RuleCollections.buildTypeInferenceRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                RuleCollections.buildAutogenerateIDRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
-                RuleCollections.buildNormalizationRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
-                RuleCollections.buildLoadFieldsRuleCollection()));
-        // fj
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
-                RuleCollections.buildFuzzyJoinRuleCollection()));
-        //
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
-                RuleCollections.buildNormalizationRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
-                RuleCollections.buildLoadFieldsRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                RuleCollections.buildDataExchangeRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                RuleCollections.buildConsolidationRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                RuleCollections.buildAccessMethodRuleCollection()));
-        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
-                RuleCollections.buildPlanCleanupRuleCollection()));
-
-        //put TXnRuleCollection!
-        return defaultLogicalRewrites;
-    }
-
-    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultPhysicalRewrites() {
-        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultPhysicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
-        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
-        SequentialOnceRuleController seqOnceTopLevel = new SequentialOnceRuleController(false);
-        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                RuleCollections.buildPhysicalRewritesAllLevelsRuleCollection()));
-        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceTopLevel,
-                RuleCollections.buildPhysicalRewritesTopLevelRuleCollection()));
-        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
-                RuleCollections.prepareForJobGenRuleCollection()));
-        return defaultPhysicalRewrites;
-    }
-
-    private static class AqlOptimizationContextFactory implements IOptimizationContextFactory {
-
-        public static final AqlOptimizationContextFactory INSTANCE = new AqlOptimizationContextFactory();
-
-        private AqlOptimizationContextFactory() {
-        }
-
-        @Override
-        public IOptimizationContext createOptimizationContext(int varCounter,
-                IExpressionEvalSizeComputer expressionEvalSizeComputer,
-                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
-                IExpressionTypeComputer expressionTypeComputer, INullableTypeComputer nullableTypeComputer,
-                PhysicalOptimizationConfig physicalOptimizationConfig) {
-            return new AlgebricksOptimizationContext(varCounter, expressionEvalSizeComputer,
-                    mergeAggregationExpressionFactory, expressionTypeComputer, nullableTypeComputer,
-                    physicalOptimizationConfig);
-        }
-
-    }
-
-    public static Pair<Query, Integer> reWriteQuery(List<FunctionDecl> declaredFunctions,
-            AqlMetadataProvider metadataProvider, Query q, SessionConfig conf) throws AsterixException {
-
-        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_EXPR_TREE)) {
-            conf.out().println();
-
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("<h4>Expression tree:</h4>");
-                conf.out().println("<pre>");
-            } else {
-                conf.out().println("----------Expression tree:");
-            }
-
-            if (q != null) {
-                q.accept(new AQLPrintVisitor(conf.out()), 0);
-            }
-
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("</pre>");
-            }
-        }
-        AqlRewriter rw = new AqlRewriter(declaredFunctions, q, metadataProvider);
-        rw.rewrite();
-        Query rwQ = rw.getExpr();
-        return new Pair(rwQ, rw.getVarCounter());
-    }
-
-    public static JobSpecification compileQuery(List<FunctionDecl> declaredFunctions,
-            AqlMetadataProvider queryMetadataProvider, Query rwQ, int varCounter, String outputDatasetName,
-            SessionConfig conf, ICompiledDmlStatement statement) throws AsterixException, AlgebricksException,
-            JSONException, RemoteException, ACIDException {
-
-        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_REWRITTEN_EXPR_TREE)) {
-            conf.out().println();
-
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("<h4>Rewritten expression tree:</h4>");
-                conf.out().println("<pre>");
-            } else {
-                conf.out().println("----------Rewritten expression:");
-            }
-
-            if (rwQ != null) {
-                rwQ.accept(new AQLPrintVisitor(conf.out()), 0);
-            }
-
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("</pre>");
-            }
-        }
-
-        edu.uci.ics.asterix.common.transactions.JobId asterixJobId = JobIdFactory.generateJobId();
-        queryMetadataProvider.setJobId(asterixJobId);
-        AqlExpressionToPlanTranslator t = new AqlExpressionToPlanTranslator(queryMetadataProvider, varCounter,
-                outputDatasetName, statement);
-
-        ILogicalPlan plan;
-        // statement = null when it's a query
-        if (statement == null || statement.getKind() != Kind.LOAD) {
-            plan = t.translate(rwQ);
-        } else {
-            plan = t.translateLoad();
-        }
-
-        LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
-        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_LOGICAL_PLAN)) {
-            conf.out().println();
-
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("<h4>Logical plan:</h4>");
-                conf.out().println("<pre>");
-            } else {
-                conf.out().println("----------Logical plan:");
-            }
-
-            if (rwQ != null || statement.getKind() == Kind.LOAD) {
-                StringBuilder buffer = new StringBuilder();
-                PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
-                conf.out().print(buffer);
-            }
-
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("</pre>");
-            }
-        }
-
-        //print the plot for the logical plan
-        AsterixExternalProperties xProps = AsterixAppContextInfo.getInstance().getExternalProperties();
-        Boolean plot = xProps.getIsPlottingEnabled();
-        if (plot) {
-            PlanPlotter.printLogicalPlan(plan);
-        }
-
-        AsterixCompilerProperties compilerProperties = AsterixAppContextInfo.getInstance().getCompilerProperties();
-        int frameSize = compilerProperties.getFrameSize();
-        int sortFrameLimit = (int) (compilerProperties.getSortMemorySize() / frameSize);
-        int groupFrameLimit = (int) (compilerProperties.getGroupMemorySize() / frameSize);
-        int joinFrameLimit = (int) (compilerProperties.getJoinMemorySize() / frameSize);
-        OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
-        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalSort(sortFrameLimit);
-        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalGroupBy(groupFrameLimit);
-        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesHybridHash(joinFrameLimit);
-
-        HeuristicCompilerFactoryBuilder builder = new HeuristicCompilerFactoryBuilder(
-                AqlOptimizationContextFactory.INSTANCE);
-        builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
-        builder.setLogicalRewrites(buildDefaultLogicalRewrites());
-        builder.setPhysicalRewrites(buildDefaultPhysicalRewrites());
-        IDataFormat format = queryMetadataProvider.getFormat();
-        ICompilerFactory compilerFactory = builder.create();
-        builder.setExpressionEvalSizeComputer(format.getExpressionEvalSizeComputer());
-        builder.setIMergeAggregationExpressionFactory(new AqlMergeAggregationExpressionFactory());
-        builder.setPartialAggregationTypeComputer(new AqlPartialAggregationTypeComputer());
-        builder.setExpressionTypeComputer(AqlExpressionTypeComputer.INSTANCE);
-        builder.setNullableTypeComputer(AqlNullableTypeComputer.INSTANCE);
-
-        ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
-        if (conf.isOptimize()) {
-            compiler.optimize();
-            //plot optimized logical plan
-            if (plot)
-                PlanPlotter.printOptimizedLogicalPlan(plan);
-            if (conf.is(SessionConfig.OOB_OPTIMIZED_LOGICAL_PLAN)) {
-                if (conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS)) {
-                    // For Optimizer tests.
-                    StringBuilder buffer = new StringBuilder();
-                    PlanPrettyPrinter.printPhysicalOps(plan, buffer, 0);
-                    conf.out().print(buffer);
-                } else {
-                    if (conf.is(SessionConfig.FORMAT_HTML)) {
-                        conf.out().println("<h4>Optimized logical plan:</h4>");
-                        conf.out().println("<pre>");
-                    } else {
-                        conf.out().println("----------Optimized logical plan:");
-                    }
-
-                    if (rwQ != null || statement.getKind() == Kind.LOAD) {
-                        StringBuilder buffer = new StringBuilder();
-                        PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
-                        conf.out().print(buffer);
-                    }
-
-                    if (conf.is(SessionConfig.FORMAT_HTML)) {
-                        conf.out().println("</pre>");
-                    }
-                }
-            }
-        }
-
-        if (!conf.isGenerateJobSpec()) {
-            return null;
-        }
-
-        AlgebricksPartitionConstraint clusterLocs = queryMetadataProvider.getClusterLocations();
-        builder.setBinaryBooleanInspectorFactory(format.getBinaryBooleanInspectorFactory());
-        builder.setBinaryIntegerInspectorFactory(format.getBinaryIntegerInspectorFactory());
-        builder.setClusterLocations(clusterLocs);
-        builder.setComparatorFactoryProvider(format.getBinaryComparatorFactoryProvider());
-        builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
-                AqlLogicalExpressionJobGen.INSTANCE));
-        builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
-        builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
-        builder.setNullWriterFactory(format.getNullWriterFactory());
-        builder.setPredicateEvaluatorFactoryProvider(format.getPredicateEvaluatorFactoryProvider());
-
-        switch (conf.fmt()) {
-            case JSON:
-                builder.setPrinterProvider(format.getJSONPrinterFactoryProvider());
-                break;
-            case CSV:
-                builder.setPrinterProvider(format.getCSVPrinterFactoryProvider());
-                break;
-            case ADM:
-                builder.setPrinterProvider(format.getPrinterFactoryProvider());
-                break;
-            default:
-                throw new RuntimeException("Unexpected OutputFormat!");
-        }
-
-        builder.setSerializerDeserializerProvider(format.getSerdeProvider());
-        builder.setTypeTraitProvider(format.getTypeTraitProvider());
-        builder.setNormalizedKeyComputerFactoryProvider(format.getNormalizedKeyComputerFactoryProvider());
-
-        JobEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(asterixJobId,
-                queryMetadataProvider.isWriteTransaction());
-        JobSpecification spec = compiler.createJob(AsterixAppContextInfo.getInstance(), jobEventListenerFactory);
-
-        if (conf.is(SessionConfig.OOB_HYRACKS_JOB)) {
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("<h4>Hyracks job:</h4>");
-                conf.out().println("<pre>");
-            } else {
-                conf.out().println("----------Hyracks job:");
-            }
-
-            if (rwQ != null) {
-                conf.out().println(spec.toJSON().toString(1));
-                conf.out().println(spec.getUserConstraints());
-            }
-
-            if (conf.is(SessionConfig.FORMAT_HTML)) {
-                conf.out().println("</pre>");
-            }
-        }
-        return spec;
-    }
-
-    public static void executeJobArray(IHyracksClientConnection hcc, JobSpecification[] specs, PrintWriter out)
-            throws Exception {
-        for (int i = 0; i < specs.length; i++) {
-            specs[i].setMaxReattempts(0);
-            JobId jobId = hcc.startJob(specs[i]);
-            long startTime = System.currentTimeMillis();
-            hcc.waitForCompletion(jobId);
-            long endTime = System.currentTimeMillis();
-            double duration = (endTime - startTime) / 1000.00;
-            out.println("<pre>Duration: " + duration + " sec</pre>");
-        }
-
-    }
-
-    public static void executeJobArray(IHyracksClientConnection hcc, Job[] jobs, PrintWriter out) throws Exception {
-        for (int i = 0; i < jobs.length; i++) {
-            jobs[i].getJobSpec().setMaxReattempts(0);
-            long startTime = System.currentTimeMillis();
-            try {
-                JobId jobId = hcc.startJob(jobs[i].getJobSpec());
-                if (jobs[i].getSubmissionMode() == SubmissionMode.ASYNCHRONOUS) {
-                    continue;
-                }
-                hcc.waitForCompletion(jobId);
-            } catch (Exception e) {
-                e.printStackTrace();
-                continue;
-            }
-            long endTime = System.currentTimeMillis();
-            double duration = (endTime - startTime) / 1000.00;
-            out.println("<pre>Duration: " + duration + " sec</pre>");
-        }
-
-    }
-
-    private static IDataFormat getDataFormat(MetadataTransactionContext mdTxnCtx, String dataverseName)
-            throws AsterixException {
-        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
-        IDataFormat format;
-        try {
-            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
-        } catch (Exception e) {
-            throw new AsterixException(e);
-        }
-        return format;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java
deleted file mode 100644
index 6d7f2a4..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContext.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.logging.Logger;
-
-import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
-import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
-import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
-import edu.uci.ics.asterix.common.config.AsterixFeedProperties;
-import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
-import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.context.AsterixFileMapManager;
-import edu.uci.ics.asterix.common.context.DatasetLifecycleManager;
-import edu.uci.ics.asterix.common.exceptions.ACIDException;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.feeds.api.IFeedManager;
-import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
-import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
-import edu.uci.ics.asterix.feeds.FeedManager;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepositoryFactory;
-import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
-import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
-import edu.uci.ics.hyracks.api.application.INCApplicationContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponent;
-import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AsynchronousScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.PrefixMergePolicyFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageCleanerPolicy;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
-
-public class AsterixAppRuntimeContext implements IAsterixAppRuntimeContext, IAsterixPropertiesProvider {
-
-    private static final AsterixPropertiesAccessor ASTERIX_PROPERTIES_ACCESSOR;
-
-    static {
-        try {
-            ASTERIX_PROPERTIES_ACCESSOR = new AsterixPropertiesAccessor();
-        } catch (AsterixException e) {
-            throw new ExceptionInInitializerError(e);
-        }
-    }
-
-    private static final int METADATA_IO_DEVICE_ID = 0;
-
-    private ILSMMergePolicyFactory metadataMergePolicyFactory;
-    private final INCApplicationContext ncApplicationContext;
-
-    private AsterixCompilerProperties compilerProperties;
-    private AsterixExternalProperties externalProperties;
-    private AsterixMetadataProperties metadataProperties;
-    private AsterixStorageProperties storageProperties;
-    private AsterixTransactionProperties txnProperties;
-    private AsterixFeedProperties feedProperties;
-
-
-    private AsterixThreadExecutor threadExecutor;
-    private DatasetLifecycleManager indexLifecycleManager;
-    private IFileMapManager fileMapManager;
-    private IBufferCache bufferCache;
-    private ITransactionSubsystem txnSubsystem;
-
-    private ILSMIOOperationScheduler lsmIOScheduler;
-    private ILocalResourceRepository localResourceRepository;
-    private ResourceIdFactory resourceIdFactory;
-    private IIOManager ioManager;
-    private boolean isShuttingdown;
-
-    private IFeedManager feedManager;
-
-    public AsterixAppRuntimeContext(INCApplicationContext ncApplicationContext) throws AsterixException {
-        this.ncApplicationContext = ncApplicationContext;
-        compilerProperties = new AsterixCompilerProperties(ASTERIX_PROPERTIES_ACCESSOR);
-        externalProperties = new AsterixExternalProperties(ASTERIX_PROPERTIES_ACCESSOR);
-        metadataProperties = new AsterixMetadataProperties(ASTERIX_PROPERTIES_ACCESSOR);
-        storageProperties = new AsterixStorageProperties(ASTERIX_PROPERTIES_ACCESSOR);
-        txnProperties = new AsterixTransactionProperties(ASTERIX_PROPERTIES_ACCESSOR);
-        feedProperties = new AsterixFeedProperties(ASTERIX_PROPERTIES_ACCESSOR);
-    }
-
-    public void initialize() throws IOException, ACIDException, AsterixException {
-        Logger.getLogger("edu.uci.ics").setLevel(externalProperties.getLogLevel());
-
-        threadExecutor = new AsterixThreadExecutor(ncApplicationContext.getThreadFactory());
-        fileMapManager = new AsterixFileMapManager();
-        ICacheMemoryAllocator allocator = new HeapBufferAllocator();
-        IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
-        ioManager = ncApplicationContext.getRootContext().getIOManager();
-        IPageReplacementStrategy prs = new ClockPageReplacementStrategy(allocator,
-                storageProperties.getBufferCachePageSize(), storageProperties.getBufferCacheNumPages());
-        bufferCache = new BufferCache(ioManager, prs, pcp, fileMapManager,
-                storageProperties.getBufferCacheMaxOpenFiles(), ncApplicationContext.getThreadFactory());
-
-        AsynchronousScheduler.INSTANCE.init(ncApplicationContext.getThreadFactory());
-        lsmIOScheduler = AsynchronousScheduler.INSTANCE;
-
-        metadataMergePolicyFactory = new PrefixMergePolicyFactory();
-
-        ILocalResourceRepositoryFactory persistentLocalResourceRepositoryFactory = new PersistentLocalResourceRepositoryFactory(
-                ioManager);
-        localResourceRepository = (PersistentLocalResourceRepository) persistentLocalResourceRepositoryFactory
-                .createRepository();
-        resourceIdFactory = (new ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
-
-        IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProdiverForRecovery(
-                this);
-        txnSubsystem = new TransactionSubsystem(ncApplicationContext.getNodeId(), asterixAppRuntimeContextProvider,
-                txnProperties);
-        
-        indexLifecycleManager = new DatasetLifecycleManager(storageProperties, localResourceRepository,
-                MetadataPrimaryIndexes.FIRST_AVAILABLE_USER_DATASET_ID,(LogManager)txnSubsystem.getLogManager());
-        
-        isShuttingdown = false;
-
-        feedManager = new FeedManager(ncApplicationContext.getNodeId(), feedProperties,
-                compilerProperties.getFrameSize());
-
-        // The order of registration is important. The buffer cache must registered before recovery and transaction managers.
-        ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
-        lccm.register((ILifeCycleComponent) bufferCache);
-        lccm.register((ILifeCycleComponent) txnSubsystem.getTransactionManager());
-        lccm.register((ILifeCycleComponent) txnSubsystem.getLogManager());
-        lccm.register((ILifeCycleComponent) indexLifecycleManager);
-        lccm.register((ILifeCycleComponent) txnSubsystem.getLockManager());
-        lccm.register((ILifeCycleComponent) txnSubsystem.getRecoveryManager());
-    }
-
-    public boolean isShuttingdown() {
-        return isShuttingdown;
-    }
-
-    public void setShuttingdown(boolean isShuttingdown) {
-        this.isShuttingdown = isShuttingdown;
-    }
-
-    public void deinitialize() throws HyracksDataException {
-    }
-
-    public IBufferCache getBufferCache() {
-        return bufferCache;
-    }
-
-    public IFileMapProvider getFileMapManager() {
-        return fileMapManager;
-    }
-
-    public ITransactionSubsystem getTransactionSubsystem() {
-        return txnSubsystem;
-    }
-
-    public IIndexLifecycleManager getIndexLifecycleManager() {
-        return indexLifecycleManager;
-    }
-
-    public double getBloomFilterFalsePositiveRate() {
-        return storageProperties.getBloomFilterFalsePositiveRate();
-    }
-
-    public ILSMIOOperationScheduler getLSMIOScheduler() {
-        return lsmIOScheduler;
-    }
-
-    public ILocalResourceRepository getLocalResourceRepository() {
-        return localResourceRepository;
-    }
-
-    public ResourceIdFactory getResourceIdFactory() {
-        return resourceIdFactory;
-    }
-
-    public IIOManager getIOManager() {
-        return ioManager;
-    }
-
-    public int getMetaDataIODeviceId() {
-        return METADATA_IO_DEVICE_ID;
-    }
-
-    @Override
-    public AsterixStorageProperties getStorageProperties() {
-        return storageProperties;
-    }
-
-    @Override
-    public AsterixTransactionProperties getTransactionProperties() {
-        return txnProperties;
-    }
-
-    @Override
-    public AsterixCompilerProperties getCompilerProperties() {
-        return compilerProperties;
-    }
-
-    @Override
-    public AsterixMetadataProperties getMetadataProperties() {
-        return metadataProperties;
-    }
-
-    @Override
-    public AsterixExternalProperties getExternalProperties() {
-        return externalProperties;
-    }
-    
-    @Override
-    public AsterixFeedProperties getFeedProperties() {
-        return feedProperties;
-    }
-
-    @Override
-    public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
-        return indexLifecycleManager.getVirtualBufferCaches(datasetID);
-    }
-
-    @Override
-    public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
-        return indexLifecycleManager.getOperationTracker(datasetID);
-    }
-
-    @Override
-    public AsterixThreadExecutor getThreadExecutor() {
-        return threadExecutor;
-    }
-
-    public ILSMMergePolicyFactory getMetadataMergePolicyFactory() {
-        return metadataMergePolicyFactory;
-    }
-
-    @Override
-    public IFeedManager getFeedManager() {
-        return feedManager;
-    }
-}
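The registration-order comment in the block above is the one constraint worth internalizing: the buffer cache has to be registered with the lifecycle component manager before the transaction and recovery managers, because shutdown typically runs in reverse registration order, so the dependents go down before the buffer cache they rely on. Below is a minimal, self-contained sketch of that idea; the names are hypothetical stand-ins, not the actual Hyracks ILifeCycleComponentManager API.

import java.util.ArrayDeque;
import java.util.Deque;

interface LifeCycleComponent {
    void start();
    void stop();
}

final class SimpleLifeCycleManager {
    private final Deque<LifeCycleComponent> components = new ArrayDeque<>();

    void register(LifeCycleComponent c) {
        components.addLast(c); // remembered in registration order
    }

    void startAll() {
        components.forEach(LifeCycleComponent::start);
    }

    void stopAll() {
        // Stop in reverse order: components registered later (e.g. the recovery
        // manager) are stopped before the buffer cache they depend on.
        components.descendingIterator().forEachRemaining(LifeCycleComponent::stop);
    }
}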

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
deleted file mode 100644
index 938a8b9..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.api.common;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
-import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
-import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
-import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
-import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
-
-public class AsterixAppRuntimeContextProdiverForRecovery implements IAsterixAppRuntimeContextProvider {
-
-    private final AsterixAppRuntimeContext asterixAppRuntimeContext;
-
-    public AsterixAppRuntimeContextProdiverForRecovery(AsterixAppRuntimeContext asterixAppRuntimeContext) {
-        this.asterixAppRuntimeContext = asterixAppRuntimeContext;
-    }
-
-    @Override
-    public IBufferCache getBufferCache() {
-        return asterixAppRuntimeContext.getBufferCache();
-    }
-
-    @Override
-    public IFileMapProvider getFileMapManager() {
-        return asterixAppRuntimeContext.getFileMapManager();
-    }
-
-    @Override
-    public ITransactionSubsystem getTransactionSubsystem() {
-        return asterixAppRuntimeContext.getTransactionSubsystem();
-    }
-
-    @Override
-    public IIndexLifecycleManager getIndexLifecycleManager() {
-        return asterixAppRuntimeContext.getIndexLifecycleManager();
-    }
-
-    @Override
-    public double getBloomFilterFalsePositiveRate() {
-        return asterixAppRuntimeContext.getBloomFilterFalsePositiveRate();
-    }
-
-    @Override
-    public ILSMIOOperationScheduler getLSMIOScheduler() {
-        return asterixAppRuntimeContext.getLSMIOScheduler();
-    }
-
-    @Override
-    public ILocalResourceRepository getLocalResourceRepository() {
-        return asterixAppRuntimeContext.getLocalResourceRepository();
-    }
-
-    @Override
-    public ResourceIdFactory getResourceIdFactory() {
-        return asterixAppRuntimeContext.getResourceIdFactory();
-    }
-
-    @Override
-    public IIOManager getIOManager() {
-        return asterixAppRuntimeContext.getIOManager();
-    }
-
-    @Override
-    public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
-        return asterixAppRuntimeContext.getVirtualBufferCaches(datasetID);
-    }
-
-    @Override
-    public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
-        return asterixAppRuntimeContext.getLSMBTreeOperationTracker(datasetID);
-    }
-
-    @Override
-    public IAsterixAppRuntimeContext getAppContext() {
-        return asterixAppRuntimeContext;
-    }
-
-    @Override
-    public AsterixThreadExecutor getThreadExecutor() {
-        return asterixAppRuntimeContext.getThreadExecutor();
-    }
-}
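The deleted class above is a pure delegation wrapper: it exposes the runtime context's resources behind the IAsterixAppRuntimeContextProvider interface so the transaction and recovery code can reach them without depending on the concrete application context class. A stripped-down sketch of the same indirection, using hypothetical types in place of the real interfaces:

// Hypothetical interfaces standing in for the real runtime-context types.
interface BufferCacheLike {
    void flushDirtyPages();
}

interface RuntimeContextProvider {
    BufferCacheLike getBufferCache();
}

final class RuntimeContext {
    private final BufferCacheLike bufferCache = () -> { /* no-op for the sketch */ };

    BufferCacheLike getBufferCache() {
        return bufferCache;
    }
}

// The provider simply forwards every call to the wrapped context, mirroring the class above.
final class RuntimeContextProviderForRecovery implements RuntimeContextProvider {
    private final RuntimeContext ctx;

    RuntimeContextProviderForRecovery(RuntimeContext ctx) {
        this.ctx = ctx;
    }

    @Override
    public BufferCacheLike getBufferCache() {
        return ctx.getBufferCache();
    }
}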



http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
deleted file mode 100644
index b6d07a3..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
+++ /dev/null
@@ -1,747 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
-import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AOrderedList;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
-import edu.uci.ics.asterix.optimizer.rules.am.OptimizableOperatorSubTree.DataSourceType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Class that embodies the commonalities between rewrite rules for access
- * methods.
- */
-public abstract class AbstractIntroduceAccessMethodRule implements IAlgebraicRewriteRule {
-
-    private AqlMetadataProvider metadataProvider;
-
-    public abstract Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods();
-
-    protected static void registerAccessMethod(IAccessMethod accessMethod,
-            Map<FunctionIdentifier, List<IAccessMethod>> accessMethods) {
-        List<FunctionIdentifier> funcs = accessMethod.getOptimizableFunctions();
-        for (FunctionIdentifier funcIdent : funcs) {
-            List<IAccessMethod> l = accessMethods.get(funcIdent);
-            if (l == null) {
-                l = new ArrayList<IAccessMethod>();
-                accessMethods.put(funcIdent, l);
-            }
-            l.add(accessMethod);
-        }
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    protected void setMetadataDeclarations(IOptimizationContext context) {
-        metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-    }
-
-    protected void fillSubTreeIndexExprs(OptimizableOperatorSubTree subTree,
-            Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs, IOptimizationContext context)
-            throws AlgebricksException {
-        Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
-        // Check applicability of indexes by access method type.
-        while (amIt.hasNext()) {
-            Map.Entry<IAccessMethod, AccessMethodAnalysisContext> entry = amIt.next();
-            AccessMethodAnalysisContext amCtx = entry.getValue();
-            // For the current access method type, map variables to applicable
-            // indexes.
-            fillAllIndexExprs(subTree, amCtx, context);
-        }
-    }
-
-    protected void pruneIndexCandidates(Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs)
-            throws AlgebricksException {
-        Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
-        // Check applicability of indexes by access method type.
-        while (amIt.hasNext()) {
-            Map.Entry<IAccessMethod, AccessMethodAnalysisContext> entry = amIt.next();
-            AccessMethodAnalysisContext amCtx = entry.getValue();
-            pruneIndexCandidates(entry.getKey(), amCtx);
-            // Remove access methods for which there are definitely no
-            // applicable indexes.
-            if (amCtx.indexExprsAndVars.isEmpty()) {
-                amIt.remove();
-            }
-        }
-    }
-
-    /**
-     * Simply picks the first index that it finds. TODO: Improve this decision
-     * process by making it more systematic.
-     */
-    protected Pair<IAccessMethod, Index> chooseIndex(Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs) {
-        Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
-        while (amIt.hasNext()) {
-            Map.Entry<IAccessMethod, AccessMethodAnalysisContext> amEntry = amIt.next();
-            AccessMethodAnalysisContext analysisCtx = amEntry.getValue();
-            Iterator<Map.Entry<Index, List<Pair<Integer, Integer>>>> indexIt = analysisCtx.indexExprsAndVars.entrySet()
-                    .iterator();
-            if (indexIt.hasNext()) {
-                Map.Entry<Index, List<Pair<Integer, Integer>>> indexEntry = indexIt.next();
-                // Avoid the case where the chosen access method and the chosen
-                // index type do not match.
-                // Allowed Case: [BTreeAccessMethod , IndexType.BTREE],
-                //               [RTreeAccessMethod , IndexType.RTREE],
-                //               [InvertedIndexAccessMethod,
-                //                 IndexType.SINGLE_PARTITION_WORD_INVIX ||
-                //                           SINGLE_PARTITION_NGRAM_INVIX ||
-                //                           LENGTH_PARTITIONED_WORD_INVIX ||
-                //                           LENGTH_PARTITIONED_NGRAM_INVIX]
-                IAccessMethod chosenAccessMethod = amEntry.getKey();
-                Index chosenIndex = indexEntry.getKey();
-                boolean isKeywordOrNgramIndexChosen = false;
-                if (chosenIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
-                        || chosenIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX
-                        || chosenIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
-                        || chosenIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX)
-                    isKeywordOrNgramIndexChosen = true;
-                if ((chosenAccessMethod == BTreeAccessMethod.INSTANCE && chosenIndex.getIndexType() != IndexType.BTREE)
-                        || (chosenAccessMethod == RTreeAccessMethod.INSTANCE && chosenIndex.getIndexType() != IndexType.RTREE)
-                        || (chosenAccessMethod == InvertedIndexAccessMethod.INSTANCE && !isKeywordOrNgramIndexChosen)) {
-                    continue;
-                }
-                return new Pair<IAccessMethod, Index>(chosenAccessMethod, chosenIndex);
-            }
-        }
-        return null;
-    }
-
-    /**
-     * Removes irrelevant access method candidates, based on whether the
-     * expressions in the query match those in the index. For example, some
-     * indexes may require all of their expressions to be matched, while others
-     * may only require a match on a prefix of fields to be applicable. This
-     * method removes from indexExprsAndVars all index candidates that are
-     * definitely not applicable given the expressions involved.
-     *
-     * @throws AlgebricksException
-     */
-    public void pruneIndexCandidates(IAccessMethod accessMethod, AccessMethodAnalysisContext analysisCtx)
-            throws AlgebricksException {
-        Iterator<Map.Entry<Index, List<Pair<Integer, Integer>>>> indexExprAndVarIt = analysisCtx.indexExprsAndVars
-                .entrySet().iterator();
-        // Used to keep track of matched expressions (added for prefix search)
-        int numMatchedKeys = 0;
-        ArrayList<Integer> matchedExpressions = new ArrayList<Integer>();
-        while (indexExprAndVarIt.hasNext()) {
-            Map.Entry<Index, List<Pair<Integer, Integer>>> indexExprAndVarEntry = indexExprAndVarIt.next();
-            Index index = indexExprAndVarEntry.getKey();
-            boolean allUsed = true;
-            int lastFieldMatched = -1;
-            boolean foundKeyField = false;
-            matchedExpressions.clear();
-            numMatchedKeys = 0;
-
-            // Remove the candidate if the dataset is a metadata dataset and the index is secondary
-            // TODO: fix the way secondary metadata indexes are implemented and remove this check
-            if (accessMethod.matchPrefixIndexExprs()) {
-                if (index.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)
-                        && !index.isPrimaryIndex()) {
-                    indexExprAndVarIt.remove();
-                    continue;
-                }
-            }
-            for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
-                List<String> keyField = index.getKeyFieldNames().get(i);
-                final IAType keyType = index.getKeyFieldTypes().get(i);
-                Iterator<Pair<Integer, Integer>> exprsAndVarIter = indexExprAndVarEntry.getValue().iterator();
-                while (exprsAndVarIter.hasNext()) {
-                    final Pair<Integer, Integer> exprAndVarIdx = exprsAndVarIter.next();
-                    final IOptimizableFuncExpr optFuncExpr = analysisCtx.matchedFuncExprs.get(exprAndVarIdx.first);
-                    // If expr is not optimizable by concrete index then remove
-                    // expr and continue.
-                    if (!accessMethod.exprIsOptimizable(index, optFuncExpr)) {
-                        exprsAndVarIter.remove();
-                        continue;
-                    }
-                    boolean typeMatch = true;
-                    //Prune indexes based on field types
-                    List<IAType> indexedTypes = new ArrayList<IAType>();
-                    //retrieve types of expressions joined/selected with an indexed field
-                    for (int j = 0; j < optFuncExpr.getNumLogicalVars(); j++)
-                        if (j != exprAndVarIdx.second)
-                            indexedTypes.add(optFuncExpr.getFieldType(j));
-                    //add constants in case of select
-                    if (indexedTypes.size() < 2 && optFuncExpr.getNumLogicalVars() == 1) {
-                        indexedTypes.add((IAType) AqlExpressionTypeComputer.INSTANCE.getType(new ConstantExpression(
-                                optFuncExpr.getConstantVal(0)), null, null));
-                    }
-                    //infer type of logicalExpr based on index keyType
-                    indexedTypes.add((IAType) AqlExpressionTypeComputer.INSTANCE.getType(
-                            optFuncExpr.getLogicalExpr(exprAndVarIdx.second), null, new IVariableTypeEnvironment() {
-
-                                @Override
-                                public Object getVarType(LogicalVariable var) throws AlgebricksException {
-                                    if (var.equals(optFuncExpr.getSourceVar(exprAndVarIdx.second)))
-                                        return keyType;
-                                    throw new IllegalArgumentException();
-                                }
-
-                                @Override
-                                public Object getVarType(LogicalVariable var, List<LogicalVariable> nonNullVariables,
-                                        List<List<LogicalVariable>> correlatedNullableVariableLists)
-                                        throws AlgebricksException {
-                                    if (var.equals(optFuncExpr.getSourceVar(exprAndVarIdx.second)))
-                                        return keyType;
-                                    throw new IllegalArgumentException();
-                                }
-
-                                @Override
-                                public void setVarType(LogicalVariable var, Object type) {
-                                    throw new IllegalArgumentException();
-                                }
-
-                                @Override
-                                public Object getType(ILogicalExpression expr) throws AlgebricksException {
-                                    return AqlExpressionTypeComputer.INSTANCE.getType(expr, null, this);
-                                }
-
-                                @Override
-                                public boolean substituteProducedVariable(LogicalVariable v1, LogicalVariable v2)
-                                        throws AlgebricksException {
-                                    throw new IllegalArgumentException();
-                                }
-                            }));
-
-                    //for the case when jaccard similarity is measured between ordered & unordered lists
-                    boolean jaccardSimilarity = optFuncExpr.getFuncExpr().getFunctionIdentifier().getName()
-                            .startsWith("similarity-jaccard-check");
-
-                    for (int j = 0; j < indexedTypes.size(); j++)
-                        for (int k = j + 1; k < indexedTypes.size(); k++)
-                            typeMatch &= isMatched(indexedTypes.get(j), indexedTypes.get(k), jaccardSimilarity);
-
-                    // Check if any field name in the optFuncExpr matches.
-                    if (optFuncExpr.findFieldName(keyField) != -1) {
-                        foundKeyField = typeMatch
-                                && optFuncExpr.getOperatorSubTree(exprAndVarIdx.second).hasDataSourceScan();
-                        if (foundKeyField) {
-                            matchedExpressions.add(exprAndVarIdx.first);
-                            numMatchedKeys++;
-                            if (lastFieldMatched == i - 1) {
-                                lastFieldMatched = i;
-                            }
-                            break;
-                        }
-                    }
-                }
-                if (!foundKeyField) {
-                    allUsed = false;
-                    // if any expression was matched, remove the non-matched expressions, otherwise the index is unusable
-                    if (lastFieldMatched >= 0) {
-                        exprsAndVarIter = indexExprAndVarEntry.getValue().iterator();
-                        while (exprsAndVarIter.hasNext()) {
-                            if (!matchedExpressions.contains(exprsAndVarIter.next().first)) {
-                                exprsAndVarIter.remove();
-                            }
-                        }
-                    }
-                    break;
-                }
-            }
-            // If the access method requires all exprs to be matched but they
-            // are not, remove this candidate.
-            if (!allUsed && accessMethod.matchAllIndexExprs()) {
-                indexExprAndVarIt.remove();
-                continue;
-            }
-            // A prefix of the index exprs may have been matched.
-            if (accessMethod.matchPrefixIndexExprs()) {
-                if (lastFieldMatched < 0) {
-                    indexExprAndVarIt.remove();
-                    continue;
-                }
-            }
-            analysisCtx.indexNumMatchedKeys.put(index, new Integer(numMatchedKeys));
-        }
-    }
-
-    private boolean isMatched(IAType type1, IAType type2, boolean useListDomain) throws AlgebricksException {
-        if (ATypeHierarchy.isSameTypeDomain(Index.getNonNullableType(type1).first.getTypeTag(),
-                Index.getNonNullableType(type2).first.getTypeTag(), useListDomain))
-            return true;
-        return ATypeHierarchy.canPromote(Index.getNonNullableType(type1).first.getTypeTag(),
-                Index.getNonNullableType(type2).first.getTypeTag());
-    }
-
-    /**
-     * Analyzes the given selection condition, filling analyzedAMs with
-     * applicable access method types. At this point we are not yet consulting
-     * the metadata to check whether an actual index exists or not.
-     */
-    protected boolean analyzeCondition(ILogicalExpression cond, List<AbstractLogicalOperator> assignsAndUnnests,
-            Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs) {
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) cond;
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-        // Don't consider optimizing a disjunctive condition with an index (too
-        // complicated for now).
-        if (funcIdent == AlgebricksBuiltinFunctions.OR) {
-            return false;
-        }
-        boolean found = analyzeFunctionExpr(funcExpr, assignsAndUnnests, analyzedAMs);
-        for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
-            ILogicalExpression argExpr = arg.getValue();
-            if (argExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                continue;
-            }
-            AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
-            boolean matchFound = analyzeFunctionExpr(argFuncExpr, assignsAndUnnests, analyzedAMs);
-            found = found || matchFound;
-        }
-        return found;
-    }
-
-    /**
-     * Finds applicable access methods for the given function expression based
-     * on the function identifier, and an analysis of the function's arguments.
-     * Updates the analyzedAMs accordingly.
-     */
-    protected boolean analyzeFunctionExpr(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs) {
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-        if (funcIdent == AlgebricksBuiltinFunctions.AND) {
-            return false;
-        }
-        // Retrieves the list of access methods that are relevant based on the
-        // funcIdent.
-        List<IAccessMethod> relevantAMs = getAccessMethods().get(funcIdent);
-        if (relevantAMs == null) {
-            return false;
-        }
-        boolean atLeastOneMatchFound = false;
-        // Place holder for a new analysis context in case we need one.
-        AccessMethodAnalysisContext newAnalysisCtx = new AccessMethodAnalysisContext();
-        for (IAccessMethod accessMethod : relevantAMs) {
-            AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(accessMethod);
-            // Use the current place holder.
-            if (analysisCtx == null) {
-                analysisCtx = newAnalysisCtx;
-            }
-            // Analyzes the funcExpr's arguments to see if the accessMethod is
-            // truly applicable.
-            boolean matchFound = accessMethod.analyzeFuncExprArgs(funcExpr, assignsAndUnnests, analysisCtx);
-            if (matchFound) {
-                // If we've used the current new context placeholder, replace it
-                // with a new one.
-                if (analysisCtx == newAnalysisCtx) {
-                    analyzedAMs.put(accessMethod, analysisCtx);
-                    newAnalysisCtx = new AccessMethodAnalysisContext();
-                }
-                atLeastOneMatchFound = true;
-            }
-        }
-        return atLeastOneMatchFound;
-    }
-
-    /**
-     * Finds secondary indexes whose keys include fieldName, and adds a mapping
-     * in analysisCtx.indexExprsAndVars from that index to the corresponding
-     * optimizable function expression.
-     *
-     * @return true if at least one candidate index was added to analysisCtx,
-     *         false otherwise
-     * @throws AlgebricksException
-     */
-    protected boolean fillIndexExprs(List<Index> datasetIndexes, List<String> fieldName, IAType fieldType,
-            IOptimizableFuncExpr optFuncExpr, int matchedFuncExprIndex, int varIdx,
-            OptimizableOperatorSubTree matchedSubTree, AccessMethodAnalysisContext analysisCtx)
-            throws AlgebricksException {
-        List<Index> indexCandidates = new ArrayList<Index>();
-        // Add an index to the candidates if one of the indexed fields is
-        // fieldName
-        for (Index index : datasetIndexes) {
-            // Also verify that the index has no pending operation (PENDING_NO_OP).
-            if (index.getKeyFieldNames().contains(fieldName) && index.getPendingOp() == IMetadataEntity.PENDING_NO_OP) {
-                indexCandidates.add(index);
-                if (optFuncExpr.getFieldType(varIdx) == BuiltinType.ANULL
-                        || optFuncExpr.getFieldType(varIdx) == BuiltinType.ANY)
-                    optFuncExpr.setFieldType(varIdx,
-                            index.getKeyFieldTypes().get(index.getKeyFieldNames().indexOf(fieldName)));
-                analysisCtx.addIndexExpr(matchedSubTree.dataset, index, matchedFuncExprIndex, varIdx);
-            }
-        }
-        // No index candidates for fieldName.
-        if (indexCandidates.isEmpty()) {
-            return false;
-        }
-        return true;
-    }
-
-    protected void fillAllIndexExprs(OptimizableOperatorSubTree subTree, AccessMethodAnalysisContext analysisCtx,
-            IOptimizationContext context) throws AlgebricksException {
-        int optFuncExprIndex = 0;
-        List<Index> datasetIndexes = new ArrayList<Index>();
-        if (subTree.dataSourceType != DataSourceType.COLLECTION_SCAN)
-            datasetIndexes = metadataProvider.getDatasetIndexes(subTree.dataset.getDataverseName(),
-                    subTree.dataset.getDatasetName());
-        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
-            // Try to match variables from optFuncExpr to assigns or unnests.
-            for (int assignOrUnnestIndex = 0; assignOrUnnestIndex < subTree.assignsAndUnnests.size(); assignOrUnnestIndex++) {
-                AbstractLogicalOperator op = subTree.assignsAndUnnests.get(assignOrUnnestIndex);
-                if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                    AssignOperator assignOp = (AssignOperator) op;
-                    List<LogicalVariable> varList = assignOp.getVariables();
-                    for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
-                        LogicalVariable var = varList.get(varIndex);
-                        int optVarIndex = optFuncExpr.findLogicalVar(var);
-                        // No matching var in optFuncExpr.
-                        if (optVarIndex == -1) {
-                            continue;
-                        }
-                        // At this point we have matched the optimizable func
-                        // expr at optFuncExprIndex to an assigned variable.
-                        // Remember matching subtree.
-                        optFuncExpr.setOptimizableSubTree(optVarIndex, subTree);
-                        List<String> fieldName = getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex,
-                                varIndex, subTree.recordType, optVarIndex, optFuncExpr.getFuncExpr().getArguments()
-                                        .get(optVarIndex).getValue());
-                        if (fieldName == null) {
-                            continue;
-                        }
-                        IAType fieldType = (IAType) context.getOutputTypeEnvironment(assignOp).getType(
-                                optFuncExpr.getLogicalExpr(optVarIndex));
-                        // Set the fieldName in the corresponding matched
-                        // function expression.
-                        optFuncExpr.setFieldName(optVarIndex, fieldName);
-                        optFuncExpr.setFieldType(optVarIndex, fieldType);
-
-                        setTypeTag(context, subTree, optFuncExpr, optVarIndex);
-                        if (subTree.hasDataSource()) {
-                            fillIndexExprs(datasetIndexes, fieldName, fieldType, optFuncExpr, optFuncExprIndex,
-                                    optVarIndex, subTree, analysisCtx);
-                        }
-                    }
-                } else {
-                    UnnestOperator unnestOp = (UnnestOperator) op;
-                    LogicalVariable var = unnestOp.getVariable();
-                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
-                    // No matching var in optFuncExpr.
-                    if (funcVarIndex == -1) {
-                        continue;
-                    }
-                    // At this point we have matched the optimizable func expr
-                    // at optFuncExprIndex to an unnest variable.
-                    // Remember matching subtree.
-                    optFuncExpr.setOptimizableSubTree(funcVarIndex, subTree);
-                    List<String> fieldName = null;
-                    if (subTree.dataSourceType == DataSourceType.COLLECTION_SCAN) {
-                        optFuncExpr.setLogicalExpr(funcVarIndex, new VariableReferenceExpression(var));
-                    } else {
-                        fieldName = getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex, 0,
-                                subTree.recordType, funcVarIndex,
-                                optFuncExpr.getFuncExpr().getArguments().get(funcVarIndex).getValue());
-                        if (fieldName == null) {
-                            continue;
-                        }
-                    }
-                    IAType fieldType = (IAType) context.getOutputTypeEnvironment(unnestOp).getType(
-                            optFuncExpr.getLogicalExpr(funcVarIndex));
-                    // Set the fieldName in the corresponding matched function
-                    // expression.
-                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
-                    optFuncExpr.setFieldType(funcVarIndex, fieldType);
-
-                    setTypeTag(context, subTree, optFuncExpr, funcVarIndex);
-                    if (subTree.hasDataSource()) {
-                        fillIndexExprs(datasetIndexes, fieldName, fieldType, optFuncExpr, optFuncExprIndex,
-                                funcVarIndex, subTree, analysisCtx);
-                    }
-                }
-            }
-
-            // Try to match variables from optFuncExpr to datasourcescan if not
-            // already matched in assigns.
-            List<LogicalVariable> dsVarList = subTree.getDataSourceVariables();
-            for (int varIndex = 0; varIndex < dsVarList.size(); varIndex++) {
-                LogicalVariable var = dsVarList.get(varIndex);
-                int funcVarIndex = optFuncExpr.findLogicalVar(var);
-                // No matching var in optFuncExpr.
-                if (funcVarIndex == -1) {
-                    continue;
-                }
-                // The variable value is one of the partitioning fields.
-                List<String> fieldName = DatasetUtils.getPartitioningKeys(subTree.dataset).get(varIndex);
-                IAType fieldType = (IAType) context.getOutputTypeEnvironment(subTree.dataSourceRef.getValue())
-                        .getVarType(var);
-                // Set the fieldName in the corresponding matched function
-                // expression, and remember matching subtree.
-                optFuncExpr.setFieldName(funcVarIndex, fieldName);
-                optFuncExpr.setOptimizableSubTree(funcVarIndex, subTree);
-                optFuncExpr.setSourceVar(funcVarIndex, var);
-                optFuncExpr.setLogicalExpr(funcVarIndex, new VariableReferenceExpression(var));
-                setTypeTag(context, subTree, optFuncExpr, funcVarIndex);
-                if (subTree.hasDataSourceScan()) {
-                    fillIndexExprs(datasetIndexes, fieldName, fieldType, optFuncExpr, optFuncExprIndex, funcVarIndex,
-                            subTree, analysisCtx);
-                }
-            }
-            optFuncExprIndex++;
-        }
-    }
-
-    private void setTypeTag(IOptimizationContext context, OptimizableOperatorSubTree subTree,
-            IOptimizableFuncExpr optFuncExpr, int funcVarIndex) throws AlgebricksException {
-        // Set the typeTag if the type is not null
-        IAType type = (IAType) context.getOutputTypeEnvironment(subTree.root).getVarType(
-                optFuncExpr.getLogicalVar(funcVarIndex));
-        optFuncExpr.setFieldType(funcVarIndex, type);
-    }
-
-    /**
-     * Returns the field name corresponding to the assigned variable at
-     * varIndex. Returns null if the expr at varIndex cannot be traced back to a
-     * field access function by following a set of allowed functions.
-     * 
-     * @throws AlgebricksException
-     */
-    protected List<String> getFieldNameFromSubTree(IOptimizableFuncExpr optFuncExpr,
-            OptimizableOperatorSubTree subTree, int opIndex, int assignVarIndex, ARecordType recordType,
-            int funcVarIndex, ILogicalExpression parentFuncExpr) throws AlgebricksException {
-        // Get expression corresponding to opVar at varIndex.
-        AbstractLogicalExpression expr = null;
-        AbstractFunctionCallExpression childFuncExpr = null;
-        AbstractLogicalOperator op = subTree.assignsAndUnnests.get(opIndex);
-        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            AssignOperator assignOp = (AssignOperator) op;
-            expr = (AbstractLogicalExpression) assignOp.getExpressions().get(assignVarIndex).getValue();
-            childFuncExpr = (AbstractFunctionCallExpression) expr;
-        } else {
-            UnnestOperator unnestOp = (UnnestOperator) op;
-            expr = (AbstractLogicalExpression) unnestOp.getExpressionRef().getValue();
-            if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                return null;
-            }
-            childFuncExpr = (AbstractFunctionCallExpression) expr;
-            if (childFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
-                return null;
-            }
-            expr = (AbstractLogicalExpression) childFuncExpr.getArguments().get(0).getValue();
-        }
-        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return null;
-        }
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-
-        boolean isByName = false;
-        boolean isFieldAccess = false;
-        String fieldName = null;
-        List<String> nestedAccessFieldName = null;
-        int fieldIndex = -1;
-        if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME) {
-            ILogicalExpression nameArg = funcExpr.getArguments().get(1).getValue();
-            if (nameArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                return null;
-            }
-            ConstantExpression constExpr = (ConstantExpression) nameArg;
-            fieldName = ((AString) ((AsterixConstantValue) constExpr.getValue()).getObject()).getStringValue();
-            isFieldAccess = true;
-            isByName = true;
-        } else if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX) {
-            ILogicalExpression idxArg = funcExpr.getArguments().get(1).getValue();
-            if (idxArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                return null;
-            }
-            ConstantExpression constExpr = (ConstantExpression) idxArg;
-            fieldIndex = ((AInt32) ((AsterixConstantValue) constExpr.getValue()).getObject()).getIntegerValue();
-            isFieldAccess = true;
-        } else if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_NESTED) {
-            ILogicalExpression nameArg = funcExpr.getArguments().get(1).getValue();
-            if (nameArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                return null;
-            }
-            ConstantExpression constExpr = (ConstantExpression) nameArg;
-            AOrderedList orderedNestedFieldName = (AOrderedList) ((AsterixConstantValue) constExpr.getValue())
-                    .getObject();
-            nestedAccessFieldName = new ArrayList<String>();
-            for (int i = 0; i < orderedNestedFieldName.size(); i++) {
-                nestedAccessFieldName.add(((AString) orderedNestedFieldName.getItem(i)).getStringValue());
-            }
-            isFieldAccess = true;
-            isByName = true;
-        }
-        if (isFieldAccess) {
-            optFuncExpr.setLogicalExpr(funcVarIndex, parentFuncExpr);
-            int[] assignAndExpressionIndexes = null;
-
-            //go forward through nested assigns until you find the relevant one
-            for (int i = opIndex + 1; i < subTree.assignsAndUnnests.size(); i++) {
-                AbstractLogicalOperator subOp = subTree.assignsAndUnnests.get(i);
-                List<LogicalVariable> varList;
-
-                if (subOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                    //Nested was an assign
-                    varList = ((AssignOperator) subOp).getVariables();
-                } else if (subOp.getOperatorTag() == LogicalOperatorTag.UNNEST) {
-                    //Nested is not an assign
-                    varList = ((UnnestOperator) subOp).getVariables();
-                } else {
-                    break;
-                }
-
-                //Go through variables in assign to check for match
-                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
-                    LogicalVariable var = varList.get(varIndex);
-                    ArrayList<LogicalVariable> parentVars = new ArrayList<LogicalVariable>();
-                    expr.getUsedVariables(parentVars);
-
-                    if (parentVars.contains(var)) {
-                        //Found the variable we are looking for.
-                        //return assign and index of expression
-                        int[] returnValues = { i, varIndex };
-                        assignAndExpressionIndexes = returnValues;
-                    }
-                }
-            }
-            if (assignAndExpressionIndexes != null && assignAndExpressionIndexes[0] > -1) {
-                //We found the nested assign
-
-                //Recursive call on nested assign
-                List<String> parentFieldNames = getFieldNameFromSubTree(optFuncExpr, subTree,
-                        assignAndExpressionIndexes[0], assignAndExpressionIndexes[1], recordType, funcVarIndex,
-                        parentFuncExpr);
-
-                if (parentFieldNames == null) {
-                    //Nested assign was not a field access. 
-                    //We will not use index
-                    return null;
-                }
-
-                if (!isByName) {
-                    try {
-                        fieldName = ((ARecordType) recordType.getSubFieldType(parentFieldNames)).getFieldNames()[fieldIndex];
-                    } catch (IOException e) {
-                        throw new AlgebricksException(e);
-                    }
-                }
-                optFuncExpr.setSourceVar(funcVarIndex, ((AssignOperator) op).getVariables().get(assignVarIndex));
-                //add fieldName to the nested fieldName, return
-                if (nestedAccessFieldName != null) {
-                    for (int i = 0; i < nestedAccessFieldName.size(); i++) {
-                        parentFieldNames.add(nestedAccessFieldName.get(i));
-                    }
-                } else {
-                    parentFieldNames.add(fieldName);
-                }
-                return (parentFieldNames);
-            }
-
-            optFuncExpr.setSourceVar(funcVarIndex, ((AssignOperator) op).getVariables().get(assignVarIndex));
-            //no nested assign, we are at the lowest level.
-            if (isByName) {
-                if (nestedAccessFieldName != null) {
-                    return nestedAccessFieldName;
-                }
-                return new ArrayList<String>(Arrays.asList(fieldName));
-            }
-            return new ArrayList<String>(Arrays.asList(recordType.getFieldNames()[fieldIndex]));
-
-        }
-
-        if (funcIdent != AsterixBuiltinFunctions.WORD_TOKENS && funcIdent != AsterixBuiltinFunctions.GRAM_TOKENS
-                && funcIdent != AsterixBuiltinFunctions.SUBSTRING
-                && funcIdent != AsterixBuiltinFunctions.SUBSTRING_BEFORE
-                && funcIdent != AsterixBuiltinFunctions.SUBSTRING_AFTER
-                && funcIdent != AsterixBuiltinFunctions.CREATE_POLYGON
-                && funcIdent != AsterixBuiltinFunctions.CREATE_MBR
-                && funcIdent != AsterixBuiltinFunctions.CREATE_RECTANGLE
-                && funcIdent != AsterixBuiltinFunctions.CREATE_CIRCLE
-                && funcIdent != AsterixBuiltinFunctions.CREATE_LINE
-                && funcIdent != AsterixBuiltinFunctions.CREATE_POINT) {
-            return null;
-        }
-        // We use a part of the field in edit distance computation
-        if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK) {
-            optFuncExpr.setPartialField(true);
-        }
-        // We expect the function's argument to be a variable, otherwise we
-        // cannot apply an index.
-        ILogicalExpression argExpr = funcExpr.getArguments().get(0).getValue();
-        if (argExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return null;
-        }
-        LogicalVariable curVar = ((VariableReferenceExpression) argExpr).getVariableReference();
-        // We look for the assign or unnest operator that produces curVar below
-        // the current operator
-        for (int assignOrUnnestIndex = opIndex + 1; assignOrUnnestIndex < subTree.assignsAndUnnests.size(); assignOrUnnestIndex++) {
-            AbstractLogicalOperator curOp = subTree.assignsAndUnnests.get(assignOrUnnestIndex);
-            if (curOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                AssignOperator assignOp = (AssignOperator) curOp;
-                List<LogicalVariable> varList = assignOp.getVariables();
-                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
-                    LogicalVariable var = varList.get(varIndex);
-                    if (var.equals(curVar)) {
-                        optFuncExpr.setSourceVar(funcVarIndex, var);
-                        return getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex, varIndex, recordType,
-                                funcVarIndex, childFuncExpr);
-                    }
-                }
-            } else {
-                UnnestOperator unnestOp = (UnnestOperator) curOp;
-                LogicalVariable var = unnestOp.getVariable();
-                if (var.equals(curVar)) {
-                    getFieldNameFromSubTree(optFuncExpr, subTree, assignOrUnnestIndex, 0, recordType, funcVarIndex,
-                            childFuncExpr);
-                }
-            }
-        }
-        return null;
-    }
-}
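Most of the control flow in the rule above hinges on one data structure: a multimap from function identifier to the access methods that can exploit it, built by registerAccessMethod() and consulted by analyzeFunctionExpr(). The following is a minimal sketch of that lookup, with String stand-ins for FunctionIdentifier and IAccessMethod rather than the real Algebricks types:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class AccessMethodRegistry {
    private final Map<String, List<String>> accessMethods = new HashMap<>();

    // Mirrors registerAccessMethod(): every function an access method can
    // optimize gets an entry pointing back at that method.
    void register(String accessMethod, List<String> optimizableFunctions) {
        for (String funcIdent : optimizableFunctions) {
            accessMethods.computeIfAbsent(funcIdent, k -> new ArrayList<>()).add(accessMethod);
        }
    }

    // Mirrors the lookup in analyzeFunctionExpr(): the function identifier of a
    // condition decides which access methods are even worth analyzing.
    List<String> relevantAccessMethods(String funcIdent) {
        return accessMethods.getOrDefault(funcIdent, Collections.emptyList());
    }

    public static void main(String[] args) {
        AccessMethodRegistry registry = new AccessMethodRegistry();
        registry.register("BTreeAccessMethod", List.of("eq", "lt", "le", "gt", "ge"));
        registry.register("InvertedIndexAccessMethod", List.of("contains", "similarity-jaccard-check"));
        System.out.println(registry.relevantAccessMethods("eq")); // [BTreeAccessMethod]
    }
}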

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java
deleted file mode 100644
index cc24912..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodAnalysisContext.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-
-/**
- * Context for analyzing the applicability of a single access method.
- */
-public class AccessMethodAnalysisContext {
-
-    public List<IOptimizableFuncExpr> matchedFuncExprs = new ArrayList<IOptimizableFuncExpr>();
-
-    // Maps each candidate index to a list of (integer, integer) pairs; the first integer indexes into
-    // matchedFuncExprs and the second identifies the matched variable inside that expression.
-    // In other words, each candidate index maps to the function expressions
-    // that match one of the index's fields.
-    public Map<Index, List<Pair<Integer, Integer>>> indexExprsAndVars = new TreeMap<Index, List<Pair<Integer, Integer>>>();
-
-    // Maps from index to the dataset it is indexing.
-    public Map<Index, Dataset> indexDatasetMap = new TreeMap<Index, Dataset>();
-    
-    // Maps from an index to the number of matched fields in the query plan (for performing prefix search)
-    public Map<Index, Integer> indexNumMatchedKeys = new TreeMap<Index, Integer>();
-
-    // variables for resetting null placeholder for left-outer-join
-    private Mutable<ILogicalOperator> lojGroupbyOpRef = null;
-    private ScalarFunctionCallExpression lojIsNullFuncInGroupBy = null;
-
-    public void addIndexExpr(Dataset dataset, Index index, Integer exprIndex, Integer varIndex) {
-        List<Pair<Integer, Integer>> exprs = indexExprsAndVars.get(index);
-        if (exprs == null) {
-            exprs = new ArrayList<Pair<Integer, Integer>>();
-            indexExprsAndVars.put(index, exprs);
-        }
-        exprs.add(new Pair<Integer, Integer>(exprIndex, varIndex));
-        indexDatasetMap.put(index, dataset);
-    }
-
-    public List<Pair<Integer, Integer>> getIndexExprs(Index index) {
-        return indexExprsAndVars.get(index);
-    }
-
-    public void setLOJGroupbyOpRef(Mutable<ILogicalOperator> opRef) {
-        lojGroupbyOpRef = opRef;
-    }
-
-    public Mutable<ILogicalOperator> getLOJGroupbyOpRef() {
-        return lojGroupbyOpRef;
-    }
-
-    public void setLOJIsNullFuncInGroupBy(ScalarFunctionCallExpression isNullFunc) {
-        lojIsNullFuncInGroupBy = isNullFunc;
-    }
-
-    public ScalarFunctionCallExpression getLOJIsNullFuncInGroupBy() {
-        return lojIsNullFuncInGroupBy;
-    }
-
-}
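The core of this context is indexExprsAndVars: each candidate index accumulates (exprIndex, varIndex) pairs that point back into matchedFuncExprs, with the per-index list created lazily on first use, as addIndexExpr() shows. Here is a small sketch of that bookkeeping with plain Strings and ints standing in for Index and the Algebricks Pair type:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class IndexMatchTable {

    // (exprIndex, varIndex): position in matchedFuncExprs and the matched variable inside it.
    static final class Match {
        final int exprIndex;
        final int varIndex;

        Match(int exprIndex, int varIndex) {
            this.exprIndex = exprIndex;
            this.varIndex = varIndex;
        }
    }

    private final Map<String, List<Match>> matchesByIndex = new HashMap<>();

    // Same lazy-list idiom as addIndexExpr() above: create the list on the first match.
    void addMatch(String indexName, int exprIndex, int varIndex) {
        matchesByIndex.computeIfAbsent(indexName, k -> new ArrayList<>()).add(new Match(exprIndex, varIndex));
    }

    List<Match> matches(String indexName) {
        return matchesByIndex.getOrDefault(indexName, new ArrayList<>());
    }
}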

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodJobGenParams.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
deleted file mode 100644
index 31943e4..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodJobGenParams.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-
-/**
- * Helper class for reading and writing job-gen parameters for access methods to
- * and from a list of function arguments, typically of an unnest-map.
- */
-public class AccessMethodJobGenParams {
-    protected String indexName;
-    protected IndexType indexType;
-    protected String dataverseName;
-    protected String datasetName;
-    protected boolean retainInput;
-    protected boolean retainNull;
-    protected boolean requiresBroadcast;
-    protected boolean isPrimaryIndex;
-
-    private final int NUM_PARAMS = 7;
-
-    public AccessMethodJobGenParams() {
-    }
-
-    public AccessMethodJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
-            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
-        this.indexName = indexName;
-        this.indexType = indexType;
-        this.dataverseName = dataverseName;
-        this.datasetName = datasetName;
-        this.retainInput = retainInput;
-        this.retainNull = retainNull;
-        this.requiresBroadcast = requiresBroadcast;
-        this.isPrimaryIndex = datasetName.equals(indexName);
-    }
-
-    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createStringConstant(indexName)));
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createInt32Constant(indexType.ordinal())));
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createStringConstant(dataverseName)));
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createStringConstant(datasetName)));
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createBooleanConstant(retainInput)));
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createBooleanConstant(retainNull)));
-        funcArgs.add(new MutableObject<ILogicalExpression>(AccessMethodUtils.createBooleanConstant(requiresBroadcast)));
-    }
-
-    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        indexName = AccessMethodUtils.getStringConstant(funcArgs.get(0));
-        indexType = IndexType.values()[AccessMethodUtils.getInt32Constant(funcArgs.get(1))];
-        dataverseName = AccessMethodUtils.getStringConstant(funcArgs.get(2));
-        datasetName = AccessMethodUtils.getStringConstant(funcArgs.get(3));
-        retainInput = AccessMethodUtils.getBooleanConstant(funcArgs.get(4));
-        retainNull = AccessMethodUtils.getBooleanConstant(funcArgs.get(5));
-        requiresBroadcast = AccessMethodUtils.getBooleanConstant(funcArgs.get(6));
-        isPrimaryIndex = datasetName.equals(indexName);
-    }
-
-    public String getIndexName() {
-        return indexName;
-    }
-
-    public IndexType getIndexType() {
-        return indexType;
-    }
-
-    public String getDataverseName() {
-        return dataverseName;
-    }
-
-    public String getDatasetName() {
-        return datasetName;
-    }
-
-    public boolean getRetainInput() {
-        return retainInput;
-    }
-    
-    public boolean getRetainNull() {
-        return retainNull;
-    }
-
-    public boolean getRequiresBroadcast() {
-        return requiresBroadcast;
-    }
-
-    protected void writeVarList(List<LogicalVariable> varList, List<Mutable<ILogicalExpression>> funcArgs) {
-        Mutable<ILogicalExpression> numKeysRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
-                new AsterixConstantValue(new AInt32(varList.size()))));
-        funcArgs.add(numKeysRef);
-        for (LogicalVariable keyVar : varList) {
-            Mutable<ILogicalExpression> keyVarRef = new MutableObject<ILogicalExpression>(
-                    new VariableReferenceExpression(keyVar));
-            funcArgs.add(keyVarRef);
-        }
-    }
-
-    protected int readVarList(List<Mutable<ILogicalExpression>> funcArgs, int index, List<LogicalVariable> varList) {
-        int numLowKeys = AccessMethodUtils.getInt32Constant(funcArgs.get(index));
-        if (numLowKeys > 0) {
-            for (int i = 0; i < numLowKeys; i++) {
-                LogicalVariable var = ((VariableReferenceExpression) funcArgs.get(index + 1 + i).getValue())
-                        .getVariableReference();
-                varList.add(var);
-            }
-        }
-        return index + numLowKeys + 1;
-    }
-
-    protected int getNumParams() {
-        return NUM_PARAMS;
-    }
-
-    public boolean isPrimaryIndex() {
-        return isPrimaryIndex;
-    }
-}
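
The class above follows a simple, positional serialization convention: every job-gen parameter is appended to a flat list of function arguments (constants for scalars, variable references for key variables) and later read back in exactly the same order. The sketch below illustrates that round trip with plain Java types instead of the Algebricks expression classes; the class and method names are illustrative only and are not part of the AsterixDB code base.

    import java.util.ArrayList;
    import java.util.List;

    // Simplified stand-in for AccessMethodJobGenParams: parameters are written to and
    // read back from a positional argument list, mirroring writeToFuncArgs/readFromFuncArgs.
    public class JobGenParamsSketch {
        String indexName;
        int indexTypeOrdinal;
        String dataverseName;
        String datasetName;
        boolean retainInput;
        boolean retainNull;
        boolean requiresBroadcast;

        // Append the seven parameters in a fixed order.
        void writeToArgs(List<Object> args) {
            args.add(indexName);
            args.add(indexTypeOrdinal);
            args.add(dataverseName);
            args.add(datasetName);
            args.add(retainInput);
            args.add(retainNull);
            args.add(requiresBroadcast);
        }

        // Read the parameters back by position; the order must match writeToArgs exactly.
        void readFromArgs(List<Object> args) {
            indexName = (String) args.get(0);
            indexTypeOrdinal = (Integer) args.get(1);
            dataverseName = (String) args.get(2);
            datasetName = (String) args.get(3);
            retainInput = (Boolean) args.get(4);
            retainNull = (Boolean) args.get(5);
            requiresBroadcast = (Boolean) args.get(6);
        }

        public static void main(String[] unused) {
            JobGenParamsSketch in = new JobGenParamsSketch();
            in.indexName = "sampleIdx";
            in.dataverseName = "SampleDataverse";
            in.datasetName = "SampleDataset";
            in.retainInput = true;

            List<Object> args = new ArrayList<>();
            in.writeToArgs(args);

            JobGenParamsSketch out = new JobGenParamsSketch();
            out.readFromArgs(args);
            // Prints "sampleIdx / SampleDataset": the values survive the round trip.
            System.out.println(out.indexName + " / " + out.datasetName);
        }
    }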

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
deleted file mode 100644
index 73377b9..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/AccessMethodUtils.java
+++ /dev/null
@@ -1,609 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.operators.physical.ExternalDataLookupPOperator;
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.external.IndexingConstants;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-
-/**
- * Static helper functions for rewriting plans using indexes.
- */
-public class AccessMethodUtils {
-
-    public static void appendPrimaryIndexTypes(Dataset dataset, IAType itemType, List<Object> target)
-            throws IOException, AlgebricksException {
-        ARecordType recordType = (ARecordType) itemType;
-        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
-        for (List<String> partitioningKey : partitioningKeys) {
-            target.add(recordType.getSubFieldType(partitioningKey));
-        }
-        target.add(itemType);
-    }
-
-    public static ConstantExpression createStringConstant(String str) {
-        return new ConstantExpression(new AsterixConstantValue(new AString(str)));
-    }
-
-    public static ConstantExpression createInt32Constant(int i) {
-        return new ConstantExpression(new AsterixConstantValue(new AInt32(i)));
-    }
-
-    public static ConstantExpression createBooleanConstant(boolean b) {
-        if (b) {
-            return new ConstantExpression(new AsterixConstantValue(ABoolean.TRUE));
-        } else {
-            return new ConstantExpression(new AsterixConstantValue(ABoolean.FALSE));
-        }
-    }
-
-    public static String getStringConstant(Mutable<ILogicalExpression> expr) {
-        IAObject obj = ((AsterixConstantValue) ((ConstantExpression) expr.getValue()).getValue()).getObject();
-        return ((AString) obj).getStringValue();
-    }
-
-    public static int getInt32Constant(Mutable<ILogicalExpression> expr) {
-        IAObject obj = ((AsterixConstantValue) ((ConstantExpression) expr.getValue()).getValue()).getObject();
-        return ((AInt32) obj).getIntegerValue();
-    }
-
-    public static boolean getBooleanConstant(Mutable<ILogicalExpression> expr) {
-        IAObject obj = ((AsterixConstantValue) ((ConstantExpression) expr.getValue()).getValue()).getObject();
-        return ((ABoolean) obj).getBoolean();
-    }
-
-    public static boolean analyzeFuncExprArgsForOneConstAndVar(AbstractFunctionCallExpression funcExpr,
-            AccessMethodAnalysisContext analysisCtx) {
-        IAlgebricksConstantValue constFilterVal = null;
-        LogicalVariable fieldVar = null;
-        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
-        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
-        // One of the args must be a constant, and the other arg must be a variable.
-        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT
-                && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            // The arguments of the contains() function are asymmetric; an index can only be used if the indexed field is the first argument.
-            if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.CONTAINS) {
-                return false;
-            }
-            ConstantExpression constExpr = (ConstantExpression) arg1;
-            constFilterVal = constExpr.getValue();
-            VariableReferenceExpression varExpr = (VariableReferenceExpression) arg2;
-            fieldVar = varExpr.getVariableReference();
-        } else if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
-                && arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-            ConstantExpression constExpr = (ConstantExpression) arg2;
-            constFilterVal = constExpr.getValue();
-            VariableReferenceExpression varExpr = (VariableReferenceExpression) arg1;
-            fieldVar = varExpr.getVariableReference();
-        } else {
-            return false;
-        }
-        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, fieldVar, constFilterVal);
-        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
-            //avoid additional optFuncExpressions in case of a join
-            if (optFuncExpr.getFuncExpr().equals(funcExpr))
-                return true;
-        }
-        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
-        return true;
-    }
-
-    public static boolean analyzeFuncExprArgsForTwoVars(AbstractFunctionCallExpression funcExpr,
-            AccessMethodAnalysisContext analysisCtx) {
-        LogicalVariable fieldVar1 = null;
-        LogicalVariable fieldVar2 = null;
-        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
-        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
-        if (arg1.getExpressionTag() == LogicalExpressionTag.VARIABLE
-                && arg2.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            fieldVar1 = ((VariableReferenceExpression) arg1).getVariableReference();
-            fieldVar2 = ((VariableReferenceExpression) arg2).getVariableReference();
-        } else {
-            return false;
-        }
-        OptimizableFuncExpr newOptFuncExpr = new OptimizableFuncExpr(funcExpr, new LogicalVariable[] { fieldVar1,
-                fieldVar2 }, null);
-        for (IOptimizableFuncExpr optFuncExpr : analysisCtx.matchedFuncExprs) {
-            //avoid additional optFuncExpressions in case of a join
-            if (optFuncExpr.getFuncExpr().equals(funcExpr))
-                return true;
-        }
-        analysisCtx.matchedFuncExprs.add(newOptFuncExpr);
-        return true;
-    }
-
-    public static int getNumSecondaryKeys(Index index, ARecordType recordType) throws AlgebricksException {
-        switch (index.getIndexType()) {
-            case BTREE:
-            case SINGLE_PARTITION_WORD_INVIX:
-            case SINGLE_PARTITION_NGRAM_INVIX:
-            case LENGTH_PARTITIONED_WORD_INVIX:
-            case LENGTH_PARTITIONED_NGRAM_INVIX: {
-                return index.getKeyFieldNames().size();
-            }
-            case RTREE: {
-                Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
-                        index.getKeyFieldNames().get(0), recordType);
-                IAType keyType = keyPairType.first;
-                int numDimensions = NonTaggedFormatUtil.getNumDimensions(keyType.getTypeTag());
-                return numDimensions * 2;
-            }
-            default: {
-                throw new AlgebricksException("Unknown index kind: " + index.getIndexType());
-            }
-        }
-    }
-
-    /**
-     * Appends the types of the fields produced by the given secondary index to dest.
-     */
-    public static void appendSecondaryIndexTypes(Dataset dataset, ARecordType recordType, Index index,
-            boolean primaryKeysOnly, List<Object> dest) throws AlgebricksException {
-        if (!primaryKeysOnly) {
-            switch (index.getIndexType()) {
-                case BTREE:
-                case SINGLE_PARTITION_WORD_INVIX:
-                case SINGLE_PARTITION_NGRAM_INVIX: {
-                    for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
-                        Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes()
-                                .get(i), index.getKeyFieldNames().get(i), recordType);
-                        dest.add(keyPairType.first);
-                    }
-                    break;
-                }
-                case RTREE: {
-                    Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(
-                            index.getKeyFieldTypes().get(0), index.getKeyFieldNames().get(0), recordType);
-                    IAType keyType = keyPairType.first;
-                    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(keyType.getTypeTag());
-                    int numKeys = getNumSecondaryKeys(index, recordType);
-                    for (int i = 0; i < numKeys; i++) {
-                        dest.add(nestedKeyType);
-                    }
-                    break;
-                }
-                case LENGTH_PARTITIONED_NGRAM_INVIX:
-                case LENGTH_PARTITIONED_WORD_INVIX:
-                default:
-                    break;
-            }
-        }
-        // Primary keys.
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            //add primary keys
-            try {
-                appendExternalRecPrimaryKeys(dataset, dest);
-            } catch (AsterixException e) {
-                throw new AlgebricksException(e);
-            }
-        } else {
-            List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
-            for (List<String> partitioningKey : partitioningKeys) {
-                try {
-                    dest.add(recordType.getSubFieldType(partitioningKey));
-                } catch (IOException e) {
-                    throw new AlgebricksException(e);
-                }
-            }
-        }
-    }
-
-    public static void appendSecondaryIndexOutputVars(Dataset dataset, ARecordType recordType, Index index,
-            boolean primaryKeysOnly, IOptimizationContext context, List<LogicalVariable> dest)
-            throws AlgebricksException {
-        int numPrimaryKeys = 0;
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            numPrimaryKeys = IndexingConstants.getRIDSize(dataset);
-        } else {
-            numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
-        }
-        int numSecondaryKeys = getNumSecondaryKeys(index, recordType);
-        int numVars = (primaryKeysOnly) ? numPrimaryKeys : numPrimaryKeys + numSecondaryKeys;
-        for (int i = 0; i < numVars; i++) {
-            dest.add(context.newVar());
-        }
-    }
-
-    public static List<LogicalVariable> getPrimaryKeyVarsFromSecondaryUnnestMap(Dataset dataset,
-            ILogicalOperator unnestMapOp) {
-        int numPrimaryKeys;
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            numPrimaryKeys = IndexingConstants.getRIDSize(dataset);
-        } else {
-            numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
-        }
-        List<LogicalVariable> primaryKeyVars = new ArrayList<LogicalVariable>();
-        List<LogicalVariable> sourceVars = ((UnnestMapOperator) unnestMapOp).getVariables();
-        // Assumes the primary keys are located at the end.
-        int start = sourceVars.size() - numPrimaryKeys;
-        int stop = sourceVars.size();
-        for (int i = start; i < stop; i++) {
-            primaryKeyVars.add(sourceVars.get(i));
-        }
-        return primaryKeyVars;
-    }
-
-    public static List<LogicalVariable> getPrimaryKeyVarsFromPrimaryUnnestMap(Dataset dataset,
-            ILogicalOperator unnestMapOp) {
-        int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
-        List<LogicalVariable> primaryKeyVars = new ArrayList<LogicalVariable>();
-        List<LogicalVariable> sourceVars = ((UnnestMapOperator) unnestMapOp).getVariables();
-        // Assumes the primary keys are located at the beginning.
-        for (int i = 0; i < numPrimaryKeys; i++) {
-            primaryKeyVars.add(sourceVars.get(i));
-        }
-        return primaryKeyVars;
-    }
-
-    /**
-     * Returns the search key expression which feeds a secondary-index search. If we are optimizing a selection query then this method returns
-     * a ConstantExpression built from the first constant value in the optimizable function expression.
-     * If we are optimizing a join, then this method returns the VariableReferenceExpression that should feed the secondary index probe.
-     *
-     * @throws AlgebricksException
-     */
-    public static Pair<ILogicalExpression, Boolean> createSearchKeyExpr(IOptimizableFuncExpr optFuncExpr,
-            OptimizableOperatorSubTree indexSubTree, OptimizableOperatorSubTree probeSubTree)
-            throws AlgebricksException {
-        if (probeSubTree == null) {
-            // We are optimizing a selection query. Search key is a constant.
-            // Type Checking and type promotion is done here
-            IAType fieldType = optFuncExpr.getFieldType(0);
-            IAObject constantObj = ((AsterixConstantValue) optFuncExpr.getConstantVal(0)).getObject();
-            ATypeTag constantValueTag = constantObj.getType().getTypeTag();
-            // type casting applied?
-            boolean typeCastingApplied = false;
-            // type casting happened from real (FLOAT, DOUBLE) value -> INT value?
-            boolean realTypeConvertedToIntegerType = false;
-            AsterixConstantValue replacedConstantValue = null;
-
-            // If the constant type and the target type do not match, we do a type conversion.
-            if (constantValueTag != fieldType.getTypeTag()) {
-                replacedConstantValue = ATypeHierarchy.getAsterixConstantValueFromNumericTypeObject(constantObj,
-                        fieldType.getTypeTag());
-                if (replacedConstantValue != null) {
-                    typeCastingApplied = true;
-                }
-
-                // Check whether the constant is a REAL value (FLOAT/DOUBLE) and the target field is an INT-type field.
-                // In that case the search parameter needs to be adjusted; refer to the caller for details.
-                switch (constantValueTag) {
-                    case DOUBLE:
-                    case FLOAT:
-                        switch (fieldType.getTypeTag()) {
-                            case INT8:
-                            case INT16:
-                            case INT32:
-                            case INT64:
-                                realTypeConvertedToIntegerType = true;
-                                break;
-                            default:
-                                break;
-                        }
-                    default:
-                        break;
-                }
-            }
-
-            if (typeCastingApplied) {
-                return new Pair<ILogicalExpression, Boolean>(new ConstantExpression(replacedConstantValue),
-                        realTypeConvertedToIntegerType);
-            } else {
-                return new Pair<ILogicalExpression, Boolean>(new ConstantExpression(optFuncExpr.getConstantVal(0)),
-                        false);
-            }
-        } else {
-            // We are optimizing a join query. Determine which variable feeds the secondary index.
-            if (optFuncExpr.getOperatorSubTree(0) == null || optFuncExpr.getOperatorSubTree(0) == probeSubTree) {
-                return new Pair<ILogicalExpression, Boolean>(new VariableReferenceExpression(
-                        optFuncExpr.getLogicalVar(0)), false);
-            } else {
-                return new Pair<ILogicalExpression, Boolean>(new VariableReferenceExpression(
-                        optFuncExpr.getLogicalVar(1)), false);
-            }
-        }
-    }
-
-    /**
-     * Returns the first expr optimizable by this index.
-     */
-    public static IOptimizableFuncExpr chooseFirstOptFuncExpr(Index chosenIndex, AccessMethodAnalysisContext analysisCtx) {
-        List<Pair<Integer, Integer>> indexExprs = analysisCtx.getIndexExprs(chosenIndex);
-        int firstExprIndex = indexExprs.get(0).first;
-        return analysisCtx.matchedFuncExprs.get(firstExprIndex);
-    }
-
-    public static int chooseFirstOptFuncVar(Index chosenIndex, AccessMethodAnalysisContext analysisCtx) {
-        List<Pair<Integer, Integer>> indexExprs = analysisCtx.getIndexExprs(chosenIndex);
-        return indexExprs.get(0).second;
-    }
-
-    public static UnnestMapOperator createSecondaryIndexUnnestMap(Dataset dataset, ARecordType recordType, Index index,
-            ILogicalOperator inputOp, AccessMethodJobGenParams jobGenParams, IOptimizationContext context,
-            boolean outputPrimaryKeysOnly, boolean retainInput) throws AlgebricksException {
-        // The job gen parameters are transferred to the actual job gen via the UnnestMapOperator's function arguments.
-        ArrayList<Mutable<ILogicalExpression>> secondaryIndexFuncArgs = new ArrayList<Mutable<ILogicalExpression>>();
-        jobGenParams.writeToFuncArgs(secondaryIndexFuncArgs);
-        // Variables and types coming out of the secondary-index search.
-        List<LogicalVariable> secondaryIndexUnnestVars = new ArrayList<LogicalVariable>();
-        List<Object> secondaryIndexOutputTypes = new ArrayList<Object>();
-        // Append output variables/types generated by the secondary-index search (not forwarded from input).
-        appendSecondaryIndexOutputVars(dataset, recordType, index, outputPrimaryKeysOnly, context,
-                secondaryIndexUnnestVars);
-        appendSecondaryIndexTypes(dataset, recordType, index, outputPrimaryKeysOnly, secondaryIndexOutputTypes);
-        // An index search is expressed as an unnest over an index-search function.
-        IFunctionInfo secondaryIndexSearch = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.INDEX_SEARCH);
-        UnnestingFunctionCallExpression secondaryIndexSearchFunc = new UnnestingFunctionCallExpression(
-                secondaryIndexSearch, secondaryIndexFuncArgs);
-        secondaryIndexSearchFunc.setReturnsUniqueValues(true);
-        // This is the operator that jobgen will be looking for. It contains an unnest function that has all necessary arguments to determine
-        // which index to use, which variables contain the index-search keys, what is the original dataset, etc.
-        UnnestMapOperator secondaryIndexUnnestOp = new UnnestMapOperator(secondaryIndexUnnestVars,
-                new MutableObject<ILogicalExpression>(secondaryIndexSearchFunc), secondaryIndexOutputTypes, retainInput);
-        secondaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
-        context.computeAndSetTypeEnvironmentForOperator(secondaryIndexUnnestOp);
-        secondaryIndexUnnestOp.setExecutionMode(ExecutionMode.PARTITIONED);
-        return secondaryIndexUnnestOp;
-    }
-
-    public static UnnestMapOperator createPrimaryIndexUnnestMap(AbstractDataSourceOperator dataSourceOp,
-            Dataset dataset, ARecordType recordType, ILogicalOperator inputOp, IOptimizationContext context,
-            boolean sortPrimaryKeys, boolean retainInput, boolean retainNull, boolean requiresBroadcast)
-            throws AlgebricksException {
-        List<LogicalVariable> primaryKeyVars = AccessMethodUtils.getPrimaryKeyVarsFromSecondaryUnnestMap(dataset,
-                inputOp);
-        // Optionally add a sort on the primary-index keys before searching the primary index.
-        OrderOperator order = null;
-        if (sortPrimaryKeys) {
-            order = new OrderOperator();
-            for (LogicalVariable pkVar : primaryKeyVars) {
-                Mutable<ILogicalExpression> vRef = new MutableObject<ILogicalExpression>(
-                        new VariableReferenceExpression(pkVar));
-                order.getOrderExpressions().add(
-                        new Pair<IOrder, Mutable<ILogicalExpression>>(OrderOperator.ASC_ORDER, vRef));
-            }
-            // The secondary-index search feeds into the sort.
-            order.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
-            order.setExecutionMode(ExecutionMode.LOCAL);
-            context.computeAndSetTypeEnvironmentForOperator(order);
-        }
-        // The job gen parameters are transferred to the actual job gen via the UnnestMapOperator's function arguments.
-        List<Mutable<ILogicalExpression>> primaryIndexFuncArgs = new ArrayList<Mutable<ILogicalExpression>>();
-        BTreeJobGenParams jobGenParams = new BTreeJobGenParams(dataset.getDatasetName(), IndexType.BTREE,
-                dataset.getDataverseName(), dataset.getDatasetName(), retainInput, retainNull, requiresBroadcast);
-        // Set low/high inclusive to true for a point lookup.
-        jobGenParams.setLowKeyInclusive(true);
-        jobGenParams.setHighKeyInclusive(true);
-        jobGenParams.setLowKeyVarList(primaryKeyVars, 0, primaryKeyVars.size());
-        jobGenParams.setHighKeyVarList(primaryKeyVars, 0, primaryKeyVars.size());
-        jobGenParams.setIsEqCondition(true);
-        jobGenParams.writeToFuncArgs(primaryIndexFuncArgs);
-        // Variables and types coming out of the primary-index search.
-        List<LogicalVariable> primaryIndexUnnestVars = new ArrayList<LogicalVariable>();
-        List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
-        // Append output variables/types generated by the primary-index search (not forwarded from input).
-        primaryIndexUnnestVars.addAll(dataSourceOp.getVariables());
-        try {
-            appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
-        } catch (IOException e) {
-            throw new AlgebricksException(e);
-        }
-        // An index search is expressed as an unnest over an index-search function.
-        IFunctionInfo primaryIndexSearch = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.INDEX_SEARCH);
-        AbstractFunctionCallExpression primaryIndexSearchFunc = new ScalarFunctionCallExpression(primaryIndexSearch,
-                primaryIndexFuncArgs);
-        // This is the operator that jobgen will be looking for. It contains an unnest function that has all necessary arguments to determine
-        // which index to use, which variables contain the index-search keys, what is the original dataset, etc.
-        UnnestMapOperator primaryIndexUnnestOp = new UnnestMapOperator(primaryIndexUnnestVars,
-                new MutableObject<ILogicalExpression>(primaryIndexSearchFunc), primaryIndexOutputTypes, retainInput);
-        // Fed by the order operator or the secondaryIndexUnnestOp.
-        if (sortPrimaryKeys) {
-            primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(order));
-        } else {
-            primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
-        }
-        context.computeAndSetTypeEnvironmentForOperator(primaryIndexUnnestOp);
-        primaryIndexUnnestOp.setExecutionMode(ExecutionMode.PARTITIONED);
-        return primaryIndexUnnestOp;
-    }
-
-    public static ScalarFunctionCallExpression findLOJIsNullFuncInGroupBy(GroupByOperator lojGroupbyOp)
-            throws AlgebricksException {
-        // Find the IS_NULL function whose argument is the null-placeholder variable in the nested plan of the group-by.
-        ALogicalPlanImpl subPlan = (ALogicalPlanImpl) lojGroupbyOp.getNestedPlans().get(0);
-        Mutable<ILogicalOperator> subPlanRootOpRef = subPlan.getRoots().get(0);
-        AbstractLogicalOperator subPlanRootOp = (AbstractLogicalOperator) subPlanRootOpRef.getValue();
-        boolean foundSelectNonNull = false;
-        ScalarFunctionCallExpression isNullFuncExpr = null;
-        AbstractLogicalOperator inputOp = subPlanRootOp;
-        while (inputOp != null) {
-            if (inputOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
-                SelectOperator selectOp = (SelectOperator) inputOp;
-                if (selectOp.getCondition().getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    if (((AbstractFunctionCallExpression) selectOp.getCondition().getValue()).getFunctionIdentifier()
-                            .equals(AlgebricksBuiltinFunctions.NOT)) {
-                        ScalarFunctionCallExpression notFuncExpr = (ScalarFunctionCallExpression) selectOp
-                                .getCondition().getValue();
-                        if (notFuncExpr.getArguments().get(0).getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                            if (((AbstractFunctionCallExpression) notFuncExpr.getArguments().get(0).getValue())
-                                    .getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.IS_NULL)) {
-                                isNullFuncExpr = (ScalarFunctionCallExpression) notFuncExpr.getArguments().get(0)
-                                        .getValue();
-                                if (isNullFuncExpr.getArguments().get(0).getValue().getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                                    foundSelectNonNull = true;
-                                    break;
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-            inputOp = inputOp.getInputs().size() > 0 ? (AbstractLogicalOperator) inputOp.getInputs().get(0).getValue()
-                    : null;
-        }
-
-        if (!foundSelectNonNull) {
-            throw new AlgebricksException(
-                    "Could not find the non-null select operator in GroupByOperator for LEFTOUTERJOIN plan optimization.");
-        }
-        return isNullFuncExpr;
-    }
-
-    public static void resetLOJNullPlaceholderVariableInGroupByOp(AccessMethodAnalysisContext analysisCtx,
-            LogicalVariable newNullPlaceholderVaraible, IOptimizationContext context) throws AlgebricksException {
-
-        //reset the null placeholder variable in groupby operator
-        ScalarFunctionCallExpression isNullFuncExpr = analysisCtx.getLOJIsNullFuncInGroupBy();
-        isNullFuncExpr.getArguments().clear();
-        isNullFuncExpr.getArguments().add(
-                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newNullPlaceholderVaraible)));
-
-        //recompute type environment.
-        OperatorPropertiesUtil.typeOpRec(analysisCtx.getLOJGroupbyOpRef(), context);
-    }
-
-    // New < For external datasets indexing>
-    private static void appendExternalRecTypes(Dataset dataset, IAType itemType, List<Object> target) {
-        target.add(itemType);
-    }
-
-    private static void appendExternalRecPrimaryKeys(Dataset dataset, List<Object> target) throws AsterixException {
-        int numPrimaryKeys = IndexingConstants.getRIDSize(dataset);
-        for (int i = 0; i < numPrimaryKeys; i++) {
-            target.add(IndexingConstants.getFieldType(i));
-        }
-    }
-
-    private static void writeVarList(List<LogicalVariable> varList, List<Mutable<ILogicalExpression>> funcArgs) {
-        Mutable<ILogicalExpression> numKeysRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
-                new AsterixConstantValue(new AInt32(varList.size()))));
-        funcArgs.add(numKeysRef);
-        for (LogicalVariable keyVar : varList) {
-            Mutable<ILogicalExpression> keyVarRef = new MutableObject<ILogicalExpression>(
-                    new VariableReferenceExpression(keyVar));
-            funcArgs.add(keyVarRef);
-        }
-    }
-
-    private static void addStringArg(String argument, List<Mutable<ILogicalExpression>> funcArgs) {
-        Mutable<ILogicalExpression> stringRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
-                new AsterixConstantValue(new AString(argument))));
-        funcArgs.add(stringRef);
-    }
-
-    public static ExternalDataLookupOperator createExternalDataLookupUnnestMap(AbstractDataSourceOperator dataSourceOp,
-            Dataset dataset, ARecordType recordType, ILogicalOperator inputOp, IOptimizationContext context,
-            Index secondaryIndex, boolean retainInput, boolean retainNull) throws AlgebricksException {
-        List<LogicalVariable> primaryKeyVars = AccessMethodUtils.getPrimaryKeyVarsFromSecondaryUnnestMap(dataset,
-                inputOp);
-
-        // add a sort on the RID fields before fetching external data.
-        OrderOperator order = new OrderOperator();
-        for (LogicalVariable pkVar : primaryKeyVars) {
-            Mutable<ILogicalExpression> vRef = new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                    pkVar));
-            order.getOrderExpressions().add(
-                    new Pair<IOrder, Mutable<ILogicalExpression>>(OrderOperator.ASC_ORDER, vRef));
-        }
-        // The secondary-index search feeds into the sort.
-        order.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
-        order.setExecutionMode(ExecutionMode.LOCAL);
-        context.computeAndSetTypeEnvironmentForOperator(order);
-        List<Mutable<ILogicalExpression>> externalRIDAccessFuncArgs = new ArrayList<Mutable<ILogicalExpression>>();
-        //Add dataverse and dataset to the arguments
-        AccessMethodUtils.addStringArg(dataset.getDataverseName(), externalRIDAccessFuncArgs);
-        AccessMethodUtils.addStringArg(dataset.getDatasetName(), externalRIDAccessFuncArgs);
-        AccessMethodUtils.writeVarList(primaryKeyVars, externalRIDAccessFuncArgs);
-
-        // Variables and types coming out of the external access.
-        List<LogicalVariable> externalAccessByRIDVars = new ArrayList<LogicalVariable>();
-        List<Object> externalAccessOutputTypes = new ArrayList<Object>();
-        // Append output variables/types generated by the data scan (not forwarded from input).
-        externalAccessByRIDVars.addAll(dataSourceOp.getVariables());
-        appendExternalRecTypes(dataset, recordType, externalAccessOutputTypes);
-
-        IFunctionInfo externalAccessByRID = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EXTERNAL_LOOKUP);
-        AbstractFunctionCallExpression externalAccessFunc = new ScalarFunctionCallExpression(externalAccessByRID,
-                externalRIDAccessFuncArgs);
-
-        ExternalDataLookupOperator externalLookupOp = new ExternalDataLookupOperator(externalAccessByRIDVars,
-                new MutableObject<ILogicalExpression>(externalAccessFunc), externalAccessOutputTypes, retainInput,
-                dataSourceOp.getDataSource());
-        // Fed by the order operator or the secondaryIndexUnnestOp.
-        externalLookupOp.getInputs().add(new MutableObject<ILogicalOperator>(order));
-
-        context.computeAndSetTypeEnvironmentForOperator(externalLookupOp);
-        externalLookupOp.setExecutionMode(ExecutionMode.PARTITIONED);
-
-        //set the physical operator
-        AqlSourceId dataSourceId = new AqlSourceId(dataset.getDataverseName(), dataset.getDatasetName());
-        externalLookupOp.setPhysicalOperator(new ExternalDataLookupPOperator(dataSourceId, dataset, recordType,
-                secondaryIndex, primaryKeyVars, false, retainInput, retainNull));
-        return externalLookupOp;
-    }
-}
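
The createSearchKeyExpr helper above promotes a selection predicate's constant to the type of the indexed field and records whether a FLOAT/DOUBLE constant was folded into an integer field, so that the caller can adjust the search parameters. The following self-contained sketch illustrates just that decision with plain Java types in place of the Asterix type and expression classes; all names below are illustrative and not part of the AsterixDB code base.

    // Simplified illustration of the type-promotion decision in createSearchKeyExpr:
    // if the constant's type differs from the indexed field's type, convert it, and
    // remember whether a real-valued constant was converted to an integer type so the
    // caller can compensate (e.g., by adjusting the search bounds).
    public class SearchKeyPromotionSketch {

        enum Tag { INT8, INT16, INT32, INT64, FLOAT, DOUBLE, STRING }

        static final class Result {
            final Object searchKey;               // possibly converted constant
            final boolean realConvertedToInteger; // caller adjusts bounds if true
            Result(Object key, boolean converted) {
                this.searchKey = key;
                this.realConvertedToInteger = converted;
            }
        }

        static boolean isInteger(Tag t) {
            return t == Tag.INT8 || t == Tag.INT16 || t == Tag.INT32 || t == Tag.INT64;
        }

        static boolean isReal(Tag t) {
            return t == Tag.FLOAT || t == Tag.DOUBLE;
        }

        // constant: the literal from the query predicate; fieldTag: the indexed field's type.
        static Result promote(Number constant, Tag constantTag, Tag fieldTag) {
            if (constantTag == fieldTag) {
                return new Result(constant, false); // no conversion needed
            }
            boolean realToInt = isReal(constantTag) && isInteger(fieldTag);
            Number converted = isInteger(fieldTag) ? (Number) constant.longValue() : constant;
            return new Result(converted, realToInt);
        }

        public static void main(String[] unused) {
            Result r = promote(3.7d, Tag.DOUBLE, Tag.INT32);
            // Prints "3 true": the fractional part was lost, so a search against the
            // integer field must account for the truncation.
            System.out.println(r.searchKey + " " + r.realConvertedToInteger);
        }
    }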


[10/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/aql/translator/AqlTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/org/apache/asterix/aql/translator/AqlTranslator.java
new file mode 100644
index 0000000..93917cc
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/aql/translator/AqlTranslator.java
@@ -0,0 +1,3069 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.aql.translator;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Random;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.StringUtils;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.api.common.APIFramework;
+import edu.uci.ics.asterix.api.common.Job;
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.CompactStatement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedPolicyStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
+import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
+import edu.uci.ics.asterix.aql.expression.CreatePrimaryFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateSecondaryFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DatasetDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DropStatement;
+import edu.uci.ics.asterix.aql.expression.ExternalDetailsDecl;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
+import edu.uci.ics.asterix.aql.expression.FeedPolicyDropStatement;
+import edu.uci.ics.asterix.aql.expression.FunctionDecl;
+import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
+import edu.uci.ics.asterix.aql.expression.IDatasetDetailsDecl;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
+import edu.uci.ics.asterix.aql.expression.InternalDetailsDecl;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
+import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
+import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
+import edu.uci.ics.asterix.aql.expression.Query;
+import edu.uci.ics.asterix.aql.expression.RefreshExternalDatasetStatement;
+import edu.uci.ics.asterix.aql.expression.RunStatement;
+import edu.uci.ics.asterix.aql.expression.SetStatement;
+import edu.uci.ics.asterix.aql.expression.SubscribeFeedStatement;
+import edu.uci.ics.asterix.aql.expression.TypeDecl;
+import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
+import edu.uci.ics.asterix.aql.expression.TypeExpression;
+import edu.uci.ics.asterix.aql.expression.WriteStatement;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
+import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedActivity.FeedActivityDetails;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
+import edu.uci.ics.asterix.common.feeds.FeedId;
+import edu.uci.ics.asterix.common.feeds.FeedJointKey;
+import edu.uci.ics.asterix.common.feeds.FeedPolicyAccessor;
+import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
+import edu.uci.ics.asterix.common.feeds.api.IFeedJoint.FeedJointType;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.feeds.CentralFeedManager;
+import edu.uci.ics.asterix.feeds.FeedJoint;
+import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
+import edu.uci.ics.asterix.file.DatasetOperations;
+import edu.uci.ics.asterix.file.DataverseOperations;
+import edu.uci.ics.asterix.file.ExternalIndexingOperations;
+import edu.uci.ics.asterix.file.FeedOperations;
+import edu.uci.ics.asterix.file.IndexOperations;
+import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
+import edu.uci.ics.asterix.metadata.IDatasetDetails;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints;
+import edu.uci.ics.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.CompactionPolicy;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Datatype;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
+import edu.uci.ics.asterix.metadata.entities.ExternalFile;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.asterix.metadata.entities.Feed.FeedType;
+import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
+import edu.uci.ics.asterix.metadata.entities.Function;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.metadata.entities.PrimaryFeed;
+import edu.uci.ics.asterix.metadata.entities.SecondaryFeed;
+import edu.uci.ics.asterix.metadata.feeds.FeedLifecycleEventSubscriber;
+import edu.uci.ics.asterix.metadata.feeds.FeedUtil;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapterFactory;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
+import edu.uci.ics.asterix.metadata.utils.MetadataLockManager;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.TypeSignature;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
+import edu.uci.ics.asterix.result.ResultReader;
+import edu.uci.ics.asterix.result.ResultUtils;
+import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
+import edu.uci.ics.asterix.runtime.operators.std.FlushDatasetOperatorDescriptor;
+import edu.uci.ics.asterix.transaction.management.service.transaction.DatasetIdFactory;
+import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
+import edu.uci.ics.asterix.translator.AbstractAqlTranslator;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledConnectFeedStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledDeleteStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexCompactStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledInsertStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
+import edu.uci.ics.asterix.translator.TypeTranslator;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import edu.uci.ics.hyracks.algebricks.data.IAWriterFactory;
+import edu.uci.ics.hyracks.algebricks.data.IResultSerializerFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+
+/*
+ * Provides functionality for executing a batch of AQL statements (queries included)
+ * sequentially.
+ */
+public class AqlTranslator extends AbstractAqlTranslator {
+
+    private static Logger LOGGER = Logger.getLogger(AqlTranslator.class.getName());
+
+    private enum ProgressState {
+        NO_PROGRESS,
+        ADDED_PENDINGOP_RECORD_TO_METADATA
+    }
+
+    public static enum ResultDelivery {
+        SYNC,
+        ASYNC,
+        ASYNC_DEFERRED
+    }
+
+    public static final boolean IS_DEBUG_MODE = false;//true
+    private final List<Statement> aqlStatements;
+    private final SessionConfig sessionConfig;
+    private Dataverse activeDefaultDataverse;
+    private final List<FunctionDecl> declaredFunctions;
+
+    public AqlTranslator(List<Statement> aqlStatements, SessionConfig conf) throws MetadataException, AsterixException {
+        this.aqlStatements = aqlStatements;
+        this.sessionConfig = conf;
+        declaredFunctions = getDeclaredFunctions(aqlStatements);
+    }
+
+    private List<FunctionDecl> getDeclaredFunctions(List<Statement> statements) {
+        List<FunctionDecl> functionDecls = new ArrayList<FunctionDecl>();
+        for (Statement st : statements) {
+            if (st.getKind().equals(Statement.Kind.FUNCTION_DECL)) {
+                functionDecls.add((FunctionDecl) st);
+            }
+        }
+        return functionDecls;
+    }
+
+    /**
+     * Compiles and submits for execution a list of AQL statements.
+     * 
+     * @param hcc
+     *            A Hyracks client connection that is used to submit a jobspec to Hyracks.
+     * @param hdc
+     *            A Hyracks dataset client object that is used to read the results.
+     * @param resultDelivery
+     *            The result delivery mode: SYNC waits for and reads the results, while ASYNC and
+     *            ASYNC_DEFERRED cause the results to be read asynchronously via a result handle.
+     * @throws Exception
+     */
+    public void compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, ResultDelivery resultDelivery)
+            throws Exception {
+        int resultSetIdCounter = 0;
+        FileSplit outputFile = null;
+        IAWriterFactory writerFactory = PrinterBasedWriterFactory.INSTANCE;
+        IResultSerializerFactoryProvider resultSerializerFactoryProvider = ResultSerializerFactoryProvider.INSTANCE;
+        Map<String, String> config = new HashMap<String, String>();
+
+        for (Statement stmt : aqlStatements) {
+            validateOperation(activeDefaultDataverse, stmt);
+            AqlMetadataProvider metadataProvider = new AqlMetadataProvider(activeDefaultDataverse,
+                    CentralFeedManager.getInstance());
+            metadataProvider.setWriterFactory(writerFactory);
+            metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
+            metadataProvider.setOutputFile(outputFile);
+            metadataProvider.setConfig(config);
+            switch (stmt.getKind()) {
+                case SET: {
+                    handleSetStatement(metadataProvider, stmt, config);
+                    break;
+                }
+                case DATAVERSE_DECL: {
+                    activeDefaultDataverse = handleUseDataverseStatement(metadataProvider, stmt);
+                    break;
+                }
+                case CREATE_DATAVERSE: {
+                    handleCreateDataverseStatement(metadataProvider, stmt);
+                    break;
+                }
+                case DATASET_DECL: {
+                    handleCreateDatasetStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+                case CREATE_INDEX: {
+                    handleCreateIndexStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+                case TYPE_DECL: {
+                    handleCreateTypeStatement(metadataProvider, stmt);
+                    break;
+                }
+                case NODEGROUP_DECL: {
+                    handleCreateNodeGroupStatement(metadataProvider, stmt);
+                    break;
+                }
+                case DATAVERSE_DROP: {
+                    handleDataverseDropStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+                case DATASET_DROP: {
+                    handleDatasetDropStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+                case INDEX_DROP: {
+                    handleIndexDropStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+                case TYPE_DROP: {
+                    handleTypeDropStatement(metadataProvider, stmt);
+                    break;
+                }
+                case NODEGROUP_DROP: {
+                    handleNodegroupDropStatement(metadataProvider, stmt);
+                    break;
+                }
+
+                case CREATE_FUNCTION: {
+                    handleCreateFunctionStatement(metadataProvider, stmt);
+                    break;
+                }
+
+                case FUNCTION_DROP: {
+                    handleFunctionDropStatement(metadataProvider, stmt);
+                    break;
+                }
+
+                case LOAD: {
+                    handleLoadStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+                case INSERT: {
+                    handleInsertStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+                case DELETE: {
+                    handleDeleteStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case CREATE_PRIMARY_FEED:
+                case CREATE_SECONDARY_FEED: {
+                    handleCreateFeedStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case DROP_FEED: {
+                    handleDropFeedStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case DROP_FEED_POLICY: {
+                    handleDropFeedPolicyStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case CONNECT_FEED: {
+                    handleConnectFeedStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case DISCONNECT_FEED: {
+                    handleDisconnectFeedStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case SUBSCRIBE_FEED: {
+                    handleSubscribeFeedStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case CREATE_FEED_POLICY: {
+                    handleCreateFeedPolicyStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case QUERY: {
+                    metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
+                    metadataProvider.setResultAsyncMode(resultDelivery == ResultDelivery.ASYNC
+                            || resultDelivery == ResultDelivery.ASYNC_DEFERRED);
+                    handleQuery(metadataProvider, (Query) stmt, hcc, hdc, resultDelivery);
+                    break;
+                }
+
+                case COMPACT: {
+                    handleCompactStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case EXTERNAL_DATASET_REFRESH: {
+                    handleExternalDatasetRefreshStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+
+                case WRITE: {
+                    Pair<IAWriterFactory, FileSplit> result = handleWriteStatement(metadataProvider, stmt);
+                    if (result.first != null) {
+                        writerFactory = result.first;
+                    }
+                    outputFile = result.second;
+                    break;
+                }
+
+                case RUN: {
+                    handleRunStatement(metadataProvider, stmt, hcc);
+                    break;
+                }
+            }
+        }
+    }
+
+    private void handleSetStatement(AqlMetadataProvider metadataProvider, Statement stmt, Map<String, String> config)
+            throws RemoteException, ACIDException {
+        SetStatement ss = (SetStatement) stmt;
+        String pname = ss.getPropName();
+        String pvalue = ss.getPropValue();
+        config.put(pname, pvalue);
+    }
+
+    private Pair<IAWriterFactory, FileSplit> handleWriteStatement(AqlMetadataProvider metadataProvider, Statement stmt)
+            throws InstantiationException, IllegalAccessException, ClassNotFoundException {
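+        // A WRITE statement determines where query output goes; an illustrative AQL
+        // example (hypothetical node name and path) would be:
+        //   write output to nc1:"/tmp/results.adm";
+        // An optional writer class name selects a custom IAWriterFactory implementation.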
+        WriteStatement ws = (WriteStatement) stmt;
+        File f = new File(ws.getFileName());
+        FileSplit outputFile = new FileSplit(ws.getNcName().getValue(), new FileReference(f));
+        IAWriterFactory writerFactory = null;
+        if (ws.getWriterClassName() != null) {
+            writerFactory = (IAWriterFactory) Class.forName(ws.getWriterClassName()).newInstance();
+        }
+        return new Pair<IAWriterFactory, FileSplit>(writerFactory, outputFile);
+    }
+
+    private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
+            throws Exception {
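+        // Resolves a "use dataverse <name>;" statement under a dataverse read lock and
+        // returns the corresponding Dataverse entity, failing if it does not exist.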
+        DataverseDecl dvd = (DataverseDecl) stmt;
+        String dvName = dvd.getDataverseName().getValue();
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
+        try {
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
+            if (dv == null) {
+                throw new MetadataException("Unknown dataverse " + dvName);
+            }
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            return dv;
+        } catch (Exception e) {
+            abort(e, e, mdTxnCtx);
+            throw new MetadataException(e);
+        } finally {
+            MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
+        }
+    }
+
+    private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
+
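+        // Illustrative AQL (hypothetical name): create dataverse TinySocial if not exists;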
+        CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
+        String dvName = stmtCreateDataverse.getDataverseName().getValue();
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+        MetadataLockManager.INSTANCE.acquireDataverseReadLock(dvName);
+        try {
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
+            if (dv != null) {
+                if (stmtCreateDataverse.getIfNotExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
+                    throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
+                }
+            }
+            MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName,
+                    stmtCreateDataverse.getFormat(), IMetadataEntity.PENDING_NO_OP));
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            abort(e, e, mdTxnCtx);
+            throw e;
+        } finally {
+            MetadataLockManager.INSTANCE.releaseDataverseReadLock(dvName);
+        }
+    }
+
+    private void validateCompactionPolicy(String compactionPolicy, Map<String, String> compactionPolicyProperties,
+            MetadataTransactionContext mdTxnCtx, boolean isExternalDataset) throws AsterixException, Exception {
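+        // The compaction policy must be registered in the Metadata dataverse; the
+        // correlated-prefix policy is rejected for external datasets, and the supplied
+        // properties must match the policy factory's property names exactly (only the
+        // no-merge policy may be given without properties).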
+        CompactionPolicy compactionPolicyEntity = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx,
+                MetadataConstants.METADATA_DATAVERSE_NAME, compactionPolicy);
+        if (compactionPolicyEntity == null) {
+            throw new AsterixException("Unknown compaction policy: " + compactionPolicy);
+        }
+        String compactionPolicyFactoryClassName = compactionPolicyEntity.getClassName();
+        ILSMMergePolicyFactory mergePolicyFactory = (ILSMMergePolicyFactory) Class.forName(
+                compactionPolicyFactoryClassName).newInstance();
+        if (isExternalDataset && mergePolicyFactory.getName().compareTo("correlated-prefix") == 0) {
+            throw new AsterixException("The correlated-prefix merge policy cannot be used with external dataset.");
+        }
+        if (compactionPolicyProperties == null) {
+            if (mergePolicyFactory.getName().compareTo("no-merge") != 0) {
+                throw new AsterixException("Compaction policy properties are missing.");
+            }
+        } else {
+            for (Map.Entry<String, String> entry : compactionPolicyProperties.entrySet()) {
+                if (!mergePolicyFactory.getPropertiesNames().contains(entry.getKey())) {
+                    throw new AsterixException("Invalid compaction policy property: " + entry.getKey());
+                }
+            }
+            for (String p : mergePolicyFactory.getPropertiesNames()) {
+                if (!compactionPolicyProperties.containsKey(p)) {
+                    throw new AsterixException("Missing compaction policy property: " + p);
+                }
+            }
+        }
+    }
+
+    private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc) throws AsterixException, Exception {
+
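+        // Illustrative AQL (hypothetical names): create dataset Customers(CustomerType) primary key cid;
+        // The dataset is first recorded with PENDING_ADD_OP, its physical files are created
+        // by a Hyracks job, and the metadata record is then re-added with PENDING_NO_OP;
+        // on failure, the compensation path below drops the pending artifacts again.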
+        ProgressState progress = ProgressState.NO_PROGRESS;
+        DatasetDecl dd = (DatasetDecl) stmt;
+        String dataverseName = getActiveDataverse(dd.getDataverse());
+        String datasetName = dd.getName().getValue();
+        DatasetType dsType = dd.getDatasetType();
+        String itemTypeName = dd.getItemTypeName().getValue();
+        Identifier ngNameId = dd.getNodegroupName();
+        String nodegroupName = getNodeGroupName(ngNameId, dd, dataverseName);
+        String compactionPolicy = dd.getCompactionPolicy();
+        Map<String, String> compactionPolicyProperties = dd.getCompactionPolicyProperties();
+        boolean defaultCompactionPolicy = (compactionPolicy == null);
+        boolean temp = dd.getDatasetDetailsDecl().isTemp();
+
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        boolean bActiveTxn = true;
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+        MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, dataverseName + "." + itemTypeName,
+                nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
+        Dataset dataset = null;
+        try {
+
+            IDatasetDetails datasetDetails = null;
+            Dataset ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                    datasetName);
+            if (ds != null) {
+                if (dd.getIfNotExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
+                    throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
+                }
+            }
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
+                    itemTypeName);
+            if (dt == null) {
+                throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
+            }
+            String ngName = ngNameId != null ? ngNameId.getValue() : configureNodegroupForDataset(dd, dataverseName,
+                    mdTxnCtx);
+
+            if (compactionPolicy == null) {
+                compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
+                compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
+            } else {
+                validateCompactionPolicy(compactionPolicy, compactionPolicyProperties, mdTxnCtx, false);
+            }
+            switch (dd.getDatasetType()) {
+                case INTERNAL: {
+                    IAType itemType = dt.getDatatype();
+                    if (itemType.getTypeTag() != ATypeTag.RECORD) {
+                        throw new AlgebricksException("Can only partition ARecord's.");
+                    }
+                    List<List<String>> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
+                            .getPartitioningExprs();
+                    boolean autogenerated = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).isAutogenerated();
+                    ARecordType aRecordType = (ARecordType) itemType;
+                    List<IAType> partitioningTypes = aRecordType.validatePartitioningExpressions(partitioningExprs,
+                            autogenerated);
+
+                    List<String> filterField = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getFilterField();
+                    if (filterField != null) {
+                        aRecordType.validateFilterField(filterField);
+                    }
+                    if (defaultCompactionPolicy) {
+                        if (filterField != null) {
+                            // If the dataset has a filter and the user didn't specify a merge policy, then we will pick the
+                            // correlated-prefix as the default merge policy.
+                            compactionPolicy = GlobalConfig.DEFAULT_FILTERED_DATASET_COMPACTION_POLICY_NAME;
+                            compactionPolicyProperties = GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES;
+                        }
+                    }
+                    datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
+                            InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs, partitioningExprs,
+                            partitioningTypes, autogenerated, filterField, temp);
+                    break;
+                }
+                case EXTERNAL: {
+                    String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
+                    Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getProperties();
+
+                    datasetDetails = new ExternalDatasetDetails(adapter, properties, new Date(),
+                            ExternalDatasetTransactionState.COMMIT);
+                    break;
+                }
+
+            }
+
+            //#. initialize DatasetIdFactory if it is not initialized.
+            if (!DatasetIdFactory.isInitialized()) {
+                DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
+            }
+
+            //#. add a new dataset with PendingAddOp
+            dataset = new Dataset(dataverseName, datasetName, itemTypeName, ngName, compactionPolicy,
+                    compactionPolicyProperties, datasetDetails, dd.getHints(), dsType,
+                    DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
+            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
+
+            if (dd.getDatasetType() == DatasetType.INTERNAL) {
+                Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
+                        dataverseName);
+                JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
+                        metadataProvider);
+
+                //#. make metadataTxn commit before calling runJob.
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+                //#. runJob
+                runJob(hcc, jobSpec, true);
+
+                //#. begin new metadataTxn
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+            }
+
+            //#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
+            MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
+            dataset.setPendingOp(IMetadataEntity.PENDING_NO_OP);
+            MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            if (bActiveTxn) {
+                abort(e, e, mdTxnCtx);
+            }
+
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+
+                //#. execute compensation operations
+                //   remove the index in NC
+                //   [Notice]
+                //   As long as we updated (and committed) the metadata, we should remove any effect of the job,
+                //   because an exception occurred during runJob.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
+                try {
+                    JobSpecification jobSpec = DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    bActiveTxn = false;
+
+                    runJob(hcc, jobSpec, true);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    if (bActiveTxn) {
+                        abort(e, e2, mdTxnCtx);
+                    }
+                }
+
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
+                            + "." + datasetName + ") couldn't be removed from the metadata", e);
+                }
+            }
+
+            throw e;
+        } finally {
+            MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, dataverseName + "." + itemTypeName,
+                    nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
+        }
+    }
+
+    private void validateIfResourceIsActiveInFeed(String dataverseName, String datasetName) throws AsterixException {
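+        // Rejects the operation if any currently active feed connection targets the dataset,
+        // listing the offending connections in the error message.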
+        List<FeedConnectionId> activeFeedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
+        boolean resourceInUse = false;
+        StringBuilder builder = new StringBuilder();
+
+        if (activeFeedConnections != null && !activeFeedConnections.isEmpty()) {
+            for (FeedConnectionId connId : activeFeedConnections) {
+                if (connId.getDatasetName().equals(datasetName)) {
+                    resourceInUse = true;
+                    builder.append(connId + "\n");
+                }
+            }
+        }
+
+        if (resourceInUse) {
+            throw new AsterixException("Dataset " + datasetName + " is currently being "
+                    + "fed into by the following feed(s).\n" + builder.toString() + "\n" + "Operation not supported");
+        }
+
+    }
+
+    private String getNodeGroupName(Identifier ngNameId, DatasetDecl dd, String dataverse) {
+        if (ngNameId != null) {
+            return ngNameId.getValue();
+        }
+        String hintValue = dd.getHints().get(DatasetNodegroupCardinalityHint.NAME);
+        if (hintValue == null) {
+            return MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
+        } else {
+            return (dataverse + ":" + dd.getName().getValue());
+        }
+    }
+
+    private String configureNodegroupForDataset(DatasetDecl dd, String dataverse, MetadataTransactionContext mdTxnCtx)
+            throws AsterixException {
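+        // When the dataset carries a nodegroup-cardinality hint, build a dedicated nodegroup
+        // named "<dataverse>:<dataset>" containing the metadata node plus randomly chosen
+        // additional nodes; otherwise fall back to the default metadata nodegroup.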
+        int nodegroupCardinality = -1;
+        String nodegroupName;
+        String hintValue = dd.getHints().get(DatasetNodegroupCardinalityHint.NAME);
+        if (hintValue == null) {
+            nodegroupName = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
+            return nodegroupName;
+        } else {
+            int numChosen = 0;
+            boolean valid = DatasetHints.validate(DatasetNodegroupCardinalityHint.NAME,
+                    dd.getHints().get(DatasetNodegroupCardinalityHint.NAME)).first;
+            if (!valid) {
+                throw new AsterixException("Incorrect use of hint:" + DatasetNodegroupCardinalityHint.NAME);
+            } else {
+                nodegroupCardinality = Integer.parseInt(dd.getHints().get(DatasetNodegroupCardinalityHint.NAME));
+            }
+            Set<String> nodeNames = AsterixAppContextInfo.getInstance().getMetadataProperties().getNodeNames();
+            Set<String> nodeNamesClone = new HashSet<String>();
+            for (String node : nodeNames) {
+                nodeNamesClone.add(node);
+            }
+            String metadataNodeName = AsterixAppContextInfo.getInstance().getMetadataProperties().getMetadataNodeName();
+            List<String> selectedNodes = new ArrayList<String>();
+            selectedNodes.add(metadataNodeName);
+            numChosen++;
+            nodeNamesClone.remove(metadataNodeName);
+
+            if (numChosen < nodegroupCardinality) {
+                Random random = new Random();
+                String[] nodes = nodeNamesClone.toArray(new String[] {});
+                int[] b = new int[nodeNamesClone.size()];
+                for (int i = 0; i < b.length; i++) {
+                    b[i] = i;
+                }
+
+                for (int i = 0; i < nodegroupCardinality - numChosen; i++) {
+                    int selected = i + random.nextInt(nodeNamesClone.size() - i);
+                    int selNodeIndex = b[selected];
+                    selectedNodes.add(nodes[selNodeIndex]);
+                    // Partial Fisher-Yates selection: move the chosen index out of the
+                    // remaining range so that the same node cannot be picked twice.
+                    int temp = b[i];
+                    b[i] = b[selected];
+                    b[selected] = temp;
+                }
+            }
+            nodegroupName = dataverse + ":" + dd.getName().getValue();
+            MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(nodegroupName, selectedNodes));
+            return nodegroupName;
+        }
+
+    }
+
+    @SuppressWarnings("unchecked")
+    private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc) throws Exception {
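+        // Illustrative AQL (hypothetical names): create index custNameIdx on Customers(name) type btree;
+        // Follows the same pending-op protocol as dataset creation: the index is added with
+        // PENDING_ADD_OP, built and loaded via Hyracks jobs, and re-added with PENDING_NO_OP;
+        // for an external dataset, a files index is created and replicated first if needed.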
+        ProgressState progress = ProgressState.NO_PROGRESS;
+        CreateIndexStatement stmtCreateIndex = (CreateIndexStatement) stmt;
+        String dataverseName = getActiveDataverse(stmtCreateIndex.getDataverseName());
+        String datasetName = stmtCreateIndex.getDatasetName().getValue();
+
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        boolean bActiveTxn = true;
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+        MetadataLockManager.INSTANCE.createIndexBegin(dataverseName, dataverseName + "." + datasetName);
+
+        String indexName = null;
+        JobSpecification spec = null;
+        Dataset ds = null;
+        // For external datasets
+        ArrayList<ExternalFile> externalFilesSnapshot = null;
+        boolean firstExternalDatasetIndex = false;
+        boolean filesIndexReplicated = false;
+        Index filesIndex = null;
+        boolean datasetLocked = false;
+        try {
+            ds = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                    datasetName);
+            if (ds == null) {
+                throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
+                        + dataverseName);
+            }
+
+            indexName = stmtCreateIndex.getIndexName().getValue();
+            Index idx = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                    datasetName, indexName);
+
+            String itemTypeName = ds.getItemTypeName();
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
+                    itemTypeName);
+            IAType itemType = dt.getDatatype();
+            ARecordType aRecordType = (ARecordType) itemType;
+
+            List<List<String>> indexFields = new ArrayList<List<String>>();
+            List<IAType> indexFieldTypes = new ArrayList<IAType>();
+            for (Pair<List<String>, TypeExpression> fieldExpr : stmtCreateIndex.getFieldExprs()) {
+                IAType fieldType = null;
+                boolean isOpen = aRecordType.isOpen();
+                ARecordType subType = aRecordType;
+                int i = 0;
+                if (fieldExpr.first.size() > 1 && !isOpen) {
+                    for (; i < fieldExpr.first.size() - 1;) {
+                        subType = (ARecordType) subType.getFieldType(fieldExpr.first.get(i));
+                        i++;
+                        if (subType.isOpen()) {
+                            isOpen = true;
+                            break;
+                        }
+                    }
+                }
+                if (fieldExpr.second == null) {
+                    fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
+                } else {
+                    if (!stmtCreateIndex.isEnforced())
+                        throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first
+                                + "\" field without enforcing it's type");
+                    if (!isOpen)
+                        throw new AlgebricksException("Typed index on \"" + fieldExpr.first
+                                + "\" field could be created only for open datatype");
+                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second,
+                            indexName, dataverseName);
+                    TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
+                    fieldType = typeMap.get(typeSignature);
+                }
+                if (fieldType == null)
+                    throw new AlgebricksException("Unknown type " + fieldExpr.second);
+
+                indexFields.add(fieldExpr.first);
+                indexFieldTypes.add(fieldType);
+            }
+
+            aRecordType.validateKeyFields(indexFields, indexFieldTypes, stmtCreateIndex.getIndexType());
+
+            if (idx != null) {
+                if (stmtCreateIndex.getIfNotExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
+                    throw new AlgebricksException("An index with this name " + indexName + " already exists.");
+                }
+            }
+
+            // Check whether the user is trying to create an inverted secondary index on a dataset with a variable-length primary key.
+            // This is currently unsupported, so such statements are rejected with an error.
+            if (stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
+                    || stmtCreateIndex.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
+                    || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
+                    || stmtCreateIndex.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
+                List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(ds);
+                for (List<String> partitioningKey : partitioningKeys) {
+                    IAType keyType = aRecordType.getSubFieldType(partitioningKey);
+                    ITypeTraits typeTrait = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+
+                    // If it is not a fixed length
+                    if (typeTrait.getFixedLength() < 0) {
+                        throw new AlgebricksException("The keyword or ngram index -" + indexName
+                                + " cannot be created on the dataset -" + datasetName
+                                + " due to its variable-length primary key field - " + partitioningKey);
+                    }
+
+                }
+            }
+
+            if (ds.getDatasetType() == DatasetType.INTERNAL) {
+                validateIfResourceIsActiveInFeed(dataverseName, datasetName);
+            } else {
+                // External dataset
+                // Check if the dataset is indexible
+                if (!ExternalIndexingOperations.isIndexible((ExternalDatasetDetails) ds.getDatasetDetails())) {
+                    throw new AlgebricksException("dataset using "
+                            + ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter()
+                            + " Adapter can't be indexed");
+                }
+                // check if the name of the index is valid
+                if (!ExternalIndexingOperations.isValidIndexName(datasetName, indexName)) {
+                    throw new AlgebricksException("external dataset index name is invalid");
+                }
+
+                // Check if the files index exist
+                filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                        datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
+                firstExternalDatasetIndex = (filesIndex == null);
+                // lock external dataset
+                ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
+                datasetLocked = true;
+                if (firstExternalDatasetIndex) {
+                    // verify that no one has created an index before we acquire the lock
+                    filesIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
+                            dataverseName, datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
+                    if (filesIndex != null) {
+                        ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
+                        firstExternalDatasetIndex = false;
+                        ExternalDatasetsRegistry.INSTANCE.buildIndexBegin(ds, firstExternalDatasetIndex);
+                    }
+                }
+                if (firstExternalDatasetIndex) {
+                    // Get snapshot from External File System
+                    externalFilesSnapshot = ExternalIndexingOperations.getSnapshotFromExternalFileSystem(ds);
+                    // Add an entry for the files index
+                    filesIndex = new Index(dataverseName, datasetName,
+                            ExternalIndexingOperations.getFilesIndexName(datasetName), IndexType.BTREE,
+                            ExternalIndexingOperations.FILE_INDEX_FIELD_NAMES,
+                            ExternalIndexingOperations.FILE_INDEX_FIELD_TYPES, false, false,
+                            IMetadataEntity.PENDING_ADD_OP);
+                    MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
+                    // Add files to the external files index
+                    for (ExternalFile file : externalFilesSnapshot) {
+                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
+                    }
+                    // This is the first index for the external dataset, replicate the files index
+                    spec = ExternalIndexingOperations.buildFilesIndexReplicationJobSpec(ds, externalFilesSnapshot,
+                            metadataProvider, true);
+                    if (spec == null) {
+                        throw new AsterixException(
+                                "Failed to create job spec for replicating Files Index For external dataset");
+                    }
+                    filesIndexReplicated = true;
+                    runJob(hcc, spec, true);
+                }
+            }
+
+            //check whether there exists another enforced index on the same field
+            if (stmtCreateIndex.isEnforced()) {
+                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(
+                        metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
+                for (Index index : indexes) {
+                    if (index.getKeyFieldNames().equals(indexFields)
+                            && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds())
+                        throw new AsterixException("Cannot create index " + indexName + " , enforced index "
+                                + index.getIndexName() + " on field \"" + StringUtils.join(indexFields, ',')
+                                + "\" already exist");
+                }
+            }
+
+            //#. add a new index with PendingAddOp
+            Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(), indexFields,
+                    indexFieldTypes, stmtCreateIndex.getGramLength(), stmtCreateIndex.isEnforced(), false,
+                    IMetadataEntity.PENDING_ADD_OP);
+            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
+
+            ARecordType enforcedType = null;
+            if (stmtCreateIndex.isEnforced()) {
+                enforcedType = IntroduceSecondaryIndexInsertDeleteRule.createEnforcedType(aRecordType, index);
+            }
+
+            //#. prepare to create the index artifact in NC.
+            CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
+                    index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
+                    index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
+            spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, aRecordType, enforcedType, metadataProvider);
+            if (spec == null) {
+                throw new AsterixException("Failed to create job spec for creating index '"
+                        + stmtCreateIndex.getDatasetName() + "." + stmtCreateIndex.getIndexName() + "'");
+            }
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            bActiveTxn = false;
+
+            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+            //#. create the index artifact in NC.
+            runJob(hcc, spec, true);
+
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            bActiveTxn = true;
+            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+            //#. load data into the index in NC.
+            cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
+                    index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(),
+                    index.getGramLength(), index.getIndexType());
+            spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, aRecordType, enforcedType, metadataProvider);
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            bActiveTxn = false;
+
+            runJob(hcc, spec, true);
+
+            //#. begin new metadataTxn
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            bActiveTxn = true;
+            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+            //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
+            MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
+                    indexName);
+            index.setPendingOp(IMetadataEntity.PENDING_NO_OP);
+            MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
+            // add another new files index with PendingNoOp after deleting the index with PendingAddOp
+            if (firstExternalDatasetIndex) {
+                MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                        datasetName, filesIndex.getIndexName());
+                filesIndex.setPendingOp(IMetadataEntity.PENDING_NO_OP);
+                MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), filesIndex);
+                // update transaction timestamp
+                ((ExternalDatasetDetails) ds.getDatasetDetails()).setRefreshTimestamp(new Date());
+                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, ds);
+            }
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+
+        } catch (Exception e) {
+            if (bActiveTxn) {
+                abort(e, e, mdTxnCtx);
+            }
+            // If files index was replicated for external dataset, it should be cleaned up on NC side
+            if (filesIndexReplicated) {
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                        ExternalIndexingOperations.getFilesIndexName(datasetName));
+                try {
+                    JobSpecification jobSpec = ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
+                            metadataProvider, ds);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    bActiveTxn = false;
+                    runJob(hcc, jobSpec, true);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    if (bActiveTxn) {
+                        abort(e, e2, mdTxnCtx);
+                    }
+                }
+            }
+
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                //#. execute compensation operations
+                //   remove the index in NC
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+                try {
+                    JobSpecification jobSpec = IndexOperations
+                            .buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds);
+
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    bActiveTxn = false;
+                    runJob(hcc, jobSpec, true);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    if (bActiveTxn) {
+                        abort(e, e2, mdTxnCtx);
+                    }
+                }
+
+                if (firstExternalDatasetIndex) {
+                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                    try {
+                        // Drop External Files from metadata
+                        MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
+                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    } catch (Exception e2) {
+                        e.addSuppressed(e2);
+                        abort(e, e2, mdTxnCtx);
+                        throw new IllegalStateException("System is inconsistent state: pending files for("
+                                + dataverseName + "." + datasetName + ") couldn't be removed from the metadata", e);
+                    }
+                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                    metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                    try {
+                        // Drop the files index from metadata
+                        MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                                datasetName, ExternalIndexingOperations.getFilesIndexName(datasetName));
+                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    } catch (Exception e2) {
+                        e.addSuppressed(e2);
+                        abort(e, e2, mdTxnCtx);
+                        throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName
+                                + "." + datasetName + "." + ExternalIndexingOperations.getFilesIndexName(datasetName)
+                                + ") couldn't be removed from the metadata", e);
+                    }
+                }
+                // remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName, indexName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is in inconsistent state: pending index(" + dataverseName
+                            + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
+                }
+            }
+            throw e;
+        } finally {
+            MetadataLockManager.INSTANCE.createIndexEnd(dataverseName, dataverseName + "." + datasetName);
+            if (datasetLocked) {
+                ExternalDatasetsRegistry.INSTANCE.buildIndexEnd(ds, firstExternalDatasetIndex);
+            }
+        }
+    }
+
+    private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
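+        // Illustrative AQL (hypothetical names):
+        //   create type CustomerType as open { cid: int32, name: string };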
+        TypeDecl stmtCreateType = (TypeDecl) stmt;
+        String dataverseName = getActiveDataverse(stmtCreateType.getDataverseName());
+        String typeName = stmtCreateType.getIdent().getValue();
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        MetadataLockManager.INSTANCE.createTypeBegin(dataverseName, dataverseName + "." + typeName);
+        try {
+
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
+            if (dv == null) {
+                throw new AlgebricksException("Unknown dataverse " + dataverseName);
+            }
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
+            if (dt != null) {
+                if (!stmtCreateType.getIfNotExists()) {
+                    throw new AlgebricksException("A datatype with this name " + typeName + " already exists.");
+                }
+            } else {
+                if (builtinTypeMap.get(typeName) != null) {
+                    throw new AlgebricksException("Cannot redefine builtin type " + typeName + ".");
+                } else {
+                    Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx,
+                            stmtCreateType.getTypeDef(), stmtCreateType.getIdent().getValue(), dataverseName);
+                    TypeSignature typeSignature = new TypeSignature(dataverseName, typeName);
+                    IAType type = typeMap.get(typeSignature);
+                    MetadataManager.INSTANCE.addDatatype(mdTxnCtx, new Datatype(dataverseName, typeName, type, false));
+                }
+            }
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            abort(e, e, mdTxnCtx);
+            throw e;
+        } finally {
+            MetadataLockManager.INSTANCE.createTypeEnd(dataverseName, dataverseName + "." + typeName);
+        }
+    }
+
+    private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc) throws Exception {
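+        // Illustrative AQL (hypothetical name): drop dataverse TinySocial if exists;
+        // Active feeds into the dataverse are disconnected first, drop jobs for all datasets
+        // and indexes are run while the dataverse is marked PENDING_DROP_OP, and the
+        // dataverse record itself is removed last.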
+        DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
+        String dataverseName = stmtDelete.getDataverseName().getValue();
+
+        ProgressState progress = ProgressState.NO_PROGRESS;
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        boolean bActiveTxn = true;
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        MetadataLockManager.INSTANCE.acquireDataverseWriteLock(dataverseName);
+        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
+        try {
+
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
+            if (dv == null) {
+                if (stmtDelete.getIfExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
+                    throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
+                }
+            }
+
+            //# disconnect all feeds from any datasets in the dataverse.
+            List<FeedConnectionId> activeFeedConnections = FeedLifecycleListener.INSTANCE
+                    .getActiveFeedConnections(null);
+            DisconnectFeedStatement disStmt = null;
+            Identifier dvId = new Identifier(dataverseName);
+            for (FeedConnectionId connection : activeFeedConnections) {
+                FeedId feedId = connection.getFeedId();
+                if (feedId.getDataverse().equals(dataverseName)) {
+                    disStmt = new DisconnectFeedStatement(dvId, new Identifier(feedId.getFeedName()), new Identifier(
+                            connection.getDatasetName()));
+                    try {
+                        handleDisconnectFeedStatement(metadataProvider, disStmt, hcc);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Disconnected feed " + feedId.getFeedName() + " from dataset "
+                                    + connection.getDatasetName());
+                        }
+                    } catch (Exception exception) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to disconnect feed " + feedId.getFeedName() + " from dataset "
+                                    + connection.getDatasetName() + ". Encountered exception " + exception);
+                        }
+                    }
+                }
+            }
+
+            //#. prepare jobs which will drop corresponding datasets with indexes.
+            List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
+            for (int j = 0; j < datasets.size(); j++) {
+                String datasetName = datasets.get(j).getDatasetName();
+                DatasetType dsType = datasets.get(j).getDatasetType();
+                if (dsType == DatasetType.INTERNAL) {
+                    List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
+                            datasetName);
+                    for (int k = 0; k < indexes.size(); k++) {
+                        if (indexes.get(k).isSecondaryIndex()) {
+                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                                    indexes.get(k).getIndexName());
+                            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
+                                    datasets.get(j)));
+                        }
+                    }
+
+                    CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
+                    jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
+                } else {
+                    // External dataset
+                    List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
+                            datasetName);
+                    for (int k = 0; k < indexes.size(); k++) {
+                        if (ExternalIndexingOperations.isFileIndex(indexes.get(k))) {
+                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                                    indexes.get(k).getIndexName());
+                            jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
+                                    metadataProvider, datasets.get(j)));
+                        } else {
+                            CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                                    indexes.get(k).getIndexName());
+                            jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider,
+                                    datasets.get(j)));
+                        }
+                    }
+                    ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(datasets.get(j));
+                }
+            }
+            jobsToExecute.add(DataverseOperations.createDropDataverseJobSpec(dv, metadataProvider));
+
+            //#. mark PendingDropOp on the dataverse record by
+            //   first, deleting the dataverse record from the DATAVERSE_DATASET
+            //   second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
+            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+            MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(),
+                    IMetadataEntity.PENDING_DROP_OP));
+
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+            bActiveTxn = false;
+            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+            for (JobSpecification jobSpec : jobsToExecute) {
+                runJob(hcc, jobSpec, true);
+            }
+
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            bActiveTxn = true;
+            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+            //#. finally, delete the dataverse.
+            MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+            if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
+                activeDefaultDataverse = null;
+            }
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            if (bActiveTxn) {
+                abort(e, e, mdTxnCtx);
+            }
+
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName().equals(dataverseName)) {
+                    activeDefaultDataverse = null;
+                }
+
+                //#. execute compensation operations
+                //   remove all the indexes in NC
+                try {
+                    for (JobSpecification jobSpec : jobsToExecute) {
+                        runJob(hcc, jobSpec, true);
+                    }
+                } catch (Exception e2) {
+                    //do not throw the exception since the metadata still needs to be compensated.
+                    e.addSuppressed(e2);
+                }
+
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                try {
+                    MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is inconsistent state: pending dataverse(" + dataverseName
+                            + ") couldn't be removed from the metadata", e);
+                }
+            }
+
+            throw e;
+        } finally {
+            MetadataLockManager.INSTANCE.releaseDataverseWriteLock(dataverseName);
+        }
+    }
+
+    private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc) throws Exception {
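+        // Illustrative AQL (hypothetical names): drop dataset Customers if exists;
+        // Active feed connections involving the dataset are disconnected, the dataset is
+        // marked PENDING_DROP_OP, the physical drop jobs are run, and the metadata record
+        // (plus any dataset-specific nodegroup) is removed afterwards.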
+        DropStatement stmtDelete = (DropStatement) stmt;
+        String dataverseName = getActiveDataverse(stmtDelete.getDataverseName());
+        String datasetName = stmtDelete.getDatasetName().getValue();
+
+        ProgressState progress = ProgressState.NO_PROGRESS;
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        boolean bActiveTxn = true;
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+        MetadataLockManager.INSTANCE.dropDatasetBegin(dataverseName, dataverseName + "." + datasetName);
+        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
+        try {
+
+            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
+            if (ds == null) {
+                if (stmtDelete.getIfExists()) {
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    return;
+                } else {
+                    throw new AlgebricksException("There is no dataset with this name " + datasetName
+                            + " in dataverse " + dataverseName + ".");
+                }
+            }
+
+            Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<FeedConnectionId, Pair<JobSpecification, Boolean>>();
+            if (ds.getDatasetType() == DatasetType.INTERNAL) {
+                // prepare job spec(s) that would disconnect any active feeds involving the dataset.
+                List<FeedConnectionId> feedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
+                if (feedConnections != null && !feedConnections.isEmpty()) {
+                    for (FeedConnectionId connection : feedConnections) {
+                        Pair<JobSpecification, Boolean> p = FeedOperations.buildDisconnectFeedJobSpec(metadataProvider,
+                                connection);
+                        disconnectJobList.put(connection, p);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Disconnecting feed " + connection.getFeedId().getFeedName() + " from dataset "
+                                    + datasetName + " as dataset is being dropped");
+                        }
+                    }
+                }
+
+                //#. prepare jobs to drop the dataset and the indexes in NC
+                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
+                for (int j = 0; j < indexes.size(); j++) {
+                    if (indexes.get(j).isSecondaryIndex()) {
+                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                                indexes.get(j).getIndexName());
+                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
+                    }
+                }
+                CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
+                jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
+
+                //#. mark the existing dataset as PendingDropOp
+                MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
+                MetadataManager.INSTANCE.addDataset(
+                        mdTxnCtx,
+                        new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getNodeGroupName(), ds
+                                .getCompactionPolicy(), ds.getCompactionPolicyProperties(), ds.getDatasetDetails(), ds
+                                .getHints(), ds.getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
+
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+                //# disconnect the feeds
+                for (Pair<JobSpecification, Boolean> p : disconnectJobList.values()) {
+                    runJob(hcc, p.first, true);
+                }
+
+                //#. run the jobs
+                for (JobSpecification jobSpec : jobsToExecute) {
+                    runJob(hcc, jobSpec, true);
+                }
+
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+            } else {
+                // External dataset
+                ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
+                //#. prepare jobs to drop the dataset and the indexes in NC
+                List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
+                for (int j = 0; j < indexes.size(); j++) {
+                    if (ExternalIndexingOperations.isFileIndex(indexes.get(j))) {
+                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                                indexes.get(j).getIndexName());
+                        jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
+                    } else {
+                        CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                                indexes.get(j).getIndexName());
+                        jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds, metadataProvider,
+                                ds));
+                    }
+                }
+
+                //#. mark the existing dataset as PendingDropOp
+                MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
+                MetadataManager.INSTANCE.addDataset(
+                        mdTxnCtx,
+                        new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getNodeGroupName(), ds
+                                .getCompactionPolicy(), ds.getCompactionPolicyProperties(), ds.getDatasetDetails(), ds
+                                .getHints(), ds.getDatasetType(), ds.getDatasetId(), IMetadataEntity.PENDING_DROP_OP));
+
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+                //#. run the jobs
+                for (JobSpecification jobSpec : jobsToExecute) {
+                    runJob(hcc, jobSpec, true);
+                }
+                if (indexes.size() > 0) {
+                    ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
+                }
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+            }
+
+            //#. finally, delete the dataset.
+            MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
+            // Drop the associated nodegroup
+            String nodegroup = ds.getNodeGroupName();
+            if (!nodegroup.equalsIgnoreCase(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)) {
+                MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, dataverseName + ":" + datasetName);
+            }
+
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            if (bActiveTxn) {
+                abort(e, e, mdTxnCtx);
+            }
+
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                //#. execute compensation operations
+                //   remove all the indexes in NC
+                try {
+                    for (JobSpecification jobSpec : jobsToExecute) {
+                        runJob(hcc, jobSpec, true);
+                    }
+                } catch (Exception e2) {
+                    //do not throw the exception since the metadata still needs to be compensated.
+                    e.addSuppressed(e2);
+                }
+
+                //   remove the record from the metadata.
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                try {
+                    MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+                            datasetName);
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e2) {
+                    e.addSuppressed(e2);
+                    abort(e, e2, mdTxnCtx);
+                    throw new IllegalStateException("System is in an inconsistent state: pending dataset(" + dataverseName
+                            + "." + datasetName + ") couldn't be removed from the metadata", e);
+                }
+            }
+
+            throw e;
+        } finally {
+            MetadataLockManager.INSTANCE.dropDatasetEnd(dataverseName, dataverseName + "." + datasetName);
+        }
+    }
+
+    private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc) throws Exception {
+
+        IndexDropStatement stmtIndexDrop = (IndexDropStatement) stmt;
+        String datasetName = stmtIndexDrop.getDatasetName().getValue();
+        String dataverseName = getActiveDataverse(stmtIndexDrop.getDataverseName());
+        ProgressState progress = ProgressState.NO_PROGRESS;
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        boolean bActiveTxn = true;
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+        MetadataLockManager.INSTANCE.dropIndexBegin(dataverseName, dataverseName + "." + datasetName);
+
+        String indexName = null;
+        // For external index
+        boolean dropFilesIndex = false;
+        List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
+        try {
+
+            Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
+            if (ds == null) {
+                throw new AlgebricksException("There is no dataset with this name " + datasetName + " in dataverse "
+                        + dataverseName);
+            }
+
+            List<FeedConnectionId> feedConnections = FeedLifecycleListener.INSTANCE.getActiveFeedConnections(null);
+            boolean resourceInUse = false;
+            if (feedConnections != null && !feedConnections.isEmpty()) {
+                StringBuilder builder = new StringBuilder();
+                for (FeedConnectionId connection : feedConnections) {
+                    if (connection.getDatasetName().equals(datasetName)) {
+                        resourceInUse = true;
+                        builder.append(connection + "\n");
+                    }
+                }
+                if (resourceInUse) {
+                    throw new AsterixException("Dataset" + datasetName
+                            + " is currently being fed into by the following feeds " + "." + builder.toString()
+                            + "\nOperation not supported.");
+                }
+            }
+
+            if (ds.getDatasetType() == DatasetType.INTERNAL) {
+                indexName = stmtIndexDrop.getIndexName().getValue();
+                Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+                if (index == null) {
+                    if (stmtIndexDrop.getIfExists()) {
+                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                        return;
+                    } else {
+                        throw new AlgebricksException("There is no index with this name " + indexName + ".");
+                    }
+                }
+                //#. prepare a job to drop the index in NC.
+                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+                jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
+
+                //#. mark PendingDropOp on the existing index
+                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+                MetadataManager.INSTANCE.addIndex(
+                        mdTxnCtx,
+                        new Index(dataverseName, datasetName, indexName, index.getIndexType(),
+                                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index
+                                        .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+
+                //#. commit the existing transaction before calling runJob.
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+                for (JobSpecification jobSpec : jobsToExecute) {
+                    runJob(hcc, jobSpec, true);
+                }
+
+                //#. begin a new transaction
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+                //#. finally, delete the existing index
+                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+            } else {
+                // External dataset
+                indexName = stmtIndexDrop.getIndexName().getValue();
+                Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+                if (index == null) {
+                    if (stmtIndexDrop.getIfExists()) {
+                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                        return;
+                    } else {
+                        throw new AlgebricksException("There is no index with this name " + indexName + ".");
+                    }
+                } else if (ExternalIndexingOperations.isFileIndex(index)) {
+                    throw new AlgebricksException("Dropping a dataset's files index is not allowed.");
+                }
+                //#. prepare a job to drop the index in NC.
+                CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+                jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider, ds));
+                List<Index> datasetIndexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
+                        datasetName);
+                if (datasetIndexes.size() == 2) {
+                    dropFilesIndex = true;
+                    // this is the only index besides the files index, so both indexes need to be dropped
+                    for (Index externalIndex : datasetIndexes) {
+                        if (ExternalIndexingOperations.isFileIndex(externalIndex)) {
+                            cds = new CompiledIndexDropStatement(dataverseName, datasetName,
+                                    externalIndex.getIndexName());
+                            jobsToExecute.add(ExternalIndexingOperations.buildDropFilesIndexJobSpec(cds,
+                                    metadataProvider, ds));
+                            //#. mark PendingDropOp on the existing files index
+                            MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
+                                    externalIndex.getIndexName());
+                            MetadataManager.INSTANCE.addIndex(
+                                    mdTxnCtx,
+                                    new Index(dataverseName, datasetName, externalIndex.getIndexName(), externalIndex
+                                            .getIndexType(), externalIndex.getKeyFieldNames(), externalIndex
+                                            .getKeyFieldTypes(), externalIndex.isEnforcingKeyFileds(), externalIndex
+                                                    .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+                        }
+                    }
+                }
+
+                //#. mark PendingDropOp on the existing index
+                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+                MetadataManager.INSTANCE.addIndex(
+                        mdTxnCtx,
+                        new Index(dataverseName, datasetName, indexName, index.getIndexType(),
+                                index.getKeyFieldNames(), index.getKeyFieldTypes(), index.isEnforcingKeyFileds(), index
+                                        .isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+
+                //#. commit the existing transaction before calling runJob.
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                bActiveTxn = false;
+                progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+                for (JobSpecification jobSpec : jobsToExecute) {
+                    runJob(hcc, jobSpec, true);
+                }
+
+                //#. begin a new transaction
+                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                bActiveTxn = true;
+                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+                //#. finally, delete the existing index
+                MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+                if (dropFilesIndex) {
+                    // delete the files index too
+                    MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName,
+                            ExternalIndexingOperations.getFilesIndexName(datasetName));
+                    MetadataManager.INSTANCE.dropDatasetExternalFiles(mdTxnCtx, ds);
+                    ExternalDatasetsRegistry.INSTANCE.removeDatasetInfo(ds);
+                }
+            }
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+
+        } catch (Exception e) {
+            if (bActiveTxn) {
+                abort(e, e, mdTxnCtx);
+            }
+
+            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+                //#. execute compensation operations
+                //   remove all the indexes in NC
+                try {
+                    for (JobSpecification jobSpec : jobsToExecute) {
+                        runJob(hcc, jobSpec, true);
+                    }
+                } catch (Exception e2) {
+                    //do not throw the exception since the metadata still needs to be compensated.
+                    e.addSuppressed(e2);
+                

<TRUNCATED>


[34/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
new file mode 100644
index 0000000..a200874
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
@@ -0,0 +1,331 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.base;
+
+import java.util.LinkedList;
+import java.util.List;
+
+import edu.uci.ics.asterix.optimizer.rules.AddEquivalenceClassForRecordConstructorRule;
+import edu.uci.ics.asterix.optimizer.rules.AsterixExtractFunctionsFromJoinConditionRule;
+import edu.uci.ics.asterix.optimizer.rules.AsterixInlineVariablesRule;
+import edu.uci.ics.asterix.optimizer.rules.AsterixIntroduceGroupByCombinerRule;
+import edu.uci.ics.asterix.optimizer.rules.AsterixMoveFreeVariableOperatorOutOfSubplanRule;
+import edu.uci.ics.asterix.optimizer.rules.ByNameToByIndexFieldAccessRule;
+import edu.uci.ics.asterix.optimizer.rules.CancelUnnestWithNestedListifyRule;
+import edu.uci.ics.asterix.optimizer.rules.CheckFilterExpressionTypeRule;
+import edu.uci.ics.asterix.optimizer.rules.ConstantFoldingRule;
+import edu.uci.ics.asterix.optimizer.rules.CountVarToCountOneRule;
+import edu.uci.ics.asterix.optimizer.rules.DisjunctivePredicateToJoinRule;
+import edu.uci.ics.asterix.optimizer.rules.ExtractDistinctByExpressionsRule;
+import edu.uci.ics.asterix.optimizer.rules.ExtractOrderExpressionsRule;
+import edu.uci.ics.asterix.optimizer.rules.FeedScanCollectionToUnnest;
+import edu.uci.ics.asterix.optimizer.rules.FuzzyEqRule;
+import edu.uci.ics.asterix.optimizer.rules.IfElseToSwitchCaseFunctionRule;
+import edu.uci.ics.asterix.optimizer.rules.InlineUnnestFunctionRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceAutogenerateIDRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastForExternalFunctionRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceEnforcedListTypeRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceInstantLockSearchCallbackRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceMaterializationForInsertWithSelfScanRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceRandomPartitioningFeedComputationRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceRapidFrameFlushProjectAssignRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceStaticTypeCastForInsertRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceUnionRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceUnnestForCollectionToSequenceRule;
+import edu.uci.ics.asterix.optimizer.rules.LoadRecordFieldsRule;
+import edu.uci.ics.asterix.optimizer.rules.NestGroupByRule;
+import edu.uci.ics.asterix.optimizer.rules.PushAggFuncIntoStandaloneAggregateRule;
+import edu.uci.ics.asterix.optimizer.rules.PushAggregateIntoGroupbyRule;
+import edu.uci.ics.asterix.optimizer.rules.PushFieldAccessRule;
+import edu.uci.ics.asterix.optimizer.rules.PushGroupByThroughProduct;
+import edu.uci.ics.asterix.optimizer.rules.PushProperJoinThroughProduct;
+import edu.uci.ics.asterix.optimizer.rules.PushSimilarityFunctionsBelowJoin;
+import edu.uci.ics.asterix.optimizer.rules.RemoveRedundantListifyRule;
+import edu.uci.ics.asterix.optimizer.rules.RemoveRedundantSelectRule;
+import edu.uci.ics.asterix.optimizer.rules.RemoveSortInFeedIngestionRule;
+import edu.uci.ics.asterix.optimizer.rules.RemoveUnusedOneToOneEquiJoinRule;
+import edu.uci.ics.asterix.optimizer.rules.ReplaceSinkOpWithCommitOpRule;
+import edu.uci.ics.asterix.optimizer.rules.SetAsterixPhysicalOperatorsRule;
+import edu.uci.ics.asterix.optimizer.rules.SetClosedRecordConstructorsRule;
+import edu.uci.ics.asterix.optimizer.rules.SimilarityCheckRule;
+import edu.uci.ics.asterix.optimizer.rules.SweepIllegalNonfunctionalFunctions;
+import edu.uci.ics.asterix.optimizer.rules.UnnestToDataScanRule;
+import edu.uci.ics.asterix.optimizer.rules.am.IntroduceJoinAccessMethodRule;
+import edu.uci.ics.asterix.optimizer.rules.am.IntroduceLSMComponentFilterRule;
+import edu.uci.ics.asterix.optimizer.rules.am.IntroduceSelectAccessMethodRule;
+import edu.uci.ics.asterix.optimizer.rules.temporal.TranslateIntervalExpressionRule;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.HeuristicOptimizer;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.BreakSelectIntoConjunctsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexJoinInferenceRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ComplexUnnestToProductRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateAssignsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ConsolidateSelectsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.CopyLimitDownRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateGroupByEmptyKeyRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EliminateSubplanWithInputCardinalityOneRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceOrderByAfterSubplan;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.EnforceStructuralPropertiesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractCommonExpressionsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractCommonOperatorsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractGbyExpressionsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.FactorRedundantGroupAndDecorVarsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InferTypesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineAssignIntoAggregateRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineSingleReferenceVariablesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InsertOuterJoinRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InsertProjectBeforeUnionRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroHashPartitionMergeExchange;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroJoinInsideSubplanRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceAggregateCombinerRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByForSubplanRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceProjectsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IsolateHyracksOperatorsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.NestedSubplanToJoinRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignBelowUnionAllRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushGroupByIntoSortRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushMapOperatorDownThroughProductRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushNestedOrderByUnderPreSortedGroupByRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectDownRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSelectIntoJoinRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSubplanIntoGroupByRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushSubplanWithAggregateDownThroughProductRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushUnnestDownThroughUnionRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ReinferAllTypesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveRedundantGroupByDecorVars;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveRedundantVariablesRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.RemoveUnusedAssignAndAggregateRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetAlgebricksPhysicalOperatorsRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SetExecutionModeRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SimpleUnnestToProductRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.SubplanOutOfGroupRule;
+
+public final class RuleCollections {
+
+    public final static List<IAlgebraicRewriteRule> buildInitialTranslationRuleCollection() {
+        List<IAlgebraicRewriteRule> typeInfer = new LinkedList<IAlgebraicRewriteRule>();
+        typeInfer.add(new TranslateIntervalExpressionRule());
+        return typeInfer;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildTypeInferenceRuleCollection() {
+        List<IAlgebraicRewriteRule> typeInfer = new LinkedList<IAlgebraicRewriteRule>();
+        typeInfer.add(new InlineUnnestFunctionRule());
+        typeInfer.add(new InferTypesRule());
+        typeInfer.add(new CheckFilterExpressionTypeRule());
+        return typeInfer;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildAutogenerateIDRuleCollection() {
+        List<IAlgebraicRewriteRule> autogen = new LinkedList<>();
+        autogen.add(new IntroduceAutogenerateIDRule());
+        return autogen;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildNormalizationRuleCollection() {
+        List<IAlgebraicRewriteRule> normalization = new LinkedList<IAlgebraicRewriteRule>();
+        normalization.add(new IntroduceUnnestForCollectionToSequenceRule());
+        normalization.add(new EliminateSubplanRule());
+        normalization.add(new EnforceOrderByAfterSubplan());
+        normalization.add(new PushAggFuncIntoStandaloneAggregateRule());
+        normalization.add(new BreakSelectIntoConjunctsRule());
+        normalization.add(new ExtractGbyExpressionsRule());
+        normalization.add(new ExtractDistinctByExpressionsRule());
+        normalization.add(new ExtractOrderExpressionsRule());
+        normalization.add(new AsterixMoveFreeVariableOperatorOutOfSubplanRule());
+
+        // IntroduceStaticTypeCastForInsertRule should go before
+        // IntroduceDynamicTypeCastRule to avoid unnecessary dynamic casting.
+        normalization.add(new IntroduceStaticTypeCastForInsertRule());
+        normalization.add(new IntroduceDynamicTypeCastRule());
+        normalization.add(new IntroduceDynamicTypeCastForExternalFunctionRule());
+        normalization.add(new IntroduceEnforcedListTypeRule());
+        normalization.add(new ExtractCommonExpressionsRule());
+        normalization.add(new ConstantFoldingRule());
+        normalization.add(new RemoveRedundantSelectRule());
+        normalization.add(new UnnestToDataScanRule());
+        normalization.add(new IfElseToSwitchCaseFunctionRule());
+        normalization.add(new FuzzyEqRule());
+        normalization.add(new SimilarityCheckRule());
+        return normalization;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildCondPushDownAndJoinInferenceRuleCollection() {
+        List<IAlgebraicRewriteRule> condPushDownAndJoinInference = new LinkedList<IAlgebraicRewriteRule>();
+
+        condPushDownAndJoinInference.add(new PushSelectDownRule());
+        condPushDownAndJoinInference.add(new RemoveRedundantListifyRule());
+        condPushDownAndJoinInference.add(new CancelUnnestWithNestedListifyRule());
+        condPushDownAndJoinInference.add(new SimpleUnnestToProductRule());
+        condPushDownAndJoinInference.add(new ComplexUnnestToProductRule());
+        condPushDownAndJoinInference.add(new ComplexJoinInferenceRule());
+        condPushDownAndJoinInference.add(new DisjunctivePredicateToJoinRule());
+        condPushDownAndJoinInference.add(new PushSelectIntoJoinRule());
+        condPushDownAndJoinInference.add(new IntroJoinInsideSubplanRule());
+        condPushDownAndJoinInference.add(new PushMapOperatorDownThroughProductRule());
+        condPushDownAndJoinInference.add(new PushSubplanWithAggregateDownThroughProductRule());
+        condPushDownAndJoinInference.add(new IntroduceGroupByForSubplanRule());
+        condPushDownAndJoinInference.add(new SubplanOutOfGroupRule());
+        condPushDownAndJoinInference.add(new InsertOuterJoinRule());
+        condPushDownAndJoinInference.add(new AsterixExtractFunctionsFromJoinConditionRule());
+
+        condPushDownAndJoinInference.add(new RemoveRedundantVariablesRule());
+        condPushDownAndJoinInference.add(new AsterixInlineVariablesRule());
+        condPushDownAndJoinInference.add(new RemoveUnusedAssignAndAggregateRule());
+
+        condPushDownAndJoinInference.add(new FactorRedundantGroupAndDecorVarsRule());
+        condPushDownAndJoinInference.add(new PushAggregateIntoGroupbyRule());
+        condPushDownAndJoinInference.add(new EliminateSubplanRule());
+        condPushDownAndJoinInference.add(new PushProperJoinThroughProduct());
+        condPushDownAndJoinInference.add(new PushGroupByThroughProduct());
+        condPushDownAndJoinInference.add(new NestGroupByRule());
+        condPushDownAndJoinInference.add(new EliminateGroupByEmptyKeyRule());
+        condPushDownAndJoinInference.add(new PushSubplanIntoGroupByRule());
+        condPushDownAndJoinInference.add(new NestedSubplanToJoinRule());
+        condPushDownAndJoinInference.add(new EliminateSubplanWithInputCardinalityOneRule());
+
+        return condPushDownAndJoinInference;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildLoadFieldsRuleCollection() {
+        List<IAlgebraicRewriteRule> fieldLoads = new LinkedList<IAlgebraicRewriteRule>();
+        fieldLoads.add(new LoadRecordFieldsRule());
+        fieldLoads.add(new PushFieldAccessRule());
+        // fieldLoads.add(new ByNameToByHandleFieldAccessRule()); -- disabled
+        fieldLoads.add(new ByNameToByIndexFieldAccessRule());
+        fieldLoads.add(new RemoveRedundantVariablesRule());
+        fieldLoads.add(new AsterixInlineVariablesRule());
+        fieldLoads.add(new RemoveUnusedAssignAndAggregateRule());
+        fieldLoads.add(new ConstantFoldingRule());
+        fieldLoads.add(new FeedScanCollectionToUnnest());
+        fieldLoads.add(new ComplexJoinInferenceRule());
+        return fieldLoads;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildFuzzyJoinRuleCollection() {
+        List<IAlgebraicRewriteRule> fuzzy = new LinkedList<IAlgebraicRewriteRule>();
+        // fuzzy.add(new FuzzyJoinRule()); -- The non-indexed fuzzy join is temporarily disabled; it should be re-enabled in the near future.
+        fuzzy.add(new InferTypesRule());
+        return fuzzy;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildConsolidationRuleCollection() {
+        List<IAlgebraicRewriteRule> consolidation = new LinkedList<IAlgebraicRewriteRule>();
+        consolidation.add(new ConsolidateSelectsRule());
+        consolidation.add(new ConsolidateAssignsRule());
+        consolidation.add(new InlineAssignIntoAggregateRule());
+        consolidation.add(new AsterixIntroduceGroupByCombinerRule());
+        consolidation.add(new IntroduceAggregateCombinerRule());
+        consolidation.add(new CountVarToCountOneRule());
+        consolidation.add(new RemoveUnusedAssignAndAggregateRule());
+        consolidation.add(new RemoveRedundantGroupByDecorVars());
+        consolidation.add(new NestedSubplanToJoinRule());
+        // IntroduceUnionRule => PushUnnestDownThroughUnionRule => RemoveRedundantListifyRule must run in this order because these rules are correlated.
+        consolidation.add(new IntroduceUnionRule());
+        consolidation.add(new PushUnnestDownThroughUnionRule());
+        consolidation.add(new RemoveRedundantListifyRule());
+
+        return consolidation;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildAccessMethodRuleCollection() {
+        List<IAlgebraicRewriteRule> accessMethod = new LinkedList<IAlgebraicRewriteRule>();
+        accessMethod.add(new IntroduceSelectAccessMethodRule());
+        accessMethod.add(new IntroduceJoinAccessMethodRule());
+        accessMethod.add(new IntroduceLSMComponentFilterRule());
+        accessMethod.add(new IntroduceSecondaryIndexInsertDeleteRule());
+        accessMethod.add(new RemoveUnusedOneToOneEquiJoinRule());
+        accessMethod.add(new PushSimilarityFunctionsBelowJoin());
+        accessMethod.add(new RemoveUnusedAssignAndAggregateRule());
+        return accessMethod;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildPlanCleanupRuleCollection() {
+        List<IAlgebraicRewriteRule> planCleanupRules = new LinkedList<IAlgebraicRewriteRule>();
+        planCleanupRules.add(new PushAssignBelowUnionAllRule());
+        planCleanupRules.add(new ExtractCommonExpressionsRule());
+        planCleanupRules.add(new RemoveRedundantVariablesRule());
+        planCleanupRules.add(new PushProjectDownRule());
+        planCleanupRules.add(new PushSelectDownRule());
+        planCleanupRules.add(new SetClosedRecordConstructorsRule());
+        planCleanupRules.add(new IntroduceDynamicTypeCastRule());
+        planCleanupRules.add(new IntroduceDynamicTypeCastForExternalFunctionRule());
+        planCleanupRules.add(new RemoveUnusedAssignAndAggregateRule());
+        return planCleanupRules;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildDataExchangeRuleCollection() {
+        List<IAlgebraicRewriteRule> dataExchange = new LinkedList<IAlgebraicRewriteRule>();
+        dataExchange.add(new SetExecutionModeRule());
+        return dataExchange;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildPhysicalRewritesAllLevelsRuleCollection() {
+        List<IAlgebraicRewriteRule> physicalRewritesAllLevels = new LinkedList<IAlgebraicRewriteRule>();
+        physicalRewritesAllLevels.add(new PullSelectOutOfEqJoin());
+        //Turned off the following rule for now so as not to change OptimizerTest results.
+        //physicalRewritesAllLevels.add(new IntroduceTransactionCommitByAssignOpRule());
+        physicalRewritesAllLevels.add(new ReplaceSinkOpWithCommitOpRule());
+        physicalRewritesAllLevels.add(new SetAlgebricksPhysicalOperatorsRule());
+        physicalRewritesAllLevels.add(new SetAsterixPhysicalOperatorsRule());
+        physicalRewritesAllLevels.add(new IntroduceInstantLockSearchCallbackRule());
+        physicalRewritesAllLevels.add(new AddEquivalenceClassForRecordConstructorRule());
+        physicalRewritesAllLevels.add(new EnforceStructuralPropertiesRule());
+        physicalRewritesAllLevels.add(new RemoveSortInFeedIngestionRule());
+        physicalRewritesAllLevels.add(new IntroHashPartitionMergeExchange());
+        physicalRewritesAllLevels.add(new PushProjectDownRule());
+        physicalRewritesAllLevels.add(new InsertProjectBeforeUnionRule());
+        physicalRewritesAllLevels.add(new IntroduceMaterializationForInsertWithSelfScanRule());
+        physicalRewritesAllLevels.add(new InlineSingleReferenceVariablesRule());
+        physicalRewritesAllLevels.add(new RemoveUnusedAssignAndAggregateRule());
+        physicalRewritesAllLevels.add(new ConsolidateAssignsRule());
+        // After adding projects, we may need to set physical operators again.
+        physicalRewritesAllLevels.add(new SetAlgebricksPhysicalOperatorsRule());
+        return physicalRewritesAllLevels;
+    }
+
+    public final static List<IAlgebraicRewriteRule> buildPhysicalRewritesTopLevelRuleCollection() {
+        List<IAlgebraicRewriteRule> physicalRewritesTopLevel = new LinkedList<IAlgebraicRewriteRule>();
+        physicalRewritesTopLevel.add(new PushNestedOrderByUnderPreSortedGroupByRule());
+        physicalRewritesTopLevel.add(new CopyLimitDownRule());
+        physicalRewritesTopLevel.add(new IntroduceProjectsRule());
+        physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
+        physicalRewritesTopLevel.add(new IntroduceRapidFrameFlushProjectAssignRule());
+        physicalRewritesTopLevel.add(new SetExecutionModeRule());
+        physicalRewritesTopLevel.add(new IntroduceRandomPartitioningFeedComputationRule());
+        return physicalRewritesTopLevel;
+    }
+
+    public final static List<IAlgebraicRewriteRule> prepareForJobGenRuleCollection() {
+        List<IAlgebraicRewriteRule> prepareForJobGenRewrites = new LinkedList<IAlgebraicRewriteRule>();
+        prepareForJobGenRewrites.add(new IsolateHyracksOperatorsRule(
+                HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled));
+        prepareForJobGenRewrites.add(new ExtractCommonOperatorsRule());
+        // Re-infer all types, so that, e.g., the effect of not-is-null is
+        // propagated.
+        prepareForJobGenRewrites.add(new ReinferAllTypesRule());
+        prepareForJobGenRewrites.add(new PushGroupByIntoSortRule());
+        prepareForJobGenRewrites.add(new SetExecutionModeRule());
+        prepareForJobGenRewrites.add(new SweepIllegalNonfunctionalFunctions());
+        return prepareForJobGenRewrites;
+    }
+}
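The collections above are handed to the Algebricks rewriter, which applies each rule repeatedly until the plan stops changing. The driver below is a minimal, hypothetical sketch of that loop, using only the IAlgebraicRewriteRule interface imported in this file; the real HeuristicOptimizer additionally descends into nested plans and honors per-collection fixpoint policies, which this sketch omits.

package edu.uci.ics.asterix.optimizer.base;

import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

// Hypothetical driver, for illustration only: applies one rule collection
// to a plan root until no rule fires any more (a simple fixpoint loop).
public final class RuleCollectionDriver {

    private RuleCollectionDriver() {
    }

    public static boolean applyUntilFixpoint(List<IAlgebraicRewriteRule> rules,
            Mutable<ILogicalOperator> planRoot, IOptimizationContext context) throws AlgebricksException {
        boolean anyChange = false;
        boolean changedInPass;
        do {
            changedInPass = false;
            for (IAlgebraicRewriteRule rule : rules) {
                // Both callbacks return true when the rule modified the plan.
                changedInPass |= rule.rewritePre(planRoot, context);
                changedInPass |= rule.rewritePost(planRoot, context);
            }
            anyChange |= changedInPass;
        } while (changedInPass);
        return anyChange;
    }
}

For instance, buildNormalizationRuleCollection() would be passed to applyUntilFixpoint(...) right after the type-inference collection, mirroring the order of the builder methods above.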

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldIndexAndTypeHandle.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldIndexAndTypeHandle.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldIndexAndTypeHandle.java
new file mode 100644
index 0000000..7eee1ff
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldIndexAndTypeHandle.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.handle;
+
+import edu.uci.ics.asterix.om.types.IAType;
+
+public class FieldIndexAndTypeHandle implements IHandle {
+
+    private int fieldIndex;
+    private IAType fieldType;
+
+    public FieldIndexAndTypeHandle(int fieldIndex, IAType fieldType) {
+        this.fieldIndex = fieldIndex;
+        this.fieldType = fieldType;
+    }
+
+    @Override
+    public HandleType getHandleType() {
+        return HandleType.FIELD_INDEX_AND_TYPE;
+    }
+
+    public int getFieldIndex() {
+        return fieldIndex;
+    }
+
+    public IAType getFieldType() {
+        return fieldType;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldNameHandle.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldNameHandle.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldNameHandle.java
new file mode 100644
index 0000000..a1b8409
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/FieldNameHandle.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.handle;
+
+import edu.uci.ics.asterix.om.types.IAType;
+
+public class FieldNameHandle implements IHandle {
+
+    private String fieldName;
+    private IAType fieldType;
+
+    public FieldNameHandle(String fieldName) {
+        this.fieldName = fieldName;
+    }
+
+    public IAType getFieldType() {
+        return fieldType;
+    }
+
+    public void setFieldType(IAType fieldType) {
+        this.fieldType = fieldType;
+    }
+
+    @Override
+    public HandleType getHandleType() {
+        return HandleType.FIELD_NAME;
+    }
+
+    public String getFieldName() {
+        return fieldName;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/IHandle.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/IHandle.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/IHandle.java
new file mode 100644
index 0000000..80103b2
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/handle/IHandle.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.handle;
+
+/**
+ * A handle is a way of accessing an ADM instance or a collection of ADM
+ * instances nested within another ADM instance.
+ *
+ * @author Nicola
+ */
+
+public interface IHandle {
+    public enum HandleType {
+        FIELD_INDEX_AND_TYPE,
+        FIELD_NAME
+    }
+
+    public HandleType getHandleType();
+}
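For illustration only, the two concrete handles can be consumed by dispatching on HandleType; the HandlePrinter helper below is hypothetical (not part of this commit) and relies solely on the getters defined in this package.

package edu.uci.ics.asterix.optimizer.handle;

// Hypothetical helper: renders any IHandle from this package as a
// human-readable description by dispatching on its HandleType.
public final class HandlePrinter {

    private HandlePrinter() {
    }

    public static String describe(IHandle handle) {
        switch (handle.getHandleType()) {
            case FIELD_INDEX_AND_TYPE: {
                FieldIndexAndTypeHandle h = (FieldIndexAndTypeHandle) handle;
                return "field #" + h.getFieldIndex() + " of type " + h.getFieldType();
            }
            case FIELD_NAME: {
                FieldNameHandle h = (FieldNameHandle) handle;
                return "field \"" + h.getFieldName() + "\"";
            }
            default:
                throw new IllegalStateException("Unknown handle type: " + handle.getHandleType());
        }
    }
}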

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java
new file mode 100644
index 0000000..c738720
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AddEquivalenceClassForRecordConstructorRule.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+import org.mortbay.util.SingletonList;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.EquivalenceClass;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.util.PhysicalOptimizationsUtil;
+
+/**
+ * Adds equivalence classes for record constructors.
+ * For example, for $x:=record-constructor("field1": $v, "field2": $t),
+ * two equivalence classes will be added:
+ * <$v, field-access-by-index($x, 0)>
+ * <$t, field-access-by-index($x, 1)>
+ *
+ * @author yingyi
+ */
+public class AddEquivalenceClassForRecordConstructorRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        // Computes FDs and equivalence classes for the operator.
+        PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(op, context);
+        AssignOperator assignOp = (AssignOperator) op;
+        List<LogicalVariable> vars = assignOp.getVariables();
+        List<Mutable<ILogicalExpression>> exprRefs = assignOp.getExpressions();
+        return addEquivalenceClassesForRecordConstructor(vars, exprRefs, assignOp, context);
+    }
+
+    private boolean addEquivalenceClassesForRecordConstructor(List<LogicalVariable> vars,
+            List<Mutable<ILogicalExpression>> exprRefs, AssignOperator assignOp, IOptimizationContext context) {
+        boolean changed = false;
+        for (int exprIndex = 0; exprIndex < exprRefs.size(); ++exprIndex) {
+            ILogicalExpression expr = exprRefs.get(exprIndex).getValue();
+            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) expr;
+                FunctionIdentifier fid = funcExpr.getFunctionIdentifier();
+                if (fid == AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR
+                        || fid == AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR) {
+                    changed |= propagateEquivalenceClassesForRecordConstructor(vars.get(exprIndex), funcExpr, assignOp,
+                            context);
+                }
+            }
+        }
+        return changed;
+    }
+
+    @SuppressWarnings("unchecked")
+    private boolean propagateEquivalenceClassesForRecordConstructor(LogicalVariable recordVar,
+            ScalarFunctionCallExpression funcExpr, AssignOperator assignOp, IOptimizationContext context) {
+        List<Mutable<ILogicalExpression>> argRefs = funcExpr.getArguments();
+        boolean changed = false;
+        // Only odd position arguments are field value expressions.
+        for (int parameterIndex = 1; parameterIndex < argRefs.size(); parameterIndex += 2) {
+            ILogicalExpression fieldExpr = argRefs.get(parameterIndex).getValue();
+            // Adds an equivalence class if the field value is a variable reference.
+            if (fieldExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                VariableReferenceExpression varExpr = (VariableReferenceExpression) fieldExpr;
+                LogicalVariable fieldVar = varExpr.getVariableReference();
+                Map<LogicalVariable, EquivalenceClass> ecs = context.getEquivalenceClassMap(assignOp);
+                if (ecs == null) {
+                    ecs = new HashMap<LogicalVariable, EquivalenceClass>();
+                    context.putEquivalenceClassMap(assignOp, ecs);
+                }
+                ILogicalExpression expr = new ScalarFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
+                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(recordVar)),
+                        new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                                new AInt32(parameterIndex / 2))))); // Every two parameters correspond to one field.
+                EquivalenceClass equivClass = new EquivalenceClass(SingletonList.newSingletonList(fieldVar), fieldVar,
+                        SingletonList.newSingletonList(expr));
+                ecs.put(fieldVar, equivClass);
+                changed = true;
+            }
+        }
+        return changed;
+    }
+
+}
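A consumer-side sketch (hypothetical, not part of this commit): once this rule has populated the equivalence-class map, a downstream rewrite can ask whether a field variable is interchangeable with a field-access-by-index over the record variable. The helper below only uses getEquivalenceClassMap, which already appears in the rule above.

import java.util.Map;

import edu.uci.ics.hyracks.algebricks.core.algebra.base.EquivalenceClass;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;

// Hypothetical lookup used by a downstream rule: returns the equivalence
// class recorded for a field variable of the given assign operator, or null
// if the rule above did not register one.
final class EquivalenceClassLookup {

    private EquivalenceClassLookup() {
    }

    static EquivalenceClass lookup(IOptimizationContext context, ILogicalOperator assignOp, LogicalVariable fieldVar) {
        Map<LogicalVariable, EquivalenceClass> ecs = context.getEquivalenceClassMap(assignOp);
        return ecs == null ? null : ecs.get(fieldVar);
    }
}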

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java
new file mode 100644
index 0000000..5457e55
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixExtractFunctionsFromJoinConditionRule.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.ExtractFunctionsFromJoinConditionRule;
+
+public class AsterixExtractFunctionsFromJoinConditionRule extends ExtractFunctionsFromJoinConditionRule {
+
+    @Override
+    protected boolean processArgumentsToFunction(FunctionIdentifier fi) {
+        return fi.equals(AsterixBuiltinFunctions.GET_ITEM);
+    }
+
+    @Override
+    protected boolean isComparisonFunction(FunctionIdentifier fi) {
+        return AsterixBuiltinFunctions.isSimilarityFunction(fi);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixInlineVariablesRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixInlineVariablesRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixInlineVariablesRule.java
new file mode 100644
index 0000000..296bbb2
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixInlineVariablesRule.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.InlineVariablesRule;
+
+public class AsterixInlineVariablesRule extends InlineVariablesRule {
+
+    public AsterixInlineVariablesRule() {
+        // Do not inline field accesses and spatial functions because doing so would interfere with our access method rewrites.
+        // TODO: For now we must also exclude record constructor functions to avoid breaking our type casting rules
+        // IntroduceStaticTypeCastRule and IntroduceDynamicTypeCastRule.
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CAST_RECORD);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_CIRCLE);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_LINE);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_MBR);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_POINT);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_POLYGON);
+        doNotInlineFuncs.add(AsterixBuiltinFunctions.CREATE_RECTANGLE);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
new file mode 100644
index 0000000..b71ffab
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractIntroduceGroupByCombinerRule;
+
+public class AsterixIntroduceGroupByCombinerRule extends AbstractIntroduceGroupByCombinerRule {
+
+    @SuppressWarnings("unchecked")
+    @Override
+    protected void processNullTest(IOptimizationContext context, GroupByOperator nestedGby,
+            List<LogicalVariable> aggregateVarsProducedByCombiner) {
+        IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AsterixBuiltinFunctions.IS_SYSTEM_NULL);
+        SelectOperator selectNonSystemNull;
+
+        if (aggregateVarsProducedByCombiner.size() == 1) {
+            ILogicalExpression isSystemNullTest = new ScalarFunctionCallExpression(finfoEq,
+                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                            aggregateVarsProducedByCombiner.get(0))));
+            IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
+            ScalarFunctionCallExpression nonSystemNullTest = new ScalarFunctionCallExpression(finfoNot,
+                    new MutableObject<ILogicalExpression>(isSystemNullTest));
+            selectNonSystemNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonSystemNullTest), false,
+                    null);
+        } else {
+            List<Mutable<ILogicalExpression>> isSystemNullTestList = new ArrayList<Mutable<ILogicalExpression>>();
+            for (LogicalVariable aggVar : aggregateVarsProducedByCombiner) {
+                ILogicalExpression isSystemNullTest = new ScalarFunctionCallExpression(finfoEq,
+                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(aggVar)));
+                IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
+                ScalarFunctionCallExpression nonSystemNullTest = new ScalarFunctionCallExpression(finfoNot,
+                        new MutableObject<ILogicalExpression>(isSystemNullTest));
+                isSystemNullTestList.add(new MutableObject<ILogicalExpression>(nonSystemNullTest));
+            }
+            IFunctionInfo finfoAnd = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.AND);
+            selectNonSystemNull = new SelectOperator(new MutableObject<ILogicalExpression>(
+                    new ScalarFunctionCallExpression(finfoAnd, isSystemNullTestList)), false, null);
+        }
+
+        //add the not-system-null check into the nested pipeline
+        Mutable<ILogicalOperator> ntsBeforeNestedGby = nestedGby.getInputs().get(0);
+        nestedGby.getInputs().set(0, new MutableObject<ILogicalOperator>(selectNonSystemNull));
+        selectNonSystemNull.getInputs().add(ntsBeforeNestedGby);
+    }
+}
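
The processNullTest hook above wires a not(is-system-null(...)) filter in front of the nested group-by pipeline. For a single combiner-produced variable that filter is simply select(not(is-system-null($v))). The standalone sketch below rebuilds just that expression tree with the same classes used in the rule; the wrapper class and method names are illustrative and not part of this patch.

    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
    import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;

    // Illustrative helper: builds select(not(is-system-null($aggVar))), the check the combiner
    // rule injects for one combiner-produced aggregate variable.
    public class NotSystemNullSelectSketch {
        @SuppressWarnings("unchecked")
        public static SelectOperator build(IOptimizationContext context, LogicalVariable aggVar) {
            IFunctionInfo isSystemNull = context.getMetadataProvider().lookupFunction(
                    AsterixBuiltinFunctions.IS_SYSTEM_NULL);
            IFunctionInfo not = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
            // is-system-null($aggVar)
            ILogicalExpression isSystemNullTest = new ScalarFunctionCallExpression(isSystemNull,
                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(aggVar)));
            // not(is-system-null($aggVar))
            ILogicalExpression nonSystemNullTest = new ScalarFunctionCallExpression(not,
                    new MutableObject<ILogicalExpression>(isSystemNullTest));
            // the trailing (false, null) arguments mirror the rule's own SelectOperator calls
            return new SelectOperator(new MutableObject<ILogicalExpression>(nonSystemNullTest), false, null);
        }
    }
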

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java
new file mode 100644
index 0000000..c42fa15
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixMoveFreeVariableOperatorOutOfSubplanRule.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.MoveFreeVariableOperatorOutOfSubplanRule;
+
+public class AsterixMoveFreeVariableOperatorOutOfSubplanRule extends MoveFreeVariableOperatorOutOfSubplanRule {
+
+    @Override
+    protected boolean movableOperator(LogicalOperatorTag operatorTag) {
+        return (operatorTag == LogicalOperatorTag.ASSIGN);
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
new file mode 100644
index 0000000..9c04564
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class ByNameToByHandleFieldAccessRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator assign = (AssignOperator) op;
+        if (assign.getAnnotations().get(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS) == null) {
+            return false;
+        }
+        byNameToByHandle(assign, context);
+        return true;
+    }
+
+    private static void byNameToByHandle(AssignOperator fieldAccessOp, IOptimizationContext context) {
+        Mutable<ILogicalOperator> opUnder = fieldAccessOp.getInputs().get(0);
+        AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) fieldAccessOp.getExpressions().get(0)
+                .getValue();
+        ILogicalExpression a1 = fce.getArguments().get(0).getValue();
+
+        VariableReferenceExpression x;
+        if (a1.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            x = (VariableReferenceExpression) a1;
+        } else {
+            LogicalVariable var1 = context.newVar();
+            ArrayList<LogicalVariable> varArray = new ArrayList<LogicalVariable>(1);
+            varArray.add(var1);
+            ArrayList<Mutable<ILogicalExpression>> exprArray = new ArrayList<Mutable<ILogicalExpression>>(1);
+            exprArray.add(new MutableObject<ILogicalExpression>(a1));
+            AssignOperator assignVar = new AssignOperator(varArray, exprArray);
+            x = new VariableReferenceExpression(var1);
+            assignVar.getInputs().add(opUnder);
+            opUnder = new MutableObject<ILogicalOperator>(assignVar);
+        }
+
+        // let $t := type-of(x)
+        LogicalVariable t = context.newVar();
+
+        AbstractFunctionCallExpression typeOf = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TYPE_OF));
+        typeOf.getArguments().add(new MutableObject<ILogicalExpression>(x));
+        AssignOperator typAssign = new AssignOperator(t, new MutableObject<ILogicalExpression>(typeOf));
+        typAssign.getInputs().add(opUnder);
+
+        // let $w := get-handle($t, path-expression)
+        LogicalVariable w = context.newVar();
+        AbstractFunctionCallExpression getHandle = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_HANDLE));
+        getHandle.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(t)));
+        // the accessed field
+        getHandle.getArguments().add(new MutableObject<ILogicalExpression>(fce.getArguments().get(1).getValue()));
+        AssignOperator handleAssign = new AssignOperator(w, new MutableObject<ILogicalExpression>(getHandle));
+        handleAssign.getInputs().add(new MutableObject<ILogicalOperator>(typAssign));
+
+        // let $y := get-data(x, $w)
+        AbstractFunctionCallExpression getData = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_DATA));
+        VariableReferenceExpression ref2 = new VariableReferenceExpression(x.getVariableReference());
+        getData.getArguments().add(new MutableObject<ILogicalExpression>(ref2));
+        getData.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(w)));
+        fieldAccessOp.getExpressions().get(0).setValue(getData);
+        List<Mutable<ILogicalOperator>> faInputs = fieldAccessOp.getInputs();
+        faInputs.clear();
+        faInputs.add(new MutableObject<ILogicalOperator>(handleAssign));
+
+        // fieldAccess.setAnnotation(OperatorAnnotation.FIELD_ACCESS,
+        // fce.getArguments().get(0));
+        fieldAccessOp.removeAnnotation(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
new file mode 100644
index 0000000..0dee2be
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class ByNameToByIndexFieldAccessRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator assign = (AssignOperator) op;
+        if (assign.getExpressions().get(0).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+
+        List<Mutable<ILogicalExpression>> expressions = assign.getExpressions();
+        boolean changed = false;
+        for (int i = 0; i < expressions.size(); i++) {
+            AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expressions.get(i).getValue();
+            if (fce.getFunctionIdentifier() != AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME) {
+                continue;
+            }
+            IVariableTypeEnvironment env = context.getOutputTypeEnvironment(op);
+
+            ILogicalExpression a0 = fce.getArguments().get(0).getValue();
+            if (a0.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                LogicalVariable var1 = context.newVar();
+                ArrayList<LogicalVariable> varArray = new ArrayList<LogicalVariable>(1);
+                varArray.add(var1);
+                ArrayList<Mutable<ILogicalExpression>> exprArray = new ArrayList<Mutable<ILogicalExpression>>(1);
+                exprArray.add(new MutableObject<ILogicalExpression>(a0));
+                AssignOperator assignVar = new AssignOperator(varArray, exprArray);
+                fce.getArguments().get(0).setValue(new VariableReferenceExpression(var1));
+                assignVar.getInputs().add(new MutableObject<ILogicalOperator>(assign.getInputs().get(0).getValue()));
+                assign.getInputs().get(0).setValue(assignVar);
+                context.computeAndSetTypeEnvironmentForOperator(assignVar);
+                context.computeAndSetTypeEnvironmentForOperator(assign);
+                //access by name was not replaced by access by index, but the plan was altered, hence changed is true
+                changed = true;
+            }
+
+            IAType t = (IAType) env.getType(fce.getArguments().get(0).getValue());
+            try {
+                switch (t.getTypeTag()) {
+                    case ANY: {
+                        return false || changed;
+                    }
+                    case RECORD: {
+                        ARecordType recType = (ARecordType) t;
+                        ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
+                        if (fai == null) {
+                            return false || changed;
+                        }
+                        expressions.get(i).setValue(fai);
+                        changed = true;
+                        break;
+                    }
+                    case UNION: {
+                        AUnionType unionT = (AUnionType) t;
+                        if (unionT.isNullableType()) {
+                            IAType t2 = unionT.getNullableType();
+                            if (t2.getTypeTag() == ATypeTag.RECORD) {
+                                ARecordType recType = (ARecordType) t2;
+                                ILogicalExpression fai = createFieldAccessByIndex(recType, fce);
+                                if (fai == null) {
+                                    return false || changed;
+                                }
+                                expressions.get(i).setValue(fai);
+                                changed = true;
+                                break;
+                            }
+                        }
+                        throw new NotImplementedException("Union " + unionT);
+                    }
+                    default: {
+                        throw new AlgebricksException("Cannot call field-access on data of type " + t);
+                    }
+                }
+            } catch (IOException e) {
+                throw new AlgebricksException(e);
+            }
+        }
+        assign.removeAnnotation(AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS);
+        return changed;
+    }
+
+    @SuppressWarnings("unchecked")
+    private static ILogicalExpression createFieldAccessByIndex(ARecordType recType, AbstractFunctionCallExpression fce)
+            throws IOException {
+        String s = getStringSecondArgument(fce);
+        if (s == null) {
+            return null;
+        }
+        int k = recType.findFieldPosition(s);
+        if (k < 0) {
+            return null;
+        }
+        return new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
+                fce.getArguments().get(0), new MutableObject<ILogicalExpression>(new ConstantExpression(
+                        new AsterixConstantValue(new AInt32(k)))));
+    }
+
+    private static String getStringSecondArgument(AbstractFunctionCallExpression expr) {
+        ILogicalExpression e2 = expr.getArguments().get(1).getValue();
+        if (e2.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+            return null;
+        }
+        ConstantExpression c = (ConstantExpression) e2;
+        if (!(c.getValue() instanceof AsterixConstantValue)) {
+            return null;
+        }
+        IAObject v = ((AsterixConstantValue) c.getValue()).getObject();
+        if (v.getType().getTypeTag() != ATypeTag.STRING) {
+            return null;
+        }
+        return ((AString) v).getStringValue();
+    }
+}
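
Once the record type of the accessed argument is known and the field is declared in its closed part, the by-name access can become a positional access, so the field no longer has to be resolved by string comparison at runtime. The core of that rewrite, reduced to the expression it emits, is sketched below; the wrapper class, method name and parameters are illustrative only and mirror the private createFieldAccessByIndex helper above.

    import java.io.IOException;

    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.aql.util.FunctionUtils;
    import edu.uci.ics.asterix.om.base.AInt32;
    import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
    import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
    import edu.uci.ics.asterix.om.types.ARecordType;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

    // Illustrative helper: rewrites field-access-by-name($r, fieldName) into
    // field-access-by-index($r, k) when fieldName is declared at position k of the record type.
    public class FieldAccessByIndexSketch {
        @SuppressWarnings("unchecked")
        public static ILogicalExpression rewrite(ARecordType recType, AbstractFunctionCallExpression byNameCall,
                String fieldName) throws IOException {
            int k = recType.findFieldPosition(fieldName);
            if (k < 0) {
                // not a declared (closed) field: keep the by-name access, exactly as the rule does
                return null;
            }
            return new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX),
                    byNameCall.getArguments().get(0), // reuse the record argument as-is
                    new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
                            new AInt32(k)))));
        }
    }
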

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
new file mode 100644
index 0000000..73b0de7
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
@@ -0,0 +1,282 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RunningAggregatePOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnpartitionedPropertyComputer;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule cancels the unnest with the nested listify. Formally, the following plan<br/>
+ *
+ * <pre>
+ * unnest $x <- $y
+ *   group-by($k){
+ *     aggregate $y <- listify($z)
+ *     ...
+ *   }
+ * </pre>
+ *
+ * will be converted into<br/>
+ *
+ * <pre>
+ * assign $x <- $z
+ *   sort($k)
+ * </pre>
+ *
+ * When the positional variable exists, for example when the original plan is<br/>
+ *
+ * <pre>
+ * unnest $x at $p <- $y
+ *   group-by($k){
+ *     aggregate $y <- listify($z)
+ *     ...
+ *   }
+ * </pre>
+ *
+ * will be converted into<br/>
+ *
+ * <pre>
+ * group-by($k){
+ *   running-aggregate $p <- tid()
+ * }
+ * </pre>
+ */
+public class CancelUnnestWithNestedListifyRule implements IAlgebraicRewriteRule {
+
+    /* (non-Javadoc)
+     * @see edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule#rewritePost(org.apache.commons.lang3.mutable.Mutable, edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext)
+     */
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        // apply it only at the top of the plan
+        ILogicalOperator op = opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        Set<LogicalVariable> varSet = new HashSet<LogicalVariable>();
+        return applyRuleDown(opRef, varSet, context);
+    }
+
+    private boolean applyRuleDown(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varSet,
+            IOptimizationContext context) throws AlgebricksException {
+        boolean changed = applies(opRef, varSet, context);
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        VariableUtilities.getUsedVariables(op, varSet);
+        if (op.hasNestedPlans()) {
+            AbstractOperatorWithNestedPlans aonp = (AbstractOperatorWithNestedPlans) op;
+            for (ILogicalPlan p : aonp.getNestedPlans()) {
+                for (Mutable<ILogicalOperator> r : p.getRoots()) {
+                    if (applyRuleDown(r, varSet, context)) {
+                        changed = true;
+                    }
+                    context.addToDontApplySet(this, r.getValue());
+                }
+            }
+        }
+        for (Mutable<ILogicalOperator> i : op.getInputs()) {
+            if (applyRuleDown(i, varSet, context)) {
+                changed = true;
+            }
+            context.addToDontApplySet(this, i.getValue());
+        }
+        return changed;
+    }
+
+    private boolean applies(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varUsedAbove,
+            IOptimizationContext context) throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+            return false;
+        }
+        UnnestOperator unnest1 = (UnnestOperator) op1;
+        ILogicalExpression expr = unnest1.getExpressionRef().getValue();
+        LogicalVariable unnestedVar;
+        switch (expr.getExpressionTag()) {
+            case VARIABLE:
+                unnestedVar = ((VariableReferenceExpression) expr).getVariableReference();
+                break;
+            case FUNCTION_CALL:
+                if (((AbstractFunctionCallExpression) expr).getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
+                    return false;
+                }
+                AbstractFunctionCallExpression functionCall = (AbstractFunctionCallExpression) expr;
+                ILogicalExpression functionCallArgExpr = functionCall.getArguments().get(0).getValue();
+                if (functionCallArgExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                    return false;
+                }
+                unnestedVar = ((VariableReferenceExpression) functionCallArgExpr).getVariableReference();
+                break;
+            default:
+                return false;
+        }
+        if (varUsedAbove.contains(unnestedVar)) {
+            return false;
+        }
+
+        Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
+        AbstractLogicalOperator r = (AbstractLogicalOperator) opRef2.getValue();
+
+        if (r.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+
+        // go inside of a group-by plan
+        GroupByOperator gby = (GroupByOperator) r;
+        if (gby.getNestedPlans().size() != 1) {
+            return false;
+        }
+        if (gby.getNestedPlans().get(0).getRoots().size() != 1) {
+            return false;
+        }
+
+        AbstractLogicalOperator nestedPlanRoot = (AbstractLogicalOperator) gby.getNestedPlans().get(0).getRoots()
+                .get(0).getValue();
+        if (nestedPlanRoot.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AggregateOperator agg = (AggregateOperator) nestedPlanRoot;
+        Mutable<ILogicalOperator> aggInputOpRef = agg.getInputs().get(0);
+
+        if (agg.getVariables().size() > 1) {
+            return false;
+        }
+
+        LogicalVariable aggVar = agg.getVariables().get(0);
+        ILogicalExpression aggFun = agg.getExpressions().get(0).getValue();
+        if (!aggVar.equals(unnestedVar)
+                || ((AbstractLogicalExpression) aggFun).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) aggFun;
+        if (!AsterixBuiltinFunctions.LISTIFY.equals(f.getFunctionIdentifier())) {
+            return false;
+        }
+        if (f.getArguments().size() != 1) {
+            return false;
+        }
+        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
+        if (((AbstractLogicalExpression) arg0).getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+        LogicalVariable paramVar = ((VariableReferenceExpression) arg0).getVariableReference();
+
+        ArrayList<LogicalVariable> assgnVars = new ArrayList<LogicalVariable>(1);
+        assgnVars.add(unnest1.getVariable());
+        ArrayList<Mutable<ILogicalExpression>> assgnExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
+        assgnExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(paramVar)));
+        AssignOperator assign = new AssignOperator(assgnVars, assgnExprs);
+
+        LogicalVariable posVar = unnest1.getPositionalVariable();
+        if (posVar == null) {
+            // Creates assignment for group-by keys.
+            ArrayList<LogicalVariable> gbyKeyAssgnVars = new ArrayList<LogicalVariable>();
+            ArrayList<Mutable<ILogicalExpression>> gbyKeyAssgnExprs = new ArrayList<Mutable<ILogicalExpression>>();
+            for (int i = 0; i < gby.getGroupByList().size(); i++) {
+                if (gby.getGroupByList().get(i).first != null) {
+                    gbyKeyAssgnVars.add(gby.getGroupByList().get(i).first);
+                    gbyKeyAssgnExprs.add(gby.getGroupByList().get(i).second);
+                }
+            }
+
+            // Moves the nested pipeline before aggregation out of the group-by op.
+            Mutable<ILogicalOperator> bottomOpRef = aggInputOpRef;
+            AbstractLogicalOperator bottomOp = (AbstractLogicalOperator) bottomOpRef.getValue();
+            while (bottomOp.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
+                bottomOpRef = bottomOp.getInputs().get(0);
+                bottomOp = (AbstractLogicalOperator) bottomOpRef.getValue();
+            }
+
+            // Removes the group-by operator.
+            opRef.setValue(assign);
+            assign.getInputs().add(aggInputOpRef);
+            AssignOperator gbyKeyAssign = new AssignOperator(gbyKeyAssgnVars, gbyKeyAssgnExprs);
+            gbyKeyAssign.getInputs().add(gby.getInputs().get(0));
+            bottomOpRef.setValue(gbyKeyAssign);
+
+            context.computeAndSetTypeEnvironmentForOperator(gbyKeyAssign);
+            context.computeAndSetTypeEnvironmentForOperator(assign);
+        } else {
+            // if positional variable is used in unnest, the unnest will be pushed into the group-by as a running-aggregate
+
+            // First create assign for the unnest variable
+            List<LogicalVariable> nestedAssignVars = new ArrayList<LogicalVariable>();
+            List<Mutable<ILogicalExpression>> nestedAssignExprs = new ArrayList<Mutable<ILogicalExpression>>();
+            nestedAssignVars.add(unnest1.getVariable());
+            nestedAssignExprs.add(new MutableObject<ILogicalExpression>(arg0));
+            AssignOperator nestedAssign = new AssignOperator(nestedAssignVars, nestedAssignExprs);
+            nestedAssign.getInputs().add(opRef2);
+
+            // Then create running aggregation for the positional variable
+            List<LogicalVariable> raggVars = new ArrayList<LogicalVariable>();
+            List<Mutable<ILogicalExpression>> raggExprs = new ArrayList<Mutable<ILogicalExpression>>();
+            raggVars.add(posVar);
+            StatefulFunctionCallExpression fce = new StatefulFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TID), UnpartitionedPropertyComputer.INSTANCE);
+            raggExprs.add(new MutableObject<ILogicalExpression>(fce));
+            RunningAggregateOperator raggOp = new RunningAggregateOperator(raggVars, raggExprs);
+            raggOp.setExecutionMode(unnest1.getExecutionMode());
+            RunningAggregatePOperator raggPOp = new RunningAggregatePOperator();
+            raggOp.setPhysicalOperator(raggPOp);
+            raggOp.getInputs().add(nestedPlanRoot.getInputs().get(0));
+            gby.getNestedPlans().get(0).getRoots().set(0, new MutableObject<ILogicalOperator>(raggOp));
+
+            opRef.setValue(nestedAssign);
+
+            context.computeAndSetTypeEnvironmentForOperator(nestedAssign);
+            context.computeAndSetTypeEnvironmentForOperator(raggOp);
+            context.computeAndSetTypeEnvironmentForOperator(gby);
+
+        }
+
+        return true;
+    }
+}
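
For the positional-variable case, the key observation is that the position the unnest would have produced can be recomputed by a tid() running aggregate placed inside the group-by's nested plan. A minimal sketch of just that operator construction follows, reusing the classes from the rule; the wrapper class and method are illustrative, and the resulting operator still has to be wired into the nested plan exactly as the rule does above.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.aql.util.FunctionUtils;
    import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RunningAggregatePOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnpartitionedPropertyComputer;

    // Illustrative helper: builds "running-aggregate $p <- tid()", the operator the rule uses to
    // recompute the positional variable inside the group-by instead of listifying and unnesting.
    public class PositionalTidSketch {
        public static RunningAggregateOperator build(LogicalVariable posVar) {
            List<LogicalVariable> raggVars = new ArrayList<LogicalVariable>();
            List<Mutable<ILogicalExpression>> raggExprs = new ArrayList<Mutable<ILogicalExpression>>();
            raggVars.add(posVar);
            // tid() numbers the tuples flowing through the nested plan, which is the position the
            // original unnest would have reported
            StatefulFunctionCallExpression tid = new StatefulFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TID),
                    UnpartitionedPropertyComputer.INSTANCE);
            raggExprs.add(new MutableObject<ILogicalExpression>(tid));
            RunningAggregateOperator raggOp = new RunningAggregateOperator(raggVars, raggExprs);
            raggOp.setPhysicalOperator(new RunningAggregatePOperator());
            return raggOp;
        }
    }
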

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java
new file mode 100644
index 0000000..7e9ebcc
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CheckFilterExpressionTypeRule.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule checks that all the filter expressions are of the boolean type or of a possibly
+ * boolean type (determined at runtime).
+ * If that is not the case, an exception should be thrown.
+ *
+ * @author yingyib
+ */
+public class CheckFilterExpressionTypeRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator select = (SelectOperator) op;
+        ILogicalExpression condition = select.getCondition().getValue();
+        IVariableTypeEnvironment env = select.computeOutputTypeEnvironment(context);
+        IAType condType = (IAType) env.getType(condition);
+        if (condType.getTypeTag() != ATypeTag.BOOLEAN && condType.getTypeTag() != ATypeTag.ANY
+                && !isPossibleBoolean(condType)) {
+            throw new AlgebricksException("The select condition " + condition.toString()
+                    + " should be of the boolean type.");
+        }
+        return false;
+    }
+
+    /**
+     * Check if the type is optional boolean or not
+     * 
+     * @param type
+     * @return true if it is; false otherwise.
+     */
+    private boolean isPossibleBoolean(IAType type) {
+        while (NonTaggedFormatUtil.isOptional(type)) {
+            type = ((AUnionType) type).getNullableType();
+            if (type.getTypeTag() == ATypeTag.BOOLEAN || type.getTypeTag() == ATypeTag.ANY) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}
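
All of the rules added above implement IAlgebraicRewriteRule, directly or through their base classes, so they are used by placing them in the rule lists that an Algebricks rewriter pass runs. The snippet below is only a sketch of that wiring, assuming each rule's implicit default constructor is sufficient; the project's real rule collections, with their ordering constraints, live elsewhere in asterix-algebra and are not part of this hunk.

    import java.util.ArrayList;
    import java.util.List;

    import edu.uci.ics.asterix.optimizer.rules.AsterixIntroduceGroupByCombinerRule;
    import edu.uci.ics.asterix.optimizer.rules.AsterixMoveFreeVariableOperatorOutOfSubplanRule;
    import edu.uci.ics.asterix.optimizer.rules.ByNameToByHandleFieldAccessRule;
    import edu.uci.ics.asterix.optimizer.rules.ByNameToByIndexFieldAccessRule;
    import edu.uci.ics.asterix.optimizer.rules.CancelUnnestWithNestedListifyRule;
    import edu.uci.ics.asterix.optimizer.rules.CheckFilterExpressionTypeRule;
    import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

    // Illustrative only: a flat rule list of the kind an Algebricks rewriter pass consumes.
    public class RuleListSketch {
        public static List<IAlgebraicRewriteRule> someAsterixRules() {
            List<IAlgebraicRewriteRule> rules = new ArrayList<IAlgebraicRewriteRule>();
            rules.add(new CheckFilterExpressionTypeRule());
            rules.add(new ByNameToByIndexFieldAccessRule());
            rules.add(new ByNameToByHandleFieldAccessRule());
            rules.add(new CancelUnnestWithNestedListifyRule());
            rules.add(new AsterixIntroduceGroupByCombinerRule());
            rules.add(new AsterixMoveFreeVariableOperatorOutOfSubplanRule());
            return rules;
        }
    }
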


[14/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java
deleted file mode 100644
index ffecc8e..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryBTreeOperationsHelper.java
+++ /dev/null
@@ -1,363 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.external.IndexingConstants;
-import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.resource.ExternalBTreeWithBuddyLocalResourceMetadata;
-import edu.uci.ics.asterix.transaction.management.resource.LSMBTreeLocalResourceMetadata;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.storage.common.file.LocalResource;
-
-public class SecondaryBTreeOperationsHelper extends SecondaryIndexOperationsHelper {
-
-    protected SecondaryBTreeOperationsHelper(PhysicalOptimizationConfig physOptConf,
-            IAsterixPropertiesProvider propertiesProvider) {
-        super(physOptConf, propertiesProvider);
-    }
-
-    @Override
-    public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-        ILocalResourceFactoryProvider localResourceFactoryProvider;
-        IIndexDataflowHelperFactory indexDataflowHelperFactory;
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
-            ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(secondaryTypeTraits,
-                    secondaryComparatorFactories, secondaryBloomFilterKeyFields, true, dataset.getDatasetId(),
-                    mergePolicyFactory, mergePolicyFactoryProperties, filterTypeTraits, filterCmpFactories,
-                    secondaryBTreeFields, secondaryFilterFields);
-            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
-                    LocalResource.LSMBTreeResource);
-            // The index create operation should be persistent regardless of temp datasets or permanent dataset.
-            indexDataflowHelperFactory = new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                    dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
-                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                    storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
-                    secondaryBTreeFields, secondaryFilterFields, true);
-        } else {
-            // External dataset local resource and dataflow helper
-            int[] buddyBreeFields = new int[] { numSecondaryKeys };
-            ILocalResourceMetadata localResourceMetadata = new ExternalBTreeWithBuddyLocalResourceMetadata(
-                    dataset.getDatasetId(), secondaryComparatorFactories, secondaryTypeTraits, mergePolicyFactory,
-                    mergePolicyFactoryProperties, buddyBreeFields);
-            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
-                    LocalResource.ExternalBTreeWithBuddyResource);
-            indexDataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory,
-                    mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
-                    storageProperties.getBloomFilterFalsePositiveRate(), buddyBreeFields,
-                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-        }
-        TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                secondaryFileSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
-                secondaryBloomFilterKeyFields, indexDataflowHelperFactory, localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
-                secondaryPartitionConstraint);
-        spec.addRoot(secondaryIndexCreateOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    @Override
-    public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            /*
-             * In case of external data, this method is used to build loading jobs for both initial load on index creation
-             * and transaction load on dataset refresh
-             */
-
-            // Create external indexing scan operator
-            ExternalDataScanOperatorDescriptor primaryScanOp = createExternalIndexingOp(spec);
-
-            // Assign op.
-            AbstractOperatorDescriptor sourceOp = primaryScanOp;
-            if (isEnforcingKeyTypes) {
-                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
-                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
-            }
-            AlgebricksMetaOperatorDescriptor asterixAssignOp = createExternalAssignOp(spec, numSecondaryKeys);
-
-            // If any of the secondary fields are nullable, then add a select op that filters nulls.
-            AlgebricksMetaOperatorDescriptor selectOp = null;
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
-            }
-
-            // Sort by secondary keys.
-            ExternalSortOperatorDescriptor sortOp = createSortOp(spec, secondaryComparatorFactories, secondaryRecDesc);
-
-            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-            // Create secondary BTree bulk load op.
-            AbstractTreeIndexOperatorDescriptor secondaryBulkLoadOp;
-            ExternalBTreeWithBuddyDataflowHelperFactory dataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(
-                    mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                            dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
-                    storageProperties.getBloomFilterFalsePositiveRate(), new int[] { numSecondaryKeys },
-                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-            IOperatorDescriptor root;
-            if (externalFiles != null) {
-                // Transaction load
-                secondaryBulkLoadOp = createExternalIndexBulkModifyOp(spec, numSecondaryKeys, dataflowHelperFactory,
-                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
-                root = secondaryBulkLoadOp;
-            } else {
-                // Initial load
-                secondaryBulkLoadOp = createTreeIndexBulkLoadOp(spec, numSecondaryKeys, dataflowHelperFactory,
-                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
-                AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
-                        new IPushRuntimeFactory[] { new SinkRuntimeFactory() },
-                        new RecordDescriptor[] { secondaryRecDesc });
-                spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
-                root = metaOp;
-            }
-            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
-                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
-            } else {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
-            }
-            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
-            spec.addRoot(root);
-            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-            return spec;
-        } else {
-            // Create dummy key provider for feeding the primary index scan. 
-            AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
-
-            // Create primary index scan op.
-            BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
-
-            // Assign op.
-            AbstractOperatorDescriptor sourceOp = primaryScanOp;
-            if (isEnforcingKeyTypes) {
-                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
-                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
-            }
-            AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, sourceOp, numSecondaryKeys);
-
-            // If any of the secondary fields are nullable, then add a select op that filters nulls.
-            AlgebricksMetaOperatorDescriptor selectOp = null;
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
-            }
-
-            // Sort by secondary keys.
-            ExternalSortOperatorDescriptor sortOp = createSortOp(spec, secondaryComparatorFactories, secondaryRecDesc);
-
-            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-            boolean temp = dataset.getDatasetDetails().isTemp();
-            // Create secondary BTree bulk load op.
-            TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(
-                    spec,
-                    numSecondaryKeys,
-                    new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                            mergePolicyFactory, mergePolicyFactoryProperties,
-                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
-                                    .getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
-                            secondaryBTreeFields, secondaryFilterFields, !temp), GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
-
-            AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
-                    new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] { secondaryRecDesc });
-            // Connect the operators.
-            spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
-            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
-            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
-                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
-            } else {
-                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
-            }
-            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
-            spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
-            spec.addRoot(metaOp);
-            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-            return spec;
-        }
-    }
-
-    @Override
-    protected int getNumSecondaryKeys() {
-        return numSecondaryKeys;
-    }
-
-    @Override
-    public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        LSMTreeIndexCompactOperatorDescriptor compactOp;
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
-                    secondaryComparatorFactories, secondaryBloomFilterKeyFields, new LSMBTreeDataflowHelperFactory(
-                            new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), mergePolicyFactory,
-                            mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                                    dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                            storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
-                            filterCmpFactories, secondaryBTreeFields, secondaryFilterFields, !temp),
-                    NoOpOperationCallbackFactory.INSTANCE);
-        } else {
-            // External dataset
-            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
-                    secondaryComparatorFactories, secondaryBloomFilterKeyFields,
-                    new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
-                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE, storageProperties
-                                    .getBloomFilterFalsePositiveRate(), new int[] { numSecondaryKeys },
-                            ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true),
-                    NoOpOperationCallbackFactory.INSTANCE);
-        }
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
-                secondaryPartitionConstraint);
-        spec.addRoot(compactOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    @Override
-    @SuppressWarnings("rawtypes")
-    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
-            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
-        secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys + numFilterFields];
-        secondaryComparatorFactories = new IBinaryComparatorFactory[numSecondaryKeys + numPrimaryKeys];
-        secondaryBloomFilterKeyFields = new int[numSecondaryKeys];
-        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys
-                + numFilterFields];
-        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
-        secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
-        ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
-        ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
-        ITypeTraitProvider typeTraitProvider = metadataProvider.getFormat().getTypeTraitProvider();
-        IBinaryComparatorFactoryProvider comparatorFactoryProvider = metadataProvider.getFormat()
-                .getBinaryComparatorFactoryProvider();
-        // Record column is 0 for external datasets, numPrimaryKeys for internal ones
-        int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
-        for (int i = 0; i < numSecondaryKeys; i++) {
-            secondaryFieldAccessEvalFactories[i] = metadataProvider.getFormat().getFieldAccessEvaluatorFactory(
-                    isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(i), recordColumn);
-            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(i),
-                    secondaryKeyFields.get(i), itemType);
-            IAType keyType = keyTypePair.first;
-            anySecondaryKeyIsNullable = anySecondaryKeyIsNullable || keyTypePair.second;
-            ISerializerDeserializer keySerde = serdeProvider.getSerializerDeserializer(keyType);
-            secondaryRecFields[i] = keySerde;
-            secondaryComparatorFactories[i] = comparatorFactoryProvider.getBinaryComparatorFactory(keyType, true);
-            secondaryTypeTraits[i] = typeTraitProvider.getTypeTrait(keyType);
-            secondaryBloomFilterKeyFields[i] = i;
-        }
-        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-            // Add serializers and comparators for primary index fields.
-            for (int i = 0; i < numPrimaryKeys; i++) {
-                secondaryRecFields[numSecondaryKeys + i] = primaryRecDesc.getFields()[i];
-                enforcedRecFields[i] = primaryRecDesc.getFields()[i];
-                secondaryTypeTraits[numSecondaryKeys + i] = primaryRecDesc.getTypeTraits()[i];
-                enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i];
-                secondaryComparatorFactories[numSecondaryKeys + i] = primaryComparatorFactories[i];
-            }
-        } else {
-            // Add serializers and comparators for RID fields.
-            for (int i = 0; i < numPrimaryKeys; i++) {
-                secondaryRecFields[numSecondaryKeys + i] = IndexingConstants.getSerializerDeserializer(i);
-                enforcedRecFields[i] = IndexingConstants.getSerializerDeserializer(i);
-                secondaryTypeTraits[numSecondaryKeys + i] = IndexingConstants.getTypeTraits(i);
-                enforcedTypeTraits[i] = IndexingConstants.getTypeTraits(i);
-                secondaryComparatorFactories[numSecondaryKeys + i] = IndexingConstants.getComparatorFactory(i);
-            }
-        }
-        enforcedRecFields[numPrimaryKeys] = serdeProvider.getSerializerDeserializer(itemType);
-
-        if (numFilterFields > 0) {
-            secondaryFieldAccessEvalFactories[numSecondaryKeys] = metadataProvider.getFormat()
-                    .getFieldAccessEvaluatorFactory(itemType, filterFieldName, numPrimaryKeys);
-            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
-            IAType type = keyTypePair.first;
-            ISerializerDeserializer serde = serdeProvider.getSerializerDeserializer(type);
-            secondaryRecFields[numPrimaryKeys + numSecondaryKeys] = serde;
-        }
-
-        secondaryRecDesc = new RecordDescriptor(secondaryRecFields, secondaryTypeTraits);
-        enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
-
-    }
-}

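A minimal sketch (not from the original source) of how the loading jobs above wire in the null-filtering select operator: it is placed between the assign and the sort only when a secondary key may be null or key types are being enforced; otherwise the assign feeds the sort directly. The operator descriptors (asterixAssignOp, selectOp, sortOp, secondaryBulkLoadOp) are assumed to be the ones produced by the helper methods in SecondaryIndexOperationsHelper below; the upstream variable exists only for this illustration.

    IOperatorDescriptor upstream = asterixAssignOp;
    if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
        // Drop tuples whose secondary keys are null before they reach the sort and bulk load.
        spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
        upstream = selectOp;
    }
    spec.connect(new OneToOneConnectorDescriptor(spec), upstream, 0, sortOp, 0);
    spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);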
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
deleted file mode 100644
index 07c8bab..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryIndexOperationsHelper.java
+++ /dev/null
@@ -1,575 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.file;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.context.ITransactionSubsystemProvider;
-import edu.uci.ics.asterix.common.context.TransactionSubsystemProvider;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.common.transactions.IRecoveryManager.ResourceType;
-import edu.uci.ics.asterix.common.transactions.JobId;
-import edu.uci.ics.asterix.external.indexing.operators.ExternalIndexBulkModifyOperatorDescriptor;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.ExternalFile;
-import edu.uci.ics.asterix.metadata.external.IndexingConstants;
-import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.runtime.evaluators.functions.AndDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.CastRecordDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.IsNullDescriptor;
-import edu.uci.ics.asterix.runtime.evaluators.functions.NotDescriptor;
-import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.evaluators.ColumnAccessEvalFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IJobletEventListenerFactory;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-
-@SuppressWarnings("rawtypes")
-// TODO: We should eventually have a hierarchy of classes that can create all
-// possible index job specs, not just the creation job spec.
-public abstract class SecondaryIndexOperationsHelper {
-    protected final PhysicalOptimizationConfig physOptConf;
-
-    protected int numPrimaryKeys;
-    protected int numSecondaryKeys;
-    protected AqlMetadataProvider metadataProvider;
-    protected String dataverseName;
-    protected String datasetName;
-    protected Dataset dataset;
-    protected ARecordType itemType;
-    protected ISerializerDeserializer payloadSerde;
-    protected IFileSplitProvider primaryFileSplitProvider;
-    protected AlgebricksPartitionConstraint primaryPartitionConstraint;
-    protected IFileSplitProvider secondaryFileSplitProvider;
-    protected AlgebricksPartitionConstraint secondaryPartitionConstraint;
-    protected String secondaryIndexName;
-    protected boolean anySecondaryKeyIsNullable = false;
-    protected boolean isEnforcingKeyTypes = false;
-
-    protected long numElementsHint;
-    protected IBinaryComparatorFactory[] primaryComparatorFactories;
-    protected int[] primaryBloomFilterKeyFields;
-    protected RecordDescriptor primaryRecDesc;
-    protected IBinaryComparatorFactory[] secondaryComparatorFactories;
-    protected ITypeTraits[] secondaryTypeTraits;
-    protected int[] secondaryBloomFilterKeyFields;
-    protected RecordDescriptor secondaryRecDesc;
-    protected ICopyEvaluatorFactory[] secondaryFieldAccessEvalFactories;
-
-    protected IAsterixPropertiesProvider propertiesProvider;
-    protected ILSMMergePolicyFactory mergePolicyFactory;
-    protected Map<String, String> mergePolicyFactoryProperties;
-    protected RecordDescriptor enforcedRecDesc;
-    protected ARecordType enforcedItemType;
-
-    protected int numFilterFields;
-    protected List<String> filterFieldName;
-    protected ITypeTraits[] filterTypeTraits;
-    protected IBinaryComparatorFactory[] filterCmpFactories;
-    protected int[] secondaryFilterFields;
-    protected int[] primaryFilterFields;
-    protected int[] primaryBTreeFields;
-    protected int[] secondaryBTreeFields;
-    protected List<ExternalFile> externalFiles;
-
-    // Prevent public construction. Instances should be created via createIndexOperationsHelper().
-    protected SecondaryIndexOperationsHelper(PhysicalOptimizationConfig physOptConf,
-            IAsterixPropertiesProvider propertiesProvider) {
-        this.physOptConf = physOptConf;
-        this.propertiesProvider = propertiesProvider;
-    }
-
-    public static SecondaryIndexOperationsHelper createIndexOperationsHelper(IndexType indexType, String dataverseName,
-            String datasetName, String indexName, List<List<String>> secondaryKeyFields,
-            List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
-            PhysicalOptimizationConfig physOptConf, ARecordType recType, ARecordType enforcedType)
-            throws AsterixException, AlgebricksException {
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        SecondaryIndexOperationsHelper indexOperationsHelper = null;
-        switch (indexType) {
-            case BTREE: {
-                indexOperationsHelper = new SecondaryBTreeOperationsHelper(physOptConf, asterixPropertiesProvider);
-                break;
-            }
-            case RTREE: {
-                indexOperationsHelper = new SecondaryRTreeOperationsHelper(physOptConf, asterixPropertiesProvider);
-                break;
-            }
-            case SINGLE_PARTITION_WORD_INVIX:
-            case SINGLE_PARTITION_NGRAM_INVIX:
-            case LENGTH_PARTITIONED_WORD_INVIX:
-            case LENGTH_PARTITIONED_NGRAM_INVIX: {
-                indexOperationsHelper = new SecondaryInvertedIndexOperationsHelper(physOptConf,
-                        asterixPropertiesProvider);
-                break;
-            }
-            default: {
-                throw new AsterixException("Unknown Index Type: " + indexType);
-            }
-        }
-        indexOperationsHelper.init(indexType, dataverseName, datasetName, indexName, secondaryKeyFields,
-                secondaryKeyTypes, isEnforced, gramLength, metadataProvider, recType, enforcedType);
-        return indexOperationsHelper;
-    }
-
-    public abstract JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException;
-
-    public abstract JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException;
-
-    public abstract JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException;
-
-    protected void init(IndexType indexType, String dvn, String dsn, String in, List<List<String>> secondaryKeyFields,
-            List<IAType> secondaryKeyTypes, boolean isEnforced, int gramLength, AqlMetadataProvider metadataProvider,
-            ARecordType aRecType, ARecordType enforcedType) throws AsterixException, AlgebricksException {
-        this.metadataProvider = metadataProvider;
-        dataverseName = dvn == null ? metadataProvider.getDefaultDataverseName() : dvn;
-        datasetName = dsn;
-        secondaryIndexName = in;
-        isEnforcingKeyTypes = isEnforced;
-        dataset = metadataProvider.findDataset(dataverseName, datasetName);
-        if (dataset == null) {
-            throw new AsterixException("Unknown dataset " + datasetName);
-        }
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        itemType = aRecType;
-        enforcedItemType = enforcedType;
-        payloadSerde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(itemType);
-        numSecondaryKeys = secondaryKeyFields.size();
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, secondaryIndexName, temp);
-        secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
-        secondaryPartitionConstraint = secondarySplitsAndConstraint.second;
-
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            numPrimaryKeys = ExternalIndexingOperations.getRIDSize(dataset);
-        } else {
-            filterFieldName = DatasetUtils.getFilterField(dataset);
-            if (filterFieldName != null) {
-                numFilterFields = 1;
-            } else {
-                numFilterFields = 0;
-            }
-
-            numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
-            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadataProvider
-                    .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
-            primaryFileSplitProvider = primarySplitsAndConstraint.first;
-            primaryPartitionConstraint = primarySplitsAndConstraint.second;
-            setPrimaryRecDescAndComparators();
-        }
-        setSecondaryRecDescAndComparators(indexType, secondaryKeyFields, secondaryKeyTypes, gramLength,
-                metadataProvider);
-        numElementsHint = metadataProvider.getCardinalityPerPartitionHint(dataset);
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-        mergePolicyFactory = compactionInfo.first;
-        mergePolicyFactoryProperties = compactionInfo.second;
-
-        if (numFilterFields > 0) {
-            setFilterTypeTraitsAndComparators();
-        }
-    }
-
-    protected void setFilterTypeTraitsAndComparators() throws AlgebricksException {
-        filterTypeTraits = new ITypeTraits[numFilterFields];
-        filterCmpFactories = new IBinaryComparatorFactory[numFilterFields];
-        secondaryFilterFields = new int[numFilterFields];
-        primaryFilterFields = new int[numFilterFields];
-        primaryBTreeFields = new int[numPrimaryKeys + 1];
-        secondaryBTreeFields = new int[numSecondaryKeys + numPrimaryKeys];
-        for (int i = 0; i < primaryBTreeFields.length; i++) {
-            primaryBTreeFields[i] = i;
-        }
-        for (int i = 0; i < secondaryBTreeFields.length; i++) {
-            secondaryBTreeFields[i] = i;
-        }
-
-        IAType type;
-        try {
-            type = itemType.getSubFieldType(filterFieldName);
-        } catch (IOException e) {
-            throw new AlgebricksException(e);
-        }
-        filterCmpFactories[0] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(type, true);
-        filterTypeTraits[0] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(type);
-        secondaryFilterFields[0] = getNumSecondaryKeys() + numPrimaryKeys;
-        primaryFilterFields[0] = numPrimaryKeys + 1;
-    }
-
-    protected abstract int getNumSecondaryKeys();
-
-    protected void setPrimaryRecDescAndComparators() throws AlgebricksException {
-        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
-        int numPrimaryKeys = partitioningKeys.size();
-        ISerializerDeserializer[] primaryRecFields = new ISerializerDeserializer[numPrimaryKeys + 1];
-        ITypeTraits[] primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1];
-        primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
-        primaryBloomFilterKeyFields = new int[numPrimaryKeys];
-        ISerializerDeserializerProvider serdeProvider = metadataProvider.getFormat().getSerdeProvider();
-        for (int i = 0; i < numPrimaryKeys; i++) {
-            IAType keyType;
-            try {
-                keyType = itemType.getSubFieldType(partitioningKeys.get(i));
-            } catch (IOException e) {
-                throw new AlgebricksException(e);
-            }
-            primaryRecFields[i] = serdeProvider.getSerializerDeserializer(keyType);
-            primaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                    keyType, true);
-            primaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
-            primaryBloomFilterKeyFields[i] = i;
-        }
-        primaryRecFields[numPrimaryKeys] = payloadSerde;
-        primaryTypeTraits[numPrimaryKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
-        primaryRecDesc = new RecordDescriptor(primaryRecFields, primaryTypeTraits);
-    }
-
-    protected abstract void setSecondaryRecDescAndComparators(IndexType indexType,
-            List<List<String>> secondaryKeyFields, List<IAType> secondaryKeyTypes, int gramLength,
-            AqlMetadataProvider metadataProvider) throws AlgebricksException, AsterixException;
-
-    protected AbstractOperatorDescriptor createDummyKeyProviderOp(JobSpecification spec) throws AsterixException,
-            AlgebricksException {
-        // Build dummy tuple containing one field with a dummy value inside.
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
-        DataOutput dos = tb.getDataOutput();
-        tb.reset();
-        try {
-            // Serialize dummy value into a field.
-            IntegerSerializerDeserializer.INSTANCE.serialize(0, dos);
-        } catch (HyracksDataException e) {
-            throw new AsterixException(e);
-        }
-        // Add dummy field.
-        tb.addFieldEndOffset();
-        ISerializerDeserializer[] keyRecDescSers = { IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, keyProviderOp,
-                primaryPartitionConstraint);
-        return keyProviderOp;
-    }
-
-    protected BTreeSearchOperatorDescriptor createPrimaryIndexScanOp(JobSpecification spec) throws AlgebricksException {
-        // -Infinity
-        int[] lowKeyFields = null;
-        // +Infinity
-        int[] highKeyFields = null;
-        ITransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
-        JobId jobId = JobIdFactory.generateJobId();
-        metadataProvider.setJobId(jobId);
-        boolean isWriteTransaction = metadataProvider.isWriteTransaction();
-        IJobletEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(jobId, isWriteTransaction);
-        spec.setJobletEventListenerFactory(jobEventListenerFactory);
-
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        ISearchOperationCallbackFactory searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
-                : new PrimaryIndexInstantSearchOperationCallbackFactory(jobId, dataset.getDatasetId(),
-                        primaryBloomFilterKeyFields, txnSubsystemProvider, ResourceType.LSM_BTREE);
-        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                primaryFileSplitProvider, primaryRecDesc.getTypeTraits(), primaryComparatorFactories,
-                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true,
-                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        mergePolicyFactory, mergePolicyFactoryProperties, new PrimaryIndexOperationTrackerProvider(
-                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
-                                .getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
-                        primaryBTreeFields, primaryFilterFields, !temp), false, false, null,
-                searchCallbackFactory, null, null);
-
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
-                primaryPartitionConstraint);
-        return primarySearchOp;
-    }
-
-    protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec,
-            AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields) throws AlgebricksException {
-        int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
-        int[] projectionList = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
-        for (int i = 0; i < numSecondaryKeyFields + numFilterFields; i++) {
-            outColumns[i] = numPrimaryKeys + i;
-        }
-        int projCount = 0;
-        for (int i = 0; i < numSecondaryKeyFields; i++) {
-            projectionList[projCount++] = numPrimaryKeys + i;
-        }
-        for (int i = 0; i < numPrimaryKeys; i++) {
-            projectionList[projCount++] = i;
-        }
-        if (numFilterFields > 0) {
-            projectionList[projCount++] = numPrimaryKeys + numSecondaryKeyFields;
-        }
-
-        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
-        for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
-            sefs[i] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
-                    secondaryFieldAccessEvalFactories[i]);
-        }
-        AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
-        AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
-                new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixAssignOp,
-                primaryPartitionConstraint);
-        return asterixAssignOp;
-    }
-
-    protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec,
-            AbstractOperatorDescriptor primaryScanOp, int numSecondaryKeyFields, DatasetType dsType) {
-        CastRecordDescriptor castFuncDesc = (CastRecordDescriptor) CastRecordDescriptor.FACTORY
-                .createFunctionDescriptor();
-        castFuncDesc.reset(enforcedItemType, itemType);
-
-        int[] outColumns = new int[1];
-        int[] projectionList = new int[1 + numPrimaryKeys];
-        int recordIdx;
-        // The external data-scan operator returns the record as the first field, whereas internal datasets return it as the last field.
-        if (dsType == DatasetType.EXTERNAL) {
-            recordIdx = 0;
-            outColumns[0] = 0;
-        } else {
-            recordIdx = numPrimaryKeys;
-            outColumns[0] = numPrimaryKeys;
-        }
-        for (int i = 0; i <= numPrimaryKeys; i++) {
-            projectionList[i] = i;
-        }
-        ICopyEvaluatorFactory[] castEvalFact = new ICopyEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
-        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
-        sefs[0] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
-                castFuncDesc.createEvaluatorFactory(castEvalFact));
-        AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
-        AlgebricksMetaOperatorDescriptor castRecAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
-                new IPushRuntimeFactory[] { castAssign }, new RecordDescriptor[] { enforcedRecDesc });
-
-        return castRecAssignOp;
-    }
-
-    protected ExternalSortOperatorDescriptor createSortOp(JobSpecification spec,
-            IBinaryComparatorFactory[] secondaryComparatorFactories, RecordDescriptor secondaryRecDesc) {
-        int[] sortFields = new int[secondaryComparatorFactories.length];
-        for (int i = 0; i < secondaryComparatorFactories.length; i++) {
-            sortFields[i] = i;
-        }
-        ExternalSortOperatorDescriptor sortOp = new ExternalSortOperatorDescriptor(spec,
-                physOptConf.getMaxFramesExternalSort(), sortFields, secondaryComparatorFactories, secondaryRecDesc);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sortOp, primaryPartitionConstraint);
-        return sortOp;
-    }
-
-    protected TreeIndexBulkLoadOperatorDescriptor createTreeIndexBulkLoadOp(JobSpecification spec,
-            int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
-            throws MetadataException, AlgebricksException {
-        int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys + numFilterFields];
-        for (int i = 0; i < fieldPermutation.length; i++) {
-            fieldPermutation[i] = i;
-        }
-        TreeIndexBulkLoadOperatorDescriptor treeIndexBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                secondaryRecDesc, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
-                secondaryRecDesc.getTypeTraits(), secondaryComparatorFactories, secondaryBloomFilterKeyFields,
-                fieldPermutation, fillFactor, false, numElementsHint, false, dataflowHelperFactory);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
-                secondaryPartitionConstraint);
-        return treeIndexBulkLoadOp;
-    }
-
-    public AlgebricksMetaOperatorDescriptor createFilterNullsSelectOp(JobSpecification spec, int numSecondaryKeyFields)
-            throws AlgebricksException {
-        ICopyEvaluatorFactory[] andArgsEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeyFields];
-        NotDescriptor notDesc = new NotDescriptor();
-        IsNullDescriptor isNullDesc = new IsNullDescriptor();
-        for (int i = 0; i < numSecondaryKeyFields; i++) {
-            // Access column i, and apply 'is not null'.
-            ColumnAccessEvalFactory columnAccessEvalFactory = new ColumnAccessEvalFactory(i);
-            ICopyEvaluatorFactory isNullEvalFactory = isNullDesc
-                    .createEvaluatorFactory(new ICopyEvaluatorFactory[] { columnAccessEvalFactory });
-            ICopyEvaluatorFactory notEvalFactory = notDesc
-                    .createEvaluatorFactory(new ICopyEvaluatorFactory[] { isNullEvalFactory });
-            andArgsEvalFactories[i] = notEvalFactory;
-        }
-        ICopyEvaluatorFactory selectCond = null;
-        if (numSecondaryKeyFields > 1) {
-            // Create conjunctive condition where all secondary index keys must
-            // satisfy 'is not null'.
-            AndDescriptor andDesc = new AndDescriptor();
-            selectCond = andDesc.createEvaluatorFactory(andArgsEvalFactories);
-        } else {
-            selectCond = andArgsEvalFactories[0];
-        }
-        StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(
-                new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(selectCond),
-                null, AqlBinaryBooleanInspectorImpl.FACTORY, false, -1, null);
-        AlgebricksMetaOperatorDescriptor asterixSelectOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
-                new IPushRuntimeFactory[] { select }, new RecordDescriptor[] { secondaryRecDesc });
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixSelectOp,
-                primaryPartitionConstraint);
-        return asterixSelectOp;
-    }
-
-    // This method creates a source indexing operator for external data
-    protected ExternalDataScanOperatorDescriptor createExternalIndexingOp(JobSpecification spec)
-            throws AlgebricksException, AsterixException {
-        // A record + primary keys
-        ISerializerDeserializer[] serdes = new ISerializerDeserializer[1 + numPrimaryKeys];
-        ITypeTraits[] typeTraits = new ITypeTraits[1 + numPrimaryKeys];
-        // payload serde and type traits for the record slot
-        serdes[0] = payloadSerde;
-        typeTraits[0] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
-        // serdes and type traits for the RID fields
-        for (int i = 1; i < serdes.length; i++) {
-            serdes[i] = IndexingConstants.getSerializerDeserializer(i - 1);
-            typeTraits[i] = IndexingConstants.getTypeTraits(i - 1);
-        }
-        // output record desc
-        RecordDescriptor indexerDesc = new RecordDescriptor(serdes, typeTraits);
-
-        // Create the operator and its partition constraints
-        Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> indexingOpAndConstraints;
-        try {
-            indexingOpAndConstraints = ExternalIndexingOperations.createExternalIndexingOp(spec, metadataProvider,
-                    dataset, itemType, indexerDesc, externalFiles);
-        } catch (Exception e) {
-            throw new AlgebricksException(e);
-        }
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexingOpAndConstraints.first,
-                indexingOpAndConstraints.second);
-
-        // Set the primary partition constraint to this operator's partition constraint
-        primaryPartitionConstraint = indexingOpAndConstraints.second;
-        return indexingOpAndConstraints.first;
-    }
-
-    protected AlgebricksMetaOperatorDescriptor createExternalAssignOp(JobSpecification spec, int numSecondaryKeys)
-            throws AlgebricksException {
-        int[] outColumns = new int[numSecondaryKeys];
-        int[] projectionList = new int[numSecondaryKeys + numPrimaryKeys];
-        for (int i = 0; i < numSecondaryKeys; i++) {
-            outColumns[i] = i + numPrimaryKeys + 1;
-            projectionList[i] = i + numPrimaryKeys + 1;
-        }
-
-        IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[secondaryFieldAccessEvalFactories.length];
-        for (int i = 0; i < secondaryFieldAccessEvalFactories.length; ++i) {
-            sefs[i] = new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter.ScalarEvaluatorFactoryAdapter(
-                    secondaryFieldAccessEvalFactories[i]);
-        }
-        // Add the external RIDs to the projection list
-        for (int i = 0; i < numPrimaryKeys; i++) {
-            projectionList[numSecondaryKeys + i] = i + 1;
-        }
-
-        AssignRuntimeFactory assign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
-        AlgebricksMetaOperatorDescriptor asterixAssignOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 1,
-                new IPushRuntimeFactory[] { assign }, new RecordDescriptor[] { secondaryRecDesc });
-        return asterixAssignOp;
-    }
-
-    protected ExternalIndexBulkModifyOperatorDescriptor createExternalIndexBulkModifyOp(JobSpecification spec,
-            int numSecondaryKeyFields, IIndexDataflowHelperFactory dataflowHelperFactory, float fillFactor)
-            throws MetadataException, AlgebricksException {
-        int[] fieldPermutation = new int[numSecondaryKeyFields + numPrimaryKeys];
-        for (int i = 0; i < numSecondaryKeyFields + numPrimaryKeys; i++) {
-            fieldPermutation[i] = i;
-        }
-        // create a list of file ids
-        int numOfDeletedFiles = 0;
-        for (ExternalFile file : externalFiles) {
-            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP)
-                numOfDeletedFiles++;
-        }
-        int[] deletedFiles = new int[numOfDeletedFiles];
-        int i = 0;
-        for (ExternalFile file : externalFiles) {
-            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
-                deletedFiles[i++] = file.getFileNumber(); // advance the slot for the next dropped file
-            }
-        }
-        ExternalIndexBulkModifyOperatorDescriptor treeIndexBulkLoadOp = new ExternalIndexBulkModifyOperatorDescriptor(
-                spec, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryBloomFilterKeyFields, dataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE, deletedFiles, fieldPermutation, fillFactor, numElementsHint);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
-                secondaryPartitionConstraint);
-        return treeIndexBulkLoadOp;
-    }
-
-    public List<ExternalFile> getExternalFiles() {
-        return externalFiles;
-    }
-
-    public void setExternalFiles(List<ExternalFile> externalFiles) {
-        this.externalFiles = externalFiles;
-    }
-}

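A brief usage sketch (illustrative only; argument values are placeholders, not taken from this commit) of the factory and build methods declared in SecondaryIndexOperationsHelper above:

    SecondaryIndexOperationsHelper helper = SecondaryIndexOperationsHelper.createIndexOperationsHelper(
            IndexType.BTREE, dataverseName, datasetName, indexName,
            secondaryKeyFields, secondaryKeyTypes, /* isEnforced */ false, /* gramLength */ -1,
            metadataProvider, physOptConf, recType, /* enforcedType */ null);
    JobSpecification createSpec = helper.buildCreationJobSpec();    // job that creates the index files
    JobSpecification loadSpec = helper.buildLoadingJobSpec();       // job that bulk loads the index from the dataset
    JobSpecification compactSpec = helper.buildCompactJobSpec();    // job that compacts the index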
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexOperationsHelper.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexOperationsHelper.java
deleted file mode 100644
index 74c4256..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/SecondaryInvertedIndexOperationsHelper.java
+++ /dev/null
@@ -1,373 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.runtime.formats.FormatUtils;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.resource.LSMInvertedIndexLocalResourceMetadata;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.data.ISerializerDeserializerProvider;
-import edu.uci.ics.hyracks.algebricks.data.ITypeTraitProvider;
-import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexCompactOperator;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
-import edu.uci.ics.hyracks.storage.common.file.LocalResource;
-
-public class SecondaryInvertedIndexOperationsHelper extends SecondaryIndexOperationsHelper {
-
-    private IAType secondaryKeyType;
-    private ITypeTraits[] invListsTypeTraits;
-    private IBinaryComparatorFactory[] tokenComparatorFactories;
-    private ITypeTraits[] tokenTypeTraits;
-    private IBinaryTokenizerFactory tokenizerFactory;
-    // For tokenization, sorting and loading. Represents <token, primary keys>.
-    private int numTokenKeyPairFields;
-    private IBinaryComparatorFactory[] tokenKeyPairComparatorFactories;
-    private RecordDescriptor tokenKeyPairRecDesc;
-    private boolean isPartitioned;
-    private int[] invertedIndexFields;
-    private int[] invertedIndexFieldsForNonBulkLoadOps;
-    private int[] secondaryFilterFieldsForNonBulkLoadOps;
-
-    protected SecondaryInvertedIndexOperationsHelper(PhysicalOptimizationConfig physOptConf,
-            IAsterixPropertiesProvider propertiesProvider) {
-        super(physOptConf, propertiesProvider);
-    }
-
-    @Override
-    @SuppressWarnings("rawtypes")
-    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
-            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadata) throws AlgebricksException,
-            AsterixException {
-        // Sanity checks.
-        if (numPrimaryKeys > 1) {
-            throw new AsterixException("Cannot create inverted index on dataset with composite primary key.");
-        }
-        if (numSecondaryKeys > 1) {
-            throw new AsterixException("Cannot create composite inverted index on multiple fields.");
-        }
-        if (indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX
-                || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
-            isPartitioned = true;
-        } else {
-            isPartitioned = false;
-        }
-        // Prepare record descriptor used in the assign op, and the optional
-        // select op.
-        secondaryFieldAccessEvalFactories = new ICopyEvaluatorFactory[numSecondaryKeys + numFilterFields];
-        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys
-                + numFilterFields];
-        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
-        secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
-        ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
-        ISerializerDeserializerProvider serdeProvider = FormatUtils.getDefaultFormat().getSerdeProvider();
-        ITypeTraitProvider typeTraitProvider = FormatUtils.getDefaultFormat().getTypeTraitProvider();
-        if (numSecondaryKeys > 0) {
-            secondaryFieldAccessEvalFactories[0] = FormatUtils.getDefaultFormat().getFieldAccessEvaluatorFactory(
-                    isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(0), numPrimaryKeys);
-            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
-                    secondaryKeyFields.get(0), itemType);
-            secondaryKeyType = keyTypePair.first;
-            anySecondaryKeyIsNullable = anySecondaryKeyIsNullable || keyTypePair.second;
-            ISerializerDeserializer keySerde = serdeProvider.getSerializerDeserializer(secondaryKeyType);
-            secondaryRecFields[0] = keySerde;
-            secondaryTypeTraits[0] = typeTraitProvider.getTypeTrait(secondaryKeyType);
-        }
-        if (numFilterFields > 0) {
-            secondaryFieldAccessEvalFactories[numSecondaryKeys] = FormatUtils.getDefaultFormat()
-                    .getFieldAccessEvaluatorFactory(itemType, filterFieldName, numPrimaryKeys);
-            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
-            IAType type = keyTypePair.first;
-            ISerializerDeserializer serde = serdeProvider.getSerializerDeserializer(type);
-            secondaryRecFields[numPrimaryKeys + numSecondaryKeys] = serde;
-        }
-        secondaryRecDesc = new RecordDescriptor(secondaryRecFields);
-        // Comparators and type traits for tokens.
-        int numTokenFields = (!isPartitioned) ? numSecondaryKeys : numSecondaryKeys + 1;
-        tokenComparatorFactories = new IBinaryComparatorFactory[numTokenFields];
-        tokenTypeTraits = new ITypeTraits[numTokenFields];
-        tokenComparatorFactories[0] = NonTaggedFormatUtil.getTokenBinaryComparatorFactory(secondaryKeyType);
-        tokenTypeTraits[0] = NonTaggedFormatUtil.getTokenTypeTrait(secondaryKeyType);
-        if (isPartitioned) {
-            // The partitioning field is hardcoded to be a short *without* an Asterix type tag.
-            tokenComparatorFactories[1] = PointableBinaryComparatorFactory.of(ShortPointable.FACTORY);
-            tokenTypeTraits[1] = ShortPointable.TYPE_TRAITS;
-        }
-        // Set tokenizer factory.
-        // TODO: We might want to expose the hashing option at the AQL level,
-        // and add the choice to the index metadata.
-        tokenizerFactory = NonTaggedFormatUtil.getBinaryTokenizerFactory(secondaryKeyType.getTypeTag(), indexType,
-                gramLength);
-        // Type traits for inverted-list elements. Inverted lists contain
-        // primary keys.
-        invListsTypeTraits = new ITypeTraits[numPrimaryKeys];
-        if (numPrimaryKeys > 0) {
-            invListsTypeTraits[0] = primaryRecDesc.getTypeTraits()[0];
-            enforcedRecFields[0] = primaryRecDesc.getFields()[0];
-            enforcedTypeTraits[0] = primaryRecDesc.getTypeTraits()[0];
-        }
-        enforcedRecFields[numPrimaryKeys] = serdeProvider.getSerializerDeserializer(itemType);
-        enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
-        // For tokenization, sorting and loading.
-        // One token (+ optional partitioning field) + primary keys.
-        numTokenKeyPairFields = (!isPartitioned) ? 1 + numPrimaryKeys : 2 + numPrimaryKeys;
-        ISerializerDeserializer[] tokenKeyPairFields = new ISerializerDeserializer[numTokenKeyPairFields
-                + numFilterFields];
-        ITypeTraits[] tokenKeyPairTypeTraits = new ITypeTraits[numTokenKeyPairFields];
-        tokenKeyPairComparatorFactories = new IBinaryComparatorFactory[numTokenKeyPairFields];
-        tokenKeyPairFields[0] = serdeProvider.getSerializerDeserializer(secondaryKeyType);
-        tokenKeyPairTypeTraits[0] = tokenTypeTraits[0];
-        tokenKeyPairComparatorFactories[0] = NonTaggedFormatUtil.getTokenBinaryComparatorFactory(secondaryKeyType);
-        int pkOff = 1;
-        if (isPartitioned) {
-            tokenKeyPairFields[1] = ShortSerializerDeserializer.INSTANCE;
-            tokenKeyPairTypeTraits[1] = tokenTypeTraits[1];
-            tokenKeyPairComparatorFactories[1] = PointableBinaryComparatorFactory.of(ShortPointable.FACTORY);
-            pkOff = 2;
-        }
-        if (numPrimaryKeys > 0) {
-            tokenKeyPairFields[pkOff] = primaryRecDesc.getFields()[0];
-            tokenKeyPairTypeTraits[pkOff] = primaryRecDesc.getTypeTraits()[0];
-            tokenKeyPairComparatorFactories[pkOff] = primaryComparatorFactories[0];
-        }
-        if (numFilterFields > 0) {
-            tokenKeyPairFields[numPrimaryKeys + pkOff] = secondaryRecFields[numPrimaryKeys + numSecondaryKeys];
-        }
-        tokenKeyPairRecDesc = new RecordDescriptor(tokenKeyPairFields, tokenKeyPairTypeTraits);
-        if (filterFieldName != null) {
-            invertedIndexFields = new int[numTokenKeyPairFields];
-            for (int i = 0; i < invertedIndexFields.length; i++) {
-                invertedIndexFields[i] = i;
-            }
-            secondaryFilterFieldsForNonBulkLoadOps = new int[numFilterFields];
-            secondaryFilterFieldsForNonBulkLoadOps[0] = numSecondaryKeys + numPrimaryKeys;
-            invertedIndexFieldsForNonBulkLoadOps = new int[numSecondaryKeys + numPrimaryKeys];
-            for (int i = 0; i < invertedIndexFieldsForNonBulkLoadOps.length; i++) {
-                invertedIndexFieldsForNonBulkLoadOps[i] = i;
-            }
-        }
-
-    }
-
-    @Override
-    protected int getNumSecondaryKeys() {
-        return numTokenKeyPairFields - numPrimaryKeys;
-    }
-
-    @Override
-    public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        // Prepare a LocalResourceMetadata that will be stored in the NC's local resource repository
-        ILocalResourceMetadata localResourceMetadata = new LSMInvertedIndexLocalResourceMetadata(invListsTypeTraits,
-                primaryComparatorFactories, tokenTypeTraits, tokenComparatorFactories, tokenizerFactory, isPartitioned,
-                dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties, filterTypeTraits,
-                filterCmpFactories, invertedIndexFields, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
-                invertedIndexFieldsForNonBulkLoadOps);
-        ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
-                localResourceMetadata, LocalResource.LSMInvertedIndexResource);
-
-        IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
-        LSMInvertedIndexCreateOperatorDescriptor invIndexCreateOp = new LSMInvertedIndexCreateOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, tokenTypeTraits, tokenComparatorFactories,
-                invListsTypeTraits, primaryComparatorFactories, tokenizerFactory, dataflowHelperFactory,
-                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, invIndexCreateOp,
-                secondaryPartitionConstraint);
-        spec.addRoot(invIndexCreateOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    @Override
-    public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        // Create dummy key provider for feeding the primary index scan.
-        AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
-
-        // Create primary index scan op.
-        BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
-
-        AbstractOperatorDescriptor sourceOp = primaryScanOp;
-        if (isEnforcingKeyTypes) {
-            sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
-            spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
-        }
-        AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, sourceOp, numSecondaryKeys);
-
-        // If any of the secondary fields are nullable, then add a select op
-        // that filters nulls.
-        AlgebricksMetaOperatorDescriptor selectOp = null;
-        if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-            selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
-        }
-
-        // Create a tokenizer op.
-        AbstractOperatorDescriptor tokenizerOp = createTokenizerOp(spec);
-
-        // Sort by token + primary keys.
-        ExternalSortOperatorDescriptor sortOp = createSortOp(spec, tokenKeyPairComparatorFactories, tokenKeyPairRecDesc);
-
-        // Create secondary inverted index bulk load op.
-        LSMInvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = createInvertedIndexBulkLoadOp(spec);
-
-        AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
-                new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] {});
-        // Connect the operators.
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
-        if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
-            spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
-            spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, tokenizerOp, 0);
-        } else {
-            spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, tokenizerOp, 0);
-        }
-        spec.connect(new OneToOneConnectorDescriptor(spec), tokenizerOp, 0, sortOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, invIndexBulkLoadOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), invIndexBulkLoadOp, 0, metaOp, 0);
-        spec.addRoot(metaOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    private AbstractOperatorDescriptor createTokenizerOp(JobSpecification spec) throws AlgebricksException {
-        int docField = 0;
-        int[] primaryKeyFields = new int[numPrimaryKeys + numFilterFields];
-        for (int i = 0; i < primaryKeyFields.length; i++) {
-            primaryKeyFields[i] = numSecondaryKeys + i;
-        }
-        BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec,
-                tokenKeyPairRecDesc, tokenizerFactory, docField, primaryKeyFields, isPartitioned, false);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tokenizerOp,
-                primaryPartitionConstraint);
-        return tokenizerOp;
-    }
-
-    @Override
-    protected ExternalSortOperatorDescriptor createSortOp(JobSpecification spec,
-            IBinaryComparatorFactory[] secondaryComparatorFactories, RecordDescriptor secondaryRecDesc) {
-        // Sort on token and primary keys.
-        int[] sortFields = new int[numTokenKeyPairFields];
-        for (int i = 0; i < numTokenKeyPairFields; i++) {
-            sortFields[i] = i;
-        }
-        ExternalSortOperatorDescriptor sortOp = new ExternalSortOperatorDescriptor(spec,
-                physOptConf.getMaxFramesExternalSort(), sortFields, tokenKeyPairComparatorFactories, secondaryRecDesc);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sortOp, primaryPartitionConstraint);
-        return sortOp;
-    }
-
-    private LSMInvertedIndexBulkLoadOperatorDescriptor createInvertedIndexBulkLoadOp(JobSpecification spec) {
-        int[] fieldPermutation = new int[numTokenKeyPairFields + numFilterFields];
-        for (int i = 0; i < fieldPermutation.length; i++) {
-            fieldPermutation[i] = i;
-        }
-        IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
-        LSMInvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = new LSMInvertedIndexBulkLoadOperatorDescriptor(
-                spec, secondaryRecDesc, fieldPermutation, false, numElementsHint, false,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, tokenTypeTraits, tokenComparatorFactories,
-                invListsTypeTraits, primaryComparatorFactories, tokenizerFactory, dataflowHelperFactory);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, invIndexBulkLoadOp,
-                secondaryPartitionConstraint);
-        return invIndexBulkLoadOp;
-    }
-
-    private IIndexDataflowHelperFactory createDataflowHelperFactory() {
-        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        if (!isPartitioned) {
-            return new LSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                    dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
-                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
-                    storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
-                    filterCmpFactories, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
-                    invertedIndexFieldsForNonBulkLoadOps, !temp);
-        } else {
-            return new PartitionedLSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                    dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
-                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                    LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
-                    storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
-                    filterCmpFactories, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
-                    invertedIndexFieldsForNonBulkLoadOps, !temp);
-        }
-    }
-
-    @Override
-    public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-
-        IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
-        LSMInvertedIndexCompactOperator compactOp = new LSMInvertedIndexCompactOperator(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, tokenTypeTraits, tokenComparatorFactories,
-                invListsTypeTraits, primaryComparatorFactories, tokenizerFactory, dataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
-                secondaryPartitionConstraint);
-
-        spec.addRoot(compactOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-}
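
The loading job assembled by buildLoadingJobSpec in the removed helper above always wires the same chain of operators; the only variation is whether a cast and a null-filtering select are spliced in. The sketch below is a simplified, Hyracks-free illustration of that ordering under stated assumptions: the stage names are plain labels, not real operator classes, and it only mirrors the branching on isEnforcingKeyTypes and anySecondaryKeyIsNullable.

    import java.util.ArrayList;
    import java.util.List;

    // Simplified illustration of the operator ordering in buildLoadingJobSpec;
    // stage names are labels only, the real code wires Hyracks operator descriptors.
    public class InvertedIndexLoadPipelineSketch {

        static List<String> stages(boolean anySecondaryKeyIsNullable, boolean isEnforcingKeyTypes) {
            List<String> pipeline = new ArrayList<String>();
            pipeline.add("dummy key provider");
            pipeline.add("primary index scan");
            if (isEnforcingKeyTypes) {
                pipeline.add("cast");                    // only when enforcing open-type keys
            }
            pipeline.add("assign (extract secondary keys)");
            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
                pipeline.add("select (filter nulls)");   // spliced in only for nullable/enforced keys
            }
            pipeline.add("tokenizer");
            pipeline.add("sort on token + primary keys");
            pipeline.add("inverted-index bulk load");
            pipeline.add("sink");
            return pipeline;
        }

        public static void main(String[] args) {
            System.out.println(stages(false, false));
            System.out.println(stages(true, true));
        }
    }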


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java b/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
new file mode 100644
index 0000000..69b0805
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.drivers;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.Reader;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.kohsuke.args4j.Argument;
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.asterix.api.common.AsterixHyracksIntegrationUtil;
+import edu.uci.ics.asterix.api.java.AsterixJavaClient;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+
+public class AsterixCLI {
+    private static class Options {
+        @Option(name = "-properties", usage = "Name of properties file", required = true)
+        public String properties;
+
+        @Option(name = "-output", usage = "Output folder to place results", required = true)
+        public String outputFolder;
+
+        @Argument(usage = "AQL Files to run", multiValued = true, required = true)
+        public List<String> args;
+    }
+
+    public static void main(String args[]) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+
+        setUp(options);
+        try {
+            for (String queryFile : options.args) {
+                Reader in = new FileReader(queryFile);
+                AsterixJavaClient ajc = new AsterixJavaClient(
+                        AsterixHyracksIntegrationUtil.getHyracksClientConnection(), in);
+                try {
+                    ajc.compile(true, false, false, false, false, true, false);
+                } finally {
+                    in.close();
+                }
+                ajc.execute();
+            }
+        } finally {
+            tearDown();
+        }
+        System.exit(0);
+    }
+
+    public static void setUp(Options options) throws Exception {
+        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, options.properties);
+        File outdir = new File(options.outputFolder);
+        outdir.mkdirs();
+
+        File log = new File("asterix_logs");
+        if (log.exists())
+            FileUtils.deleteDirectory(log);
+        File lsn = new File("last_checkpoint_lsn");
+        lsn.deleteOnExit();
+
+        AsterixHyracksIntegrationUtil.init();
+    }
+
+    public static void tearDown() throws Exception {
+        AsterixHyracksIntegrationUtil.deinit();
+    }
+
+}
\ No newline at end of file
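
A minimal way to exercise the CLI above from Java is to call its main method directly; the property file, output folder and AQL file names below are hypothetical placeholders, not files shipped with the project, and a usable cluster configuration is assumed.

    // Hypothetical invocation of the AsterixCLI entry point added above.
    // "cluster.properties", "results" and "query.aql" are illustrative names only.
    public class AsterixCLIExample {
        public static void main(String[] args) throws Exception {
            edu.uci.ics.asterix.drivers.AsterixCLI.main(new String[] {
                    "-properties", "cluster.properties",
                    "-output", "results",
                    "query.aql" });
        }
    }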

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java b/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
new file mode 100644
index 0000000..1a82941
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.drivers;
+
+import java.io.FileReader;
+
+import org.kohsuke.args4j.CmdLineParser;
+
+import edu.uci.ics.asterix.api.common.AsterixClientConfig;
+import edu.uci.ics.asterix.api.java.AsterixJavaClient;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+
+public class AsterixClientDriver {
+
+    public static void main(String args[]) throws Exception {
+        AsterixClientConfig acc = new AsterixClientConfig();
+        CmdLineParser cmdParser = new CmdLineParser(acc);
+        try {
+            cmdParser.parseArgument(args);
+        } catch (Exception e) {
+            cmdParser.printUsage(System.err);
+            throw e;
+        }
+
+        if (acc.getArguments().isEmpty()) {
+            System.err.println("Please specify the file containing the query.");
+            return;
+        }
+        if (acc.getArguments().size() > 1) {
+            System.err.print("Too many arguments. ");
+            System.err.println("Only the file contained the query needs to be specified.");
+            return;
+        }
+        boolean exec = Boolean.valueOf(acc.execute);
+        IHyracksClientConnection hcc = exec ? new HyracksConnection("localhost", acc.hyracksPort) : null;
+        AsterixJavaClient q = compileQuery(hcc, acc.getArguments().get(0), Boolean.valueOf(acc.optimize),
+                Boolean.valueOf(acc.onlyPhysical), exec || Boolean.valueOf(acc.hyracksJob));
+        if (exec) {
+            q.execute();
+        }
+    }
+
+    private static AsterixJavaClient compileQuery(IHyracksClientConnection hcc, String filename, boolean optimize,
+            boolean onlyPhysical, boolean createBinaryRuntime) throws Exception {
+        FileReader reader = new FileReader(filename);
+        AsterixJavaClient q = new AsterixJavaClient(hcc, reader);
+        q.compile(optimize, true, true, true, onlyPhysical, createBinaryRuntime, createBinaryRuntime);
+        return q;
+    }
+
+}
\ No newline at end of file
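
The compile-then-execute flow in the driver above can also be reproduced directly against AsterixJavaClient. The sketch below only mirrors the call pattern shown in compileQuery: the host, port, file name and boolean flag values are placeholders, and the meaning of the seven compile flags is not spelled out here.

    import java.io.FileReader;

    import edu.uci.ics.asterix.api.java.AsterixJavaClient;
    import edu.uci.ics.hyracks.api.client.HyracksConnection;
    import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;

    // Sketch of the compile-then-execute flow; "localhost", 1098 and "query.aql" are placeholders.
    public class ClientDriverSketch {
        public static void main(String[] args) throws Exception {
            IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
            FileReader reader = new FileReader("query.aql");
            try {
                AsterixJavaClient q = new AsterixJavaClient(hcc, reader);
                q.compile(true, true, true, true, false, true, true); // same arity as compileQuery above
                q.execute();
            } finally {
                reader.close();
            }
        }
    }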

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixWebServer.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixWebServer.java b/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixWebServer.java
new file mode 100644
index 0000000..a1204b1
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixWebServer.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.drivers;
+
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+
+import edu.uci.ics.asterix.api.http.servlet.APIServlet;
+
+public class AsterixWebServer {
+    public static void main(String args[]) throws Exception {
+        Server server = new Server(8080);
+        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+        context.setContextPath("/");
+        server.setHandler(context);
+
+        context.addServlet(new ServletHolder(new APIServlet()), "/*");
+        server.start();
+        server.join();
+    }
+}
\ No newline at end of file
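
Once the embedded Jetty server above is running, any plain HTTP client can reach the servlet. The smoke test below uses only JDK classes and assumes the server is already up on localhost:8080 and that APIServlet answers a GET on the root path; neither assumption is guaranteed by the diff itself.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    // Minimal smoke test against the embedded Jetty server started by AsterixWebServer.
    // Assumes the server is already running locally on port 8080.
    public class WebServerSmokeTest {
        public static void main(String[] args) throws Exception {
            HttpURLConnection conn = (HttpURLConnection) new URL("http://localhost:8080/").openConnection();
            conn.setRequestMethod("GET");
            System.out.println("HTTP status: " + conn.getResponseCode());
            BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
            try {
                String line;
                while ((line = in.readLine()) != null) {
                    System.out.println(line);
                }
            } finally {
                in.close();
                conn.disconnect();
            }
        }
    }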

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java
new file mode 100644
index 0000000..7bd19dd
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2009-2014 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+import java.util.List;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.parser.AQLParser;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.api.ICentralFeedManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLoadManager;
+import edu.uci.ics.asterix.common.feeds.api.IFeedTrackingManager;
+import edu.uci.ics.asterix.metadata.feeds.SocketMessageListener;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class CentralFeedManager implements ICentralFeedManager {
+
+    private static final ICentralFeedManager centralFeedManager = new CentralFeedManager();
+
+    public static ICentralFeedManager getInstance() {
+        return centralFeedManager;
+    }
+
+    private final int port;
+    private final IFeedLoadManager feedLoadManager;
+    private final IFeedTrackingManager feedTrackingManager;
+    private final SocketMessageListener messageListener;
+
+    private CentralFeedManager() {
+        this.port = AsterixAppContextInfo.getInstance().getFeedProperties().getFeedCentralManagerPort();
+        this.feedLoadManager = new FeedLoadManager();
+        this.feedTrackingManager = new FeedTrackingManager();
+        this.messageListener = new SocketMessageListener(port, new FeedMessageReceiver(this));
+    }
+
+    @Override
+    public void start() throws AsterixException {
+        messageListener.start();
+    }
+
+    @Override
+    public void stop() throws AsterixException, IOException {
+        messageListener.stop();
+    }
+
+    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobId jobId = hcc.startJob(spec);
+        if (waitForCompletion) {
+            hcc.waitForCompletion(jobId);
+        }
+        return jobId;
+    }
+
+    @Override
+    public IFeedLoadManager getFeedLoadManager() {
+        return feedLoadManager;
+    }
+
+    @Override
+    public IFeedTrackingManager getFeedTrackingManager() {
+        return feedTrackingManager;
+    }
+
+    public static class AQLExecutor {
+
+        private static final PrintWriter out = new PrintWriter(System.out, true);
+
+        public static void executeAQL(String aql) throws Exception {
+            AQLParser parser = new AQLParser(new StringReader(aql));
+            List<Statement> statements;
+            statements = parser.Statement();
+            SessionConfig pc = new SessionConfig(out, OutputFormat.ADM);
+            AqlTranslator translator = new AqlTranslator(statements, pc);
+            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null, AqlTranslator.ResultDelivery.SYNC);
+        }
+    }
+
+}
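
The nested AQLExecutor above allows ad-hoc AQL to be run from Java against a live instance. The call below is purely illustrative: the dataverse and dataset names are hypothetical, and a running AsterixDB cluster reachable through AsterixAppContextInfo is assumed.

    // Illustrative use of the static helper defined above; the dataverse "feeds" and
    // dataset "Tweets" are hypothetical names, and a running cluster is assumed.
    public class AQLExecutorExample {
        public static void main(String[] args) throws Exception {
            edu.uci.ics.asterix.feeds.CentralFeedManager.AQLExecutor
                    .executeAQL("use dataverse feeds; for $t in dataset Tweets return $t;");
        }
    }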

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java
new file mode 100644
index 0000000..35b8378
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedId;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class FeedCollectInfo extends FeedInfo {
+    public FeedId sourceFeedId;
+    public FeedConnectionId feedConnectionId;
+    public List<String> collectLocations = new ArrayList<String>();
+    public List<String> computeLocations = new ArrayList<String>();
+    public List<String> storageLocations = new ArrayList<String>();
+    public Map<String, String> feedPolicy;
+    public String superFeedManagerHost;
+    public int superFeedManagerPort;
+    public boolean fullyConnected;
+
+    public FeedCollectInfo(FeedId sourceFeedId, FeedConnectionId feedConnectionId, JobSpecification jobSpec,
+            JobId jobId, Map<String, String> feedPolicy) {
+        super(jobSpec, jobId, FeedInfoType.COLLECT);
+        this.sourceFeedId = sourceFeedId;
+        this.feedConnectionId = feedConnectionId;
+        this.feedPolicy = feedPolicy;
+        this.fullyConnected = true;
+    }
+
+    @Override
+    public String toString() {
+        return FeedInfoType.COLLECT + "[" + feedConnectionId + "]";
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java
new file mode 100644
index 0000000..afc6ccb
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobInfo;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class FeedInfo {
+    public JobSpecification jobSpec;
+    public JobInfo jobInfo;
+    public JobId jobId;
+    public FeedInfoType infoType;
+    public State state;
+
+    public enum State {
+        ACTIVE,
+        INACTIVE
+    }
+
+    public enum FeedInfoType {
+        INTAKE,
+        COLLECT
+    }
+
+    public FeedInfo(JobSpecification jobSpec, JobId jobId, FeedInfoType infoType) {
+        this.jobSpec = jobSpec;
+        this.jobId = jobId;
+        this.infoType = infoType;
+        this.state = State.INACTIVE;
+    }
+
+    @Override
+    public String toString() {
+        return " job id " + jobId;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java
new file mode 100644
index 0000000..b045eae
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java
@@ -0,0 +1,739 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.StringUtils;
+
+import edu.uci.ics.asterix.api.common.FeedWorkCollection.SubscribeFeedWork;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.feeds.FeedActivity;
+import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
+import edu.uci.ics.asterix.common.feeds.FeedId;
+import edu.uci.ics.asterix.common.feeds.FeedIntakeInfo;
+import edu.uci.ics.asterix.common.feeds.FeedJobInfo;
+import edu.uci.ics.asterix.common.feeds.FeedJobInfo.FeedJobState;
+import edu.uci.ics.asterix.common.feeds.FeedJobInfo.JobType;
+import edu.uci.ics.asterix.common.feeds.FeedJointKey;
+import edu.uci.ics.asterix.common.feeds.FeedPolicyAccessor;
+import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
+import edu.uci.ics.asterix.common.feeds.api.IFeedJoint.State;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
+import edu.uci.ics.asterix.common.feeds.api.IIntakeProgressTracker;
+import edu.uci.ics.asterix.common.feeds.message.StorageReportFeedMessage;
+import edu.uci.ics.asterix.feeds.FeedLifecycleListener.Message;
+import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
+import edu.uci.ics.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedMetaOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedWorkManager;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.OperatorDescriptorId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobInfo;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.api.job.JobStatus;
+import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
+
+public class FeedJobNotificationHandler implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
+
+    private final LinkedBlockingQueue<Message> inbox;
+    private final Map<FeedConnectionId, List<IFeedLifecycleEventSubscriber>> eventSubscribers;
+
+    private final Map<JobId, FeedJobInfo> jobInfos;
+    private final Map<FeedId, FeedIntakeInfo> intakeJobInfos;
+    private final Map<FeedConnectionId, FeedConnectJobInfo> connectJobInfos;
+    private final Map<FeedId, List<IFeedJoint>> feedPipeline;
+    private final Map<FeedConnectionId, Pair<IIntakeProgressTracker, Long>> feedIntakeProgressTrackers;
+
+    public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
+        this.inbox = inbox;
+        this.jobInfos = new HashMap<JobId, FeedJobInfo>();
+        this.intakeJobInfos = new HashMap<FeedId, FeedIntakeInfo>();
+        this.connectJobInfos = new HashMap<FeedConnectionId, FeedConnectJobInfo>();
+        this.feedPipeline = new HashMap<FeedId, List<IFeedJoint>>();
+        this.eventSubscribers = new HashMap<FeedConnectionId, List<IFeedLifecycleEventSubscriber>>();
+        this.feedIntakeProgressTrackers = new HashMap<FeedConnectionId, Pair<IIntakeProgressTracker, Long>>();
+    }
+
+    @Override
+    public void run() {
+        Message mesg;
+        while (true) {
+            try {
+                mesg = inbox.take();
+                switch (mesg.messageKind) {
+                    case JOB_START:
+                        handleJobStartMessage(mesg);
+                        break;
+                    case JOB_FINISH:
+                        handleJobFinishMessage(mesg);
+                        break;
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+
+        }
+    }
+
+    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
+            IIntakeProgressTracker feedIntakeProgressTracker) {
+        if (feedIntakeProgressTrackers.get(connectionId) == null) {
+            this.feedIntakeProgressTrackers.put(connectionId, new Pair<IIntakeProgressTracker, Long>(
+                    feedIntakeProgressTracker, 0L));
+        } else {
+            throw new IllegalStateException(" Progress tracker for connection " + connectionId
+                    + " is already registered");
+        }
+    }
+
+    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
+        this.feedIntakeProgressTrackers.remove(connectionId);
+    }
+
+    public void updateTrackingInformation(StorageReportFeedMessage srm) {
+        Pair<IIntakeProgressTracker, Long> p = feedIntakeProgressTrackers.get(srm.getConnectionId());
+        if (p != null && p.second < srm.getLastPersistedTupleIntakeTimestamp()) {
+            p.second = srm.getLastPersistedTupleIntakeTimestamp();
+            p.first.notifyIngestedTupleTimestamp(p.second);
+        }
+    }
+
+    public Collection<FeedIntakeInfo> getFeedIntakeInfos() {
+        return intakeJobInfos.values();
+    }
+
+    public Collection<FeedConnectJobInfo> getFeedConnectInfos() {
+        return connectJobInfos.values();
+    }
+
+    public void registerFeedJoint(IFeedJoint feedJoint) {
+        List<IFeedJoint> feedJointsOnPipeline = feedPipeline.get(feedJoint.getOwnerFeedId());
+        if (feedJointsOnPipeline == null) {
+            feedJointsOnPipeline = new ArrayList<IFeedJoint>();
+            feedPipeline.put(feedJoint.getOwnerFeedId(), feedJointsOnPipeline);
+            feedJointsOnPipeline.add(feedJoint);
+        } else {
+            if (!feedJointsOnPipeline.contains(feedJoint)) {
+                feedJointsOnPipeline.add(feedJoint);
+            } else {
+                throw new IllegalArgumentException("Feed joint " + feedJoint + " already registered");
+            }
+        }
+    }
+
+    public void registerFeedIntakeJob(FeedId feedId, JobId jobId, JobSpecification jobSpec) throws HyracksDataException {
+        if (jobInfos.get(jobId) != null) {
+            throw new IllegalStateException("Feed job already registered");
+        }
+
+        List<IFeedJoint> joints = feedPipeline.get(feedId);
+        IFeedJoint intakeJoint = null;
+        for (IFeedJoint joint : joints) {
+            if (joint.getType().equals(IFeedJoint.FeedJointType.INTAKE)) {
+                intakeJoint = joint;
+                break;
+            }
+        }
+
+        if (intakeJoint != null) {
+            FeedIntakeInfo intakeJobInfo = new FeedIntakeInfo(jobId, FeedJobState.CREATED, FeedJobInfo.JobType.INTAKE,
+                    feedId, intakeJoint, jobSpec);
+            intakeJobInfos.put(feedId, intakeJobInfo);
+            jobInfos.put(jobId, intakeJobInfo);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered feed intake [" + jobId + "]" + " for feed " + feedId);
+            }
+        } else {
+            throw new HyracksDataException("Could not register feed intake job [" + jobId + "]" + " for feed  "
+                    + feedId);
+        }
+    }
+
+    public void registerFeedCollectionJob(FeedId sourceFeedId, FeedConnectionId connectionId, JobId jobId,
+            JobSpecification jobSpec, Map<String, String> feedPolicy) {
+        if (jobInfos.get(jobId) != null) {
+            throw new IllegalStateException("Feed job already registered");
+        }
+
+        List<IFeedJoint> feedJoints = feedPipeline.get(sourceFeedId);
+        FeedConnectionId cid = null;
+        IFeedJoint sourceFeedJoint = null;
+        for (IFeedJoint joint : feedJoints) {
+            cid = joint.getReceiver(connectionId);
+            if (cid != null) {
+                sourceFeedJoint = joint;
+                break;
+            }
+        }
+
+        if (cid != null) {
+            FeedConnectJobInfo cInfo = new FeedConnectJobInfo(jobId, FeedJobState.CREATED, connectionId,
+                    sourceFeedJoint, null, jobSpec, feedPolicy);
+            jobInfos.put(jobId, cInfo);
+            connectJobInfos.put(connectionId, cInfo);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered feed connection [" + jobId + "]" + " for feed " + connectionId);
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Could not register feed collection job [" + jobId + "]" + " for feed connection "
+                        + connectionId);
+            }
+        }
+
+    }
+
+    public void deregisterFeedIntakeJob(JobId jobId) {
+        if (jobInfos.get(jobId) == null) {
+            throw new IllegalStateException(" Feed Intake job not registered ");
+        }
+
+        FeedIntakeInfo info = (FeedIntakeInfo) jobInfos.get(jobId);
+        jobInfos.remove(jobId);
+        intakeJobInfos.remove(info.getFeedId());
+
+        if (!info.getState().equals(FeedJobState.UNDER_RECOVERY)) {
+            List<IFeedJoint> joints = feedPipeline.get(info.getFeedId());
+            joints.remove(info.getIntakeFeedJoint());
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deregistered feed intake job [" + jobId + "]");
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Not removing feed joint as intake job is in " + FeedJobState.UNDER_RECOVERY + " state.");
+            }
+        }
+
+    }
+
+    private void handleJobStartMessage(Message message) throws Exception {
+        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
+        switch (jobInfo.getJobType()) {
+            case INTAKE:
+                handleIntakeJobStartMessage((FeedIntakeInfo) jobInfo);
+                break;
+            case FEED_CONNECT:
+                handleCollectJobStartMessage((FeedConnectJobInfo) jobInfo);
+                break;
+        }
+
+    }
+
+    private void handleJobFinishMessage(Message message) throws Exception {
+        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
+        switch (jobInfo.getJobType()) {
+            case INTAKE:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Intake Job finished for feed intake " + jobInfo.getJobId());
+                }
+                handleFeedIntakeJobFinishMessage((FeedIntakeInfo) jobInfo, message);
+                break;
+            case FEED_CONNECT:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Collect Job finished for  " + (FeedConnectJobInfo) jobInfo);
+                }
+                handleFeedCollectJobFinishMessage((FeedConnectJobInfo) jobInfo);
+                break;
+        }
+
+    }
+
+    private synchronized void handleIntakeJobStartMessage(FeedIntakeInfo intakeJobInfo) throws Exception {
+        List<OperatorDescriptorId> intakeOperatorIds = new ArrayList<OperatorDescriptorId>();
+        Map<OperatorDescriptorId, IOperatorDescriptor> operators = intakeJobInfo.getSpec().getOperatorMap();
+        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
+            IOperatorDescriptor opDesc = entry.getValue();
+            if (opDesc instanceof FeedIntakeOperatorDescriptor) {
+                intakeOperatorIds.add(opDesc.getOperatorId());
+            }
+        }
+
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(intakeJobInfo.getJobId());
+        List<String> intakeLocations = new ArrayList<String>();
+        for (OperatorDescriptorId intakeOperatorId : intakeOperatorIds) {
+            Map<Integer, String> operatorLocations = info.getOperatorLocations().get(intakeOperatorId);
+            int nOperatorInstances = operatorLocations.size();
+            for (int i = 0; i < nOperatorInstances; i++) {
+                intakeLocations.add(operatorLocations.get(i));
+            }
+        }
+        // intakeLocations is an ordered list; element i is the location of the i-th instance of the intake operator
+        intakeJobInfo.setIntakeLocation(intakeLocations);
+        intakeJobInfo.getIntakeFeedJoint().setState(State.ACTIVE);
+        intakeJobInfo.setState(FeedJobState.ACTIVE);
+
+        // notify event listeners 
+        notifyFeedEventSubscribers(intakeJobInfo, FeedLifecycleEvent.FEED_INTAKE_STARTED);
+    }
+
+    private void handleCollectJobStartMessage(FeedConnectJobInfo cInfo) throws RemoteException, ACIDException {
+        // set locations of feed sub-operations (intake, compute, store)
+        setLocations(cInfo);
+
+        // activate joints
+        List<IFeedJoint> joints = feedPipeline.get(cInfo.getConnectionId().getFeedId());
+        for (IFeedJoint joint : joints) {
+            if (joint.getProvider().equals(cInfo.getConnectionId())) {
+                joint.setState(State.ACTIVE);
+                if (joint.getType().equals(IFeedJoint.FeedJointType.COMPUTE)) {
+                    cInfo.setComputeFeedJoint(joint);
+                }
+            }
+        }
+        cInfo.setState(FeedJobState.ACTIVE);
+
+        // register activity in metadata
+        registerFeedActivity(cInfo);
+        // notify event listeners
+        notifyFeedEventSubscribers(cInfo, FeedLifecycleEvent.FEED_COLLECT_STARTED);
+    }
+
+    private void notifyFeedEventSubscribers(FeedJobInfo jobInfo, FeedLifecycleEvent event) {
+        JobType jobType = jobInfo.getJobType();
+        List<FeedConnectionId> impactedConnections = new ArrayList<FeedConnectionId>();
+        if (jobType.equals(JobType.INTAKE)) {
+            FeedId feedId = ((FeedIntakeInfo) jobInfo).getFeedId();
+            for (FeedConnectionId connId : eventSubscribers.keySet()) {
+                if (connId.getFeedId().equals(feedId)) {
+                    impactedConnections.add(connId);
+                }
+            }
+        } else {
+            impactedConnections.add(((FeedConnectJobInfo) jobInfo).getConnectionId());
+        }
+
+        for (FeedConnectionId connId : impactedConnections) {
+            List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connId);
+            if (subscribers != null && !subscribers.isEmpty()) {
+                for (IFeedLifecycleEventSubscriber subscriber : subscribers) {
+                    subscriber.handleFeedEvent(event);
+                }
+            }
+        }
+    }
+
+    public synchronized void submitFeedConnectionRequest(IFeedJoint feedJoint, final FeedConnectionRequest request)
+            throws Exception {
+        List<String> locations = null;
+        switch (feedJoint.getType()) {
+            case INTAKE:
+                FeedIntakeInfo intakeInfo = intakeJobInfos.get(feedJoint.getOwnerFeedId());
+                locations = intakeInfo.getIntakeLocation();
+                break;
+            case COMPUTE:
+                FeedConnectionId connectionId = feedJoint.getProvider();
+                FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+                locations = cInfo.getComputeLocations();
+                break;
+        }
+
+        SubscribeFeedWork work = new SubscribeFeedWork(locations.toArray(new String[] {}), request);
+        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
+    }
+
+    public IFeedJoint getSourceFeedJoint(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        if (cInfo != null) {
+            return cInfo.getSourceFeedJoint();
+        }
+        return null;
+    }
+
+    public Set<FeedConnectionId> getActiveFeedConnections() {
+        Set<FeedConnectionId> activeConnections = new HashSet<FeedConnectionId>();
+        for (FeedConnectJobInfo cInfo : connectJobInfos.values()) {
+            if (cInfo.getState().equals(FeedJobState.ACTIVE)) {
+                activeConnections.add(cInfo.getConnectionId());
+            }
+        }
+        return activeConnections;
+    }
+
+    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        if (cInfo != null) {
+            return cInfo.getState().equals(FeedJobState.ACTIVE);
+        }
+        return false;
+    }
+
+    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
+        FeedConnectJobInfo connectJobInfo = connectJobInfos.get(connectionId);
+        connectJobInfo.setState(jobState);
+    }
+
+    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getState();
+    }
+
+    private void handleFeedIntakeJobFinishMessage(FeedIntakeInfo intakeInfo, Message message) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(message.jobId);
+        JobStatus status = info.getStatus();
+        FeedLifecycleEvent event;
+        event = status.equals(JobStatus.FAILURE) ? FeedLifecycleEvent.FEED_INTAKE_FAILURE
+                : FeedLifecycleEvent.FEED_ENDED;
+
+        // remove feed joints
+        deregisterFeedIntakeJob(message.jobId);
+
+        // notify event listeners 
+        notifyFeedEventSubscribers(intakeInfo, event);
+
+    }
+
+    private void handleFeedCollectJobFinishMessage(FeedConnectJobInfo cInfo) throws Exception {
+        FeedConnectionId connectionId = cInfo.getConnectionId();
+
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(cInfo.getJobId());
+        JobStatus status = info.getStatus();
+        boolean failure = status != null && status.equals(JobStatus.FAILURE);
+        FeedPolicyAccessor fpa = new FeedPolicyAccessor(cInfo.getFeedPolicy());
+
+        boolean removeJobHistory = !failure;
+        boolean retainSubscription = cInfo.getState().equals(FeedJobState.UNDER_RECOVERY)
+                || (failure && fpa.continueOnHardwareFailure());
+
+        if (!retainSubscription) {
+            IFeedJoint feedJoint = cInfo.getSourceFeedJoint();
+            feedJoint.removeReceiver(connectionId);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Subscription " + cInfo.getConnectionId() + " completed successfully. Removed subscription");
+            }
+            removeFeedJointsPostPipelineTermination(cInfo.getConnectionId());
+        }
+
+        if (removeJobHistory) {
+            connectJobInfos.remove(connectionId);
+            jobInfos.remove(cInfo.getJobId());
+            feedIntakeProgressTrackers.remove(cInfo.getConnectionId());
+        }
+        deregisterFeedActivity(cInfo);
+
+        // notify event listeners 
+        FeedLifecycleEvent event = failure ? FeedLifecycleEvent.FEED_COLLECT_FAILURE : FeedLifecycleEvent.FEED_ENDED;
+        notifyFeedEventSubscribers(cInfo, event);
+    }
+
+    private void registerFeedActivity(FeedConnectJobInfo cInfo) {
+        Map<String, String> feedActivityDetails = new HashMap<String, String>();
+
+        if (cInfo.getCollectLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS,
+                    StringUtils.join(cInfo.getCollectLocations().iterator(), ','));
+        }
+
+        if (cInfo.getComputeLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS,
+                    StringUtils.join(cInfo.getComputeLocations().iterator(), ','));
+        }
+
+        if (cInfo.getStorageLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS,
+                    StringUtils.join(cInfo.getStorageLocations().iterator(), ','));
+        }
+
+        String policyName = cInfo.getFeedPolicy().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
+
+        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, (new Date()).toString());
+        try {
+            FeedActivity feedActivity = new FeedActivity(cInfo.getConnectionId().getFeedId().getDataverse(), cInfo
+                    .getConnectionId().getFeedId().getFeedName(), cInfo.getConnectionId().getDatasetName(),
+                    feedActivityDetails);
+            CentralFeedManager.getInstance().getFeedLoadManager()
+                    .reportFeedActivity(cInfo.getConnectionId(), feedActivity);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to register feed activity for " + cInfo + " " + e.getMessage());
+            }
+
+        }
+
+    }
+
+    public void deregisterFeedActivity(FeedConnectJobInfo cInfo) {
+        try {
+            CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(cInfo.getConnectionId());
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to deregister feed activity for " + cInfo + " " + e.getMessage());
+            }
+        }
+    }
+
+    public void removeFeedJointsPostPipelineTermination(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        List<IFeedJoint> feedJoints = feedPipeline.get(connectionId.getFeedId());
+
+        IFeedJoint sourceJoint = cInfo.getSourceFeedJoint();
+        List<FeedConnectionId> all = sourceJoint.getReceivers();
+        boolean removeSourceJoint = all.size() < 2;
+        if (removeSourceJoint) {
+            feedJoints.remove(sourceJoint);
+        }
+
+        IFeedJoint computeJoint = cInfo.getComputeFeedJoint();
+        if (computeJoint != null && computeJoint.getReceivers().size() < 2) {
+            feedJoints.remove(computeJoint);
+        }
+    }
+
+    public boolean isRegisteredFeedJob(JobId jobId) {
+        return jobInfos.get(jobId) != null;
+    }
+
+    public List<String> getFeedComputeLocations(FeedId feedId) {
+        List<IFeedJoint> feedJoints = feedPipeline.get(feedId);
+        for (IFeedJoint joint : feedJoints) {
+            if (joint.getFeedJointKey().getFeedId().equals(feedId)) {
+                return connectJobInfos.get(joint.getProvider()).getComputeLocations();
+            }
+        }
+        return null;
+    }
+
+    public List<String> getFeedStorageLocations(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getStorageLocations();
+    }
+
+    public List<String> getFeedCollectLocations(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getCollectLocations();
+    }
+
+    public List<String> getFeedIntakeLocations(FeedId feedId) {
+        return intakeJobInfos.get(feedId).getIntakeLocation();
+    }
+
+    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getJobId();
+    }
+
+    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
+        if (subscribers == null) {
+            subscribers = new ArrayList<IFeedLifecycleEventSubscriber>();
+            eventSubscribers.put(connectionId, subscribers);
+        }
+        subscribers.add(subscriber);
+    }
+
+    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
+        if (subscribers != null) {
+            subscribers.remove(subscriber);
+        }
+    }
+
+    //============================
+
+    public boolean isFeedPointAvailable(FeedJointKey feedJointKey) {
+        List<IFeedJoint> joints = feedPipeline.get(feedJointKey.getFeedId());
+        if (joints != null && !joints.isEmpty()) {
+            for (IFeedJoint joint : joints) {
+                if (joint.getFeedJointKey().equals(feedJointKey)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    public Collection<IFeedJoint> getFeedIntakeJoints() {
+        List<IFeedJoint> intakeFeedPoints = new ArrayList<IFeedJoint>();
+        for (FeedIntakeInfo info : intakeJobInfos.values()) {
+            intakeFeedPoints.add(info.getIntakeFeedJoint());
+        }
+        return intakeFeedPoints;
+    }
+
+    public IFeedJoint getFeedJoint(FeedJointKey feedPointKey) {
+        List<IFeedJoint> joints = feedPipeline.get(feedPointKey.getFeedId());
+        if (joints != null && !joints.isEmpty()) {
+            for (IFeedJoint joint : joints) {
+                if (joint.getFeedJointKey().equals(feedPointKey)) {
+                    return joint;
+                }
+            }
+        }
+        return null;
+    }
+
+    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
+        IFeedJoint feedJoint = getFeedJoint(feedJointKey);
+        if (feedJoint != null) {
+            return feedJoint;
+        } else {
+            String jointKeyString = feedJointKey.getStringRep();
+            List<IFeedJoint> jointsOnPipeline = feedPipeline.get(feedJointKey.getFeedId());
+            IFeedJoint candidateJoint = null;
+            if (jointsOnPipeline != null) {
+                for (IFeedJoint joint : jointsOnPipeline) {
+                    if (jointKeyString.contains(joint.getFeedJointKey().getStringRep())) {
+                        if (candidateJoint == null) {
+                            candidateJoint = joint;
+                        } else if (joint.getFeedJointKey().getStringRep()
+                                .contains(candidateJoint.getFeedJointKey().getStringRep())) { // the found joint's key subsumes the earlier candidate's key
+                            candidateJoint = joint;
+                        }
+                    }
+                }
+            }
+            return candidateJoint;
+        }
+    }
+
+    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getSpec();
+    }
+
+    public IFeedJoint getFeedPoint(FeedId sourceFeedId, IFeedJoint.FeedJointType type) {
+        List<IFeedJoint> joints = feedPipeline.get(sourceFeedId);
+        for (IFeedJoint joint : joints) {
+            if (joint.getType().equals(type)) {
+                return joint;
+            }
+        }
+        return null;
+    }
+
+    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId);
+    }
+
+    private void setLocations(FeedConnectJobInfo cInfo) {
+        JobSpecification jobSpec = cInfo.getSpec();
+
+        List<OperatorDescriptorId> collectOperatorIds = new ArrayList<OperatorDescriptorId>();
+        List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
+        List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
+
+        Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
+        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
+            IOperatorDescriptor opDesc = entry.getValue();
+            IOperatorDescriptor actualOp = null;
+            if (opDesc instanceof FeedMetaOperatorDescriptor) {
+                actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
+            } else {
+                actualOp = opDesc;
+            }
+
+            if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
+                AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
+                IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
+                boolean computeOp = false;
+                for (IPushRuntimeFactory rf : runtimeFactories) {
+                    if (rf instanceof AssignRuntimeFactory) {
+                        IConnectorDescriptor connDesc = jobSpec.getOperatorInputMap().get(op.getOperatorId()).get(0);
+                        IOperatorDescriptor sourceOp = jobSpec.getConnectorOperatorMap().get(connDesc.getConnectorId())
+                                .getLeft().getLeft();
+                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
+                            computeOp = true;
+                            break;
+                        }
+                    }
+                }
+                if (computeOp) {
+                    computeOperatorIds.add(entry.getKey());
+                }
+            } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
+                storageOperatorIds.add(entry.getKey());
+            } else if (actualOp instanceof FeedCollectOperatorDescriptor) {
+                collectOperatorIds.add(entry.getKey());
+            }
+        }
+
+        try {
+            IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+            JobInfo info = hcc.getJobInfo(cInfo.getJobId());
+            List<String> collectLocations = new ArrayList<String>();
+            for (OperatorDescriptorId collectOpId : collectOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(collectOpId);
+                int nOperatorInstances = operatorLocations.size();
+                for (int i = 0; i < nOperatorInstances; i++) {
+                    collectLocations.add(operatorLocations.get(i));
+                }
+            }
+
+            List<String> computeLocations = new ArrayList<String>();
+            for (OperatorDescriptorId computeOpId : computeOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
+                if (operatorLocations != null) {
+                    int nOperatorInstances = operatorLocations.size();
+                    for (int i = 0; i < nOperatorInstances; i++) {
+                        computeLocations.add(operatorLocations.get(i));
+                    }
+                } else {
+                    computeLocations.clear();
+                    computeLocations.addAll(collectLocations);
+                }
+            }
+
+            List<String> storageLocations = new ArrayList<String>();
+            for (OperatorDescriptorId storageOpId : storageOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
+                if (operatorLocations == null) {
+                    continue;
+                }
+                int nOperatorInstances = operatorLocations.size();
+                for (int i = 0; i < nOperatorInstances; i++) {
+                    storageLocations.add(operatorLocations.get(i));
+                }
+            }
+            cInfo.setCollectLocations(collectLocations);
+            cInfo.setComputeLocations(computeLocations);
+            cInfo.setStorageLocations(storageLocations);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+    }
+}
\ No newline at end of file
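
The hunk above classifies every operator in a connect job as a collect, compute, or storage operator and then resolves the node locations of each group from the job's JobInfo, whose getOperatorLocations() maps an operator id to a partition-index/node-id map. Below is a minimal sketch of that flattening step; the helper name is hypothetical and only the map shape used in the hunk is assumed.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

// Hypothetical helper (not part of the patch): flattens the partition-index ->
// node-id map returned for one operator into a plain list of node ids, falling
// back to a supplied default when the operator has no recorded locations
// (mirrors the compute-location fallback in the hunk above).
final class OperatorLocationUtil {

    private OperatorLocationUtil() {
    }

    static List<String> flattenLocations(Map<Integer, String> operatorLocations, List<String> fallback) {
        List<String> locations = new ArrayList<String>();
        if (operatorLocations == null) {
            locations.addAll(fallback);
            return locations;
        }
        for (int i = 0; i < operatorLocations.size(); i++) {
            locations.add(operatorLocations.get(i));
        }
        return locations;
    }
}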

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java
new file mode 100644
index 0000000..a0411ce
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
+import edu.uci.ics.asterix.common.feeds.FeedId;
+import edu.uci.ics.asterix.common.feeds.FeedJointKey;
+import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
+
+public class FeedJoint implements IFeedJoint {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJoint.class.getName());
+
+    /** A unique key associated with this FeedJoint **/
+    private final FeedJointKey key;
+
+    /** The state associated with the FeedJoint **/
+    private State state;
+
+    /** A list of subscribers that receive data from this FeedJoint **/
+    private final List<FeedConnectionId> receivers;
+
+    /** The FeedId of the feed that owns this FeedJoint **/
+    private final FeedId ownerFeedId;
+
+    /** A list of connection requests submitted by feeds wishing to subscribe to this FeedJoint's data **/
+    private final List<FeedConnectionRequest> connectionRequests;
+
+    private final ConnectionLocation connectionLocation;
+
+    private final FeedJointType type;
+
+    private FeedConnectionId provider;
+
+    public FeedJoint(FeedJointKey key, FeedId ownerFeedId, ConnectionLocation subscriptionLocation, FeedJointType type,
+            FeedConnectionId provider) {
+        this.key = key;
+        this.ownerFeedId = ownerFeedId;
+        this.type = type;
+        this.receivers = new ArrayList<FeedConnectionId>();
+        this.state = State.CREATED;
+        this.connectionLocation = subscriptionLocation;
+        this.connectionRequests = new ArrayList<FeedConnectionRequest>();
+        this.provider = provider;
+    }
+
+    @Override
+    public int hashCode() {
+        return key.hashCode();
+    }
+
+    public void addReceiver(FeedConnectionId connectionId) {
+        receivers.add(connectionId);
+    }
+
+    public void removeReceiver(FeedConnectionId connectionId) {
+        receivers.remove(connectionId);
+    }
+
+    public synchronized void addConnectionRequest(FeedConnectionRequest request) {
+        connectionRequests.add(request);
+        if (state.equals(State.ACTIVE)) {
+            handlePendingConnectionRequest();
+        }
+    }
+
+    public synchronized void setState(State state) {
+        if (this.state.equals(state)) {
+            return;
+        }
+        this.state = state;
+        if (this.state.equals(State.ACTIVE)) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Feed joint " + this + " is now " + State.ACTIVE);
+            }
+            handlePendingConnectionRequest();
+        }
+    }
+
+    private void handlePendingConnectionRequest() {
+        for (FeedConnectionRequest connectionRequest : connectionRequests) {
+            FeedConnectionId connectionId = new FeedConnectionId(connectionRequest.getReceivingFeedId(),
+                    connectionRequest.getTargetDataset());
+            try {
+                FeedLifecycleListener.INSTANCE.submitFeedConnectionRequest(this, connectionRequest);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Submitted feed connection request " + connectionRequest + " at feed joint " + this);
+                }
+                addReceiver(connectionId);
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unsuccessful attempt at submitting connection request " + connectionRequest
+                            + " at feed joint " + this + ". Message " + e.getMessage());
+                }
+                e.printStackTrace();
+            }
+        }
+        connectionRequests.clear();
+    }
+
+    public FeedConnectionId getReceiver(FeedConnectionId connectionId) {
+        for (FeedConnectionId cid : receivers) {
+            if (cid.equals(connectionId)) {
+                return cid;
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public String toString() {
+        return key.toString() + " [" + connectionLocation + "]" + "[" + state + "]";
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null) {
+            return false;
+        }
+        if (o == this) {
+            return true;
+        }
+        if (!(o instanceof FeedJoint)) {
+            return false;
+        }
+        return ((FeedJoint) o).getFeedJointKey().equals(this.key);
+    }
+
+    public FeedId getOwnerFeedId() {
+        return ownerFeedId;
+    }
+
+    public List<FeedConnectionRequest> getConnectionRequests() {
+        return connectionRequests;
+    }
+
+    public ConnectionLocation getConnectionLocation() {
+        return connectionLocation;
+    }
+
+    public FeedJointType getType() {
+        return type;
+    }
+
+    @Override
+    public FeedConnectionId getProvider() {
+        return provider;
+    }
+
+    public List<FeedConnectionId> getReceivers() {
+        return receivers;
+    }
+
+    public FeedJointKey getKey() {
+        return key;
+    }
+
+    public synchronized State getState() {
+        return state;
+    }
+
+    @Override
+    public FeedJointKey getFeedJointKey() {
+        return key;
+    }
+
+}
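
A short usage sketch of the FeedJoint state machine defined above, assuming an already constructed FeedJoint and FeedConnectionRequest (their construction is not shown in this patch) and assuming State is the nested enum on IFeedJoint: a request queued while the joint is CREATED is only submitted once the joint becomes ACTIVE.

import java.util.List;

import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
import edu.uci.ics.asterix.feeds.FeedJoint;

// Sketch only: illustrates the intended lifecycle of a FeedJoint.
class FeedJointLifecycleSketch {

    static void illustrate(FeedJoint joint, FeedConnectionRequest request) {
        // While the joint is still CREATED, the request is merely queued.
        joint.addConnectionRequest(request);

        // Once the intake/collect pipeline is up, the joint is marked ACTIVE;
        // setState(...) then drains the queued requests through
        // FeedLifecycleListener.INSTANCE.submitFeedConnectionRequest(...).
        joint.setState(IFeedJoint.State.ACTIVE); // assumes State is nested in IFeedJoint

        // Each successfully submitted request is recorded as a receiver.
        List<FeedConnectionId> receivers = joint.getReceivers();
        System.out.println("receivers: " + receivers);
    }
}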

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java
new file mode 100644
index 0000000..432a56d
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java
@@ -0,0 +1,486 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.feeds;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.api.common.SessionConfig;
+import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
+import edu.uci.ics.asterix.aql.base.Statement;
+import edu.uci.ics.asterix.aql.expression.DataverseDecl;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aql.translator.AqlTranslator;
+import edu.uci.ics.asterix.common.api.IClusterManagementWork;
+import edu.uci.ics.asterix.common.api.IClusterManagementWork.ClusterState;
+import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionRequest;
+import edu.uci.ics.asterix.common.feeds.FeedId;
+import edu.uci.ics.asterix.common.feeds.FeedIntakeInfo;
+import edu.uci.ics.asterix.common.feeds.FeedJobInfo;
+import edu.uci.ics.asterix.common.feeds.FeedJobInfo.FeedJobState;
+import edu.uci.ics.asterix.common.feeds.FeedJointKey;
+import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
+import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener;
+import edu.uci.ics.asterix.common.feeds.api.IIntakeProgressTracker;
+import edu.uci.ics.asterix.common.feeds.message.StorageReportFeedMessage;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
+import edu.uci.ics.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+/**
+ * A listener that subscribes to events associated with cluster membership
+ * (nodes joining/leaving the cluster) and job lifecycle (start/end of a job).
+ * Subscription to such events allows keeping track of feed ingestion jobs and
+ * taking any corrective action that may be required when a node involved in a
+ * feed leaves the cluster.
+ */
+public class FeedLifecycleListener implements IFeedLifecycleListener {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedLifecycleListener.class.getName());
+
+    public static FeedLifecycleListener INSTANCE = new FeedLifecycleListener();
+
+    private final LinkedBlockingQueue<Message> jobEventInbox;
+    private final LinkedBlockingQueue<IClusterManagementWorkResponse> responseInbox;
+    private final Map<FeedCollectInfo, List<String>> dependentFeeds = new HashMap<FeedCollectInfo, List<String>>();
+    private final Map<FeedConnectionId, LinkedBlockingQueue<String>> feedReportQueue;
+    private final FeedJobNotificationHandler feedJobNotificationHandler;
+    private final FeedWorkRequestResponseHandler feedWorkRequestResponseHandler;
+    private final ExecutorService executorService;
+
+    private ClusterState state;
+
+    private FeedLifecycleListener() {
+        this.jobEventInbox = new LinkedBlockingQueue<Message>();
+        this.feedJobNotificationHandler = new FeedJobNotificationHandler(jobEventInbox);
+        this.responseInbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
+        this.feedWorkRequestResponseHandler = new FeedWorkRequestResponseHandler(responseInbox);
+        this.feedReportQueue = new HashMap<FeedConnectionId, LinkedBlockingQueue<String>>();
+        this.executorService = Executors.newCachedThreadPool();
+        this.executorService.execute(feedJobNotificationHandler);
+        this.executorService.execute(feedWorkRequestResponseHandler);
+        ClusterManager.INSTANCE.registerSubscriber(this);
+        this.state = AsterixClusterProperties.INSTANCE.getState();
+    }
+
+    @Override
+    public void notifyJobStart(JobId jobId) throws HyracksException {
+        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
+            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_START));
+        }
+    }
+
+    @Override
+    public void notifyJobFinish(JobId jobId) throws HyracksException {
+        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
+            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_FINISH));
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("NO NEED TO NOTIFY JOB FINISH!");
+            }
+        }
+    }
+
+    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedConnectJobInfo(connectionId);
+    }
+
+    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
+            IIntakeProgressTracker feedIntakeProgressTracker) {
+        feedJobNotificationHandler.registerFeedIntakeProgressTracker(connectionId, feedIntakeProgressTracker);
+    }
+
+    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
+        feedJobNotificationHandler.deregisterFeedIntakeProgressTracker(connectionId);
+    }
+
+    public void updateTrackingInformation(StorageReportFeedMessage srm) {
+        feedJobNotificationHandler.updateTrackingInformation(srm);
+    }
+
+    /*
+     * Traverses the job specification to categorize the job as either a feed intake job or a feed collection job.
+     */
+    @Override
+    public void notifyJobCreation(JobId jobId, IActivityClusterGraphGeneratorFactory acggf) throws HyracksException {
+        JobSpecification spec = acggf.getJobSpecification();
+        FeedConnectionId feedConnectionId = null;
+        Map<String, String> feedPolicy = null;
+        for (IOperatorDescriptor opDesc : spec.getOperatorMap().values()) {
+            if (opDesc instanceof FeedCollectOperatorDescriptor) {
+                feedConnectionId = ((FeedCollectOperatorDescriptor) opDesc).getFeedConnectionId();
+                feedPolicy = ((FeedCollectOperatorDescriptor) opDesc).getFeedPolicyProperties();
+                feedJobNotificationHandler.registerFeedCollectionJob(
+                        ((FeedCollectOperatorDescriptor) opDesc).getSourceFeedId(), feedConnectionId, jobId, spec,
+                        feedPolicy);
+                break;
+            } else if (opDesc instanceof FeedIntakeOperatorDescriptor) {
+                feedJobNotificationHandler.registerFeedIntakeJob(((FeedIntakeOperatorDescriptor) opDesc).getFeedId(),
+                        jobId, spec);
+                break;
+            }
+        }
+    }
+
+    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
+        feedJobNotificationHandler.setJobState(connectionId, jobState);
+    }
+
+    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedJobState(connectionId);
+    }
+
+    public static class Message {
+        public JobId jobId;
+
+        public enum MessageKind {
+            JOB_START,
+            JOB_FINISH
+        }
+
+        public MessageKind messageKind;
+
+        public Message(JobId jobId, MessageKind msgKind) {
+            this.jobId = jobId;
+            this.messageKind = msgKind;
+        }
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
+        Set<IClusterManagementWork> workToBeDone = new HashSet<IClusterManagementWork>();
+
+        Collection<FeedIntakeInfo> intakeInfos = feedJobNotificationHandler.getFeedIntakeInfos();
+        Collection<FeedConnectJobInfo> connectJobInfos = feedJobNotificationHandler.getFeedConnectInfos();
+
+        Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<String, List<FeedJobInfo>>();
+
+        for (String deadNode : deadNodeIds) {
+            for (FeedIntakeInfo intakeInfo : intakeInfos) {
+                if (intakeInfo.getIntakeLocation().contains(deadNode)) {
+                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
+                    if (infos == null) {
+                        infos = new ArrayList<FeedJobInfo>();
+                        impactedJobs.put(deadNode, infos);
+                    }
+                    infos.add(intakeInfo);
+                    intakeInfo.setState(FeedJobState.UNDER_RECOVERY);
+                }
+            }
+
+            for (FeedConnectJobInfo connectInfo : connectJobInfos) {
+                if (connectInfo.getStorageLocations().contains(deadNode)) {
+                    continue;
+                }
+                if (connectInfo.getComputeLocations().contains(deadNode)
+                        || connectInfo.getCollectLocations().contains(deadNode)) {
+                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
+                    if (infos == null) {
+                        infos = new ArrayList<FeedJobInfo>();
+                        impactedJobs.put(deadNode, infos);
+                    }
+                    infos.add(connectInfo);
+                    connectInfo.setState(FeedJobState.UNDER_RECOVERY);
+                    feedJobNotificationHandler.deregisterFeedActivity(connectInfo);
+                }
+            }
+
+        }
+
+        if (impactedJobs.size() > 0) {
+            AddNodeWork addNodeWork = new AddNodeWork(deadNodeIds, deadNodeIds.size(), this);
+            feedWorkRequestResponseHandler.registerFeedWork(addNodeWork.getWorkId(), impactedJobs);
+            workToBeDone.add(addNodeWork);
+        }
+        return workToBeDone;
+
+    }
+
+    public static class FailureReport {
+
+        private final List<Pair<FeedConnectJobInfo, List<String>>> recoverableConnectJobs;
+        private final Map<IFeedJoint, List<String>> recoverableIntakeFeedIds;
+
+        public FailureReport(Map<IFeedJoint, List<String>> recoverableIntakeFeedIds,
+                List<Pair<FeedConnectJobInfo, List<String>>> recoverableSubscribers) {
+            this.recoverableConnectJobs = recoverableSubscribers;
+            this.recoverableIntakeFeedIds = recoverableIntakeFeedIds;
+        }
+
+        public List<Pair<FeedConnectJobInfo, List<String>>> getRecoverableSubscribers() {
+            return recoverableConnectJobs;
+        }
+
+        public Map<IFeedJoint, List<String>> getRecoverableIntakeFeedIds() {
+            return recoverableIntakeFeedIds;
+        }
+
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
+        ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(joinedNodeId + " joined the cluster. " + "Asterix state: " + newState);
+        }
+
+        boolean needToReActivateFeeds = !newState.equals(state) && (newState == ClusterState.ACTIVE);
+        if (needToReActivateFeeds) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(joinedNodeId + " Resuming loser feeds (if any)");
+            }
+            try {
+                FeedsActivator activator = new FeedsActivator();
+                (new Thread(activator)).start();
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Exception in resuming feeds" + e.getMessage());
+                }
+            }
+            state = newState;
+        } else {
+            List<FeedCollectInfo> feedsThatCanBeRevived = new ArrayList<FeedCollectInfo>();
+            for (Entry<FeedCollectInfo, List<String>> entry : dependentFeeds.entrySet()) {
+                List<String> requiredNodeIds = entry.getValue();
+                if (requiredNodeIds.contains(joinedNodeId)) {
+                    requiredNodeIds.remove(joinedNodeId);
+                    if (requiredNodeIds.isEmpty()) {
+                        feedsThatCanBeRevived.add(entry.getKey());
+                    }
+                }
+            }
+            if (!feedsThatCanBeRevived.isEmpty()) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(joinedNodeId + " Resuming feeds after rejoining of node " + joinedNodeId);
+                }
+                FeedsActivator activator = new FeedsActivator(feedsThatCanBeRevived);
+                (new Thread(activator)).start();
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
+        try {
+            responseInbox.put(response);
+        } catch (InterruptedException e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Interrupted exception");
+            }
+        }
+    }
+
+    @Override
+    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
+        switch (newState) {
+            case ACTIVE:
+                if (previousState.equals(ClusterState.UNUSABLE)) {
+                    try {
+                        FeedsActivator activator = new FeedsActivator();
+                        // (new Thread(activator)).start();
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Exception in resuming feeds" + e.getMessage());
+                        }
+                    }
+                }
+                break;
+        }
+
+    }
+
+    public static class FeedsDeActivator implements Runnable {
+
+        private List<FeedConnectJobInfo> failedConnectjobs;
+
+        public FeedsDeActivator(List<FeedConnectJobInfo> failedConnectjobs) {
+            this.failedConnectjobs = failedConnectjobs;
+        }
+
+        @Override
+        public void run() {
+            for (FeedConnectJobInfo failedConnectJob : failedConnectjobs) {
+                endFeed(failedConnectJob);
+            }
+        }
+
+        private void endFeed(FeedConnectJobInfo cInfo) {
+            MetadataTransactionContext ctx = null;
+            PrintWriter writer = new PrintWriter(System.out, true);
+            SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+
+            try {
+                ctx = MetadataManager.INSTANCE.beginTransaction();
+                FeedId feedId = cInfo.getConnectionId().getFeedId();
+                DisconnectFeedStatement stmt = new DisconnectFeedStatement(new Identifier(feedId.getDataverse()),
+                        new Identifier(feedId.getFeedName()), new Identifier(cInfo.getConnectionId().getDatasetName()));
+                List<Statement> statements = new ArrayList<Statement>();
+                DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(feedId.getDataverse()));
+                statements.add(dataverseDecl);
+                statements.add(stmt);
+                AqlTranslator translator = new AqlTranslator(statements, pc);
+                translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                        AqlTranslator.ResultDelivery.SYNC);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("End irrecoverable feed: " + cInfo.getConnectionId());
+                }
+                MetadataManager.INSTANCE.commitTransaction(ctx);
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Exception in ending loser feed: " + cInfo.getConnectionId() + " Exception "
+                            + e.getMessage());
+                }
+                e.printStackTrace();
+                try {
+                    MetadataManager.INSTANCE.abortTransaction(ctx);
+                } catch (Exception e2) {
+                    e2.addSuppressed(e);
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Exception in aborting transaction! System is in inconsistent state");
+                    }
+                }
+
+            }
+
+        }
+    }
+
+    public void submitFeedConnectionRequest(IFeedJoint feedPoint, FeedConnectionRequest subscriptionRequest)
+            throws Exception {
+        feedJobNotificationHandler.submitFeedConnectionRequest(feedPoint, subscriptionRequest);
+    }
+
+    @Override
+    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId) {
+        List<FeedConnectionId> connections = new ArrayList<FeedConnectionId>();
+        Collection<FeedConnectionId> activeConnections = feedJobNotificationHandler.getActiveFeedConnections();
+        if (feedId != null) {
+            for (FeedConnectionId connectionId : activeConnections) {
+                if (connectionId.getFeedId().equals(feedId)) {
+                    connections.add(connectionId);
+                }
+            }
+        } else {
+            connections.addAll(activeConnections);
+        }
+        return connections;
+    }
+
+    @Override
+    public List<String> getComputeLocations(FeedId feedId) {
+        return feedJobNotificationHandler.getFeedComputeLocations(feedId);
+    }
+
+    @Override
+    public List<String> getIntakeLocations(FeedId feedId) {
+        return feedJobNotificationHandler.getFeedIntakeLocations(feedId);
+    }
+
+    @Override
+    public List<String> getStoreLocations(FeedConnectionId feedConnectionId) {
+        return feedJobNotificationHandler.getFeedStorageLocations(feedConnectionId);
+    }
+
+    @Override
+    public List<String> getCollectLocations(FeedConnectionId feedConnectionId) {
+        return feedJobNotificationHandler.getFeedCollectLocations(feedConnectionId);
+    }
+
+    @Override
+    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.isFeedConnectionActive(connectionId);
+    }
+
+    public void reportPartialDisconnection(FeedConnectionId connectionId) {
+        feedJobNotificationHandler.removeFeedJointsPostPipelineTermination(connectionId);
+    }
+
+    public void registerFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+        feedReportQueue.put(feedId, queue);
+    }
+
+    public void deregisterFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+        feedReportQueue.remove(feedId);
+    }
+
+    public LinkedBlockingQueue<String> getFeedReportQueue(FeedConnectionId feedId) {
+        return feedReportQueue.get(feedId);
+    }
+
+    @Override
+    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.getAvailableFeedJoint(feedJointKey);
+    }
+
+    @Override
+    public boolean isFeedJointAvailable(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.isFeedPointAvailable(feedJointKey);
+    }
+
+    public void registerFeedJoint(IFeedJoint feedJoint) {
+        feedJobNotificationHandler.registerFeedJoint(feedJoint);
+    }
+
+    public IFeedJoint getFeedJoint(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.getFeedJoint(feedJointKey);
+    }
+
+    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        feedJobNotificationHandler.registerFeedEventSubscriber(connectionId, subscriber);
+    }
+
+    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        feedJobNotificationHandler.deregisterFeedEventSubscriber(connectionId, subscriber);
+
+    }
+
+    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getCollectJobSpecification(connectionId);
+    }
+
+    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedCollectJobId(connectionId);
+    }
+
+}
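
FeedLifecycleListener above decouples the Hyracks callbacks (notifyJobStart/notifyJobFinish) from feed bookkeeping by pushing Message objects into a LinkedBlockingQueue that FeedJobNotificationHandler drains on an executor thread. A self-contained sketch of that inbox/handler pattern follows, using generic names rather than the actual handler classes.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingQueue;

// Generic illustration of the inbox/handler pattern used above: producers
// enqueue events from callback threads, a single consumer drains them on its
// own executor thread.
class EventInboxSketch {

    static final class Event {
        final String kind;

        Event(String kind) {
            this.kind = kind;
        }
    }

    private final LinkedBlockingQueue<Event> inbox = new LinkedBlockingQueue<Event>();
    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    EventInboxSketch() {
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    while (true) {
                        Event event = inbox.take(); // blocks until an event arrives
                        System.out.println("handling " + event.kind);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        });
    }

    // Called from callback threads, e.g. from notifyJobStart/notifyJobFinish.
    void notifyEvent(String kind) {
        inbox.add(new Event(kind));
    }
}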


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConstructorType.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConstructorType.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConstructorType.java
deleted file mode 100644
index 0f61c86..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ConstructorType.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-public enum ConstructorType {
-    ORDERED_LIST_CONSTRUCTOR,
-    UNORDERED_LIST_CONSTRUCTOR,
-    RECORD_CONSTRUCTOR,
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateDataverseStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateDataverseStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateDataverseStatement.java
deleted file mode 100644
index 356663b..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateDataverseStatement.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class CreateDataverseStatement implements Statement {
-
-    private Identifier dataverseName;
-    private String format;
-    private boolean ifNotExists;
-
-    public CreateDataverseStatement(Identifier dataverseName, String format, boolean ifNotExists) {
-        this.dataverseName = dataverseName;
-        if (format == null)
-            this.format = "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat";
-        else
-            this.format = format;
-        this.ifNotExists = ifNotExists;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public String getFormat() {
-        return format;
-    }
-
-    public boolean getIfNotExists() {
-        return this.ifNotExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.CREATE_DATAVERSE;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        // TODO
-        return null;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}
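
The statement classes removed above and below all follow the same double-dispatch scheme: each Statement reports its Kind and forwards accept(...) to the matching visitor method. A minimal, self-contained sketch of that scheme follows, with hypothetical names; the real interfaces are IAqlExpressionVisitor and IAqlVisitorWithVoidReturn.

// Minimal sketch of the double dispatch used by the AQL Statement classes;
// the names here are illustrative only, not the actual AsterixDB interfaces.
interface StatementVisitor<R, T> {
    R visitCreateDataverse(CreateDataverseSketch stmt, T arg);
}

interface StatementSketch {
    <R, T> R accept(StatementVisitor<R, T> visitor, T arg);
}

class CreateDataverseSketch implements StatementSketch {

    private final String dataverseName;

    CreateDataverseSketch(String dataverseName) {
        this.dataverseName = dataverseName;
    }

    String getDataverseName() {
        return dataverseName;
    }

    @Override
    public <R, T> R accept(StatementVisitor<R, T> visitor, T arg) {
        // The concrete statement selects the visitor method (double dispatch).
        return visitor.visitCreateDataverse(this, arg);
    }
}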

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedPolicyStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedPolicyStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedPolicyStatement.java
deleted file mode 100644
index 9023f72..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedPolicyStatement.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class CreateFeedPolicyStatement implements Statement {
-
-    private final String policyName;
-    private final String sourcePolicyName;
-    private final Map<String, String> properties;
-    private final String sourcePolicyFile;
-    private final String description;
-    private final boolean ifNotExists;
-
-    public CreateFeedPolicyStatement(String policyName, String sourcePolicyName, Map<String, String> properties,
-            String description, boolean ifNotExists) {
-        this.policyName = policyName;
-        this.sourcePolicyName = sourcePolicyName;
-        this.properties = properties;
-        this.description = description;
-        this.ifNotExists = ifNotExists;
-        sourcePolicyFile = null;
-    }
-
-    public CreateFeedPolicyStatement(String policyName, String sourcePolicyFile, String description, boolean ifNotExists) {
-        this.policyName = policyName;
-        this.sourcePolicyName = null;
-        this.sourcePolicyFile = sourcePolicyFile;
-        this.description = description;
-        this.properties = null;
-        this.ifNotExists = ifNotExists;
-    }
-
-    public boolean getIfNotExists() {
-        return this.ifNotExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Statement.Kind.CREATE_FEED_POLICY;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitCreateFeedPolicyStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    public String getPolicyName() {
-        return policyName;
-    }
-
-    public String getSourcePolicyName() {
-        return sourcePolicyName;
-    }
-
-    public Map<String, String> getProperties() {
-        return properties;
-    }
-
-    public String getSourcePolicyFile() {
-        return sourcePolicyFile;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedStatement.java
deleted file mode 100644
index 6eac757..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFeedStatement.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-
-public abstract class CreateFeedStatement implements Statement {
-
-    private final Pair<Identifier, Identifier> qName;
-    private final FunctionSignature appliedFunction;
-    private final boolean ifNotExists;
-
-    public CreateFeedStatement(Pair<Identifier, Identifier> qName, FunctionSignature appliedFunction,
-            boolean ifNotExists) {
-        this.qName = qName;
-        this.appliedFunction = appliedFunction;
-        this.ifNotExists = ifNotExists;
-    }
-
-    public Identifier getDataverseName() {
-        return qName.first;
-    }
-
-    public Identifier getFeedName() {
-        return qName.second;
-    }
-
-    public FunctionSignature getAppliedFunction() {
-        return appliedFunction;
-    }
-
-    public boolean getIfNotExists() {
-        return this.ifNotExists;
-    }
-
-    @Override
-    public abstract Kind getKind();
-
-    @Override
-    public abstract <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException;
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFunctionStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFunctionStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFunctionStatement.java
deleted file mode 100644
index f08d0e9..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateFunctionStatement.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public class CreateFunctionStatement implements Statement {
-
-    private final FunctionSignature signature;
-    private final String functionBody;
-    private final boolean ifNotExists;
-    private final List<String> paramList;
-
-    public FunctionSignature getaAterixFunction() {
-        return signature;
-    }
-
-    public String getFunctionBody() {
-        return functionBody;
-    }
-
-    public CreateFunctionStatement(FunctionSignature signature, List<VarIdentifier> parameterList, String functionBody,
-            boolean ifNotExists) {
-        this.signature = signature;
-        this.functionBody = functionBody;
-        this.ifNotExists = ifNotExists;
-        this.paramList = new ArrayList<String>();
-        for (VarIdentifier varId : parameterList) {
-            this.paramList.add(varId.getValue());
-        }
-    }
-
-    public boolean getIfNotExists() {
-        return this.ifNotExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.CREATE_FUNCTION;
-    }
-
-    public List<String> getParamList() {
-        return paramList;
-    }
-
-    public FunctionSignature getSignature() {
-        return signature;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visit(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
deleted file mode 100644
index e215f13..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-
-public class CreateIndexStatement implements Statement {
-
-    private Identifier indexName;
-    private Identifier dataverseName;
-    private Identifier datasetName;
-    private List<Pair<List<String>, TypeExpression>> fieldExprs = new ArrayList<Pair<List<String>, TypeExpression>>();
-    private IndexType indexType = IndexType.BTREE;
-    private boolean enforced;
-    private boolean ifNotExists;
-
-    // Specific to NGram indexes.
-    private int gramLength;
-
-    public CreateIndexStatement() {
-    }
-
-    public void setGramLength(int gramLength) {
-        this.gramLength = gramLength;
-    }
-
-    public int getGramLength() {
-        return gramLength;
-    }
-
-    public Identifier getIndexName() {
-        return indexName;
-    }
-
-    public void setIndexName(Identifier indexName) {
-        this.indexName = indexName;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public void setDataverseName(Identifier dataverseName) {
-        this.dataverseName = dataverseName;
-    }
-
-    public Identifier getDatasetName() {
-        return datasetName;
-    }
-
-    public void setDatasetName(Identifier datasetName) {
-        this.datasetName = datasetName;
-    }
-
-    public List<Pair<List<String>, TypeExpression>> getFieldExprs() {
-        return fieldExprs;
-    }
-
-    public void addFieldExprPair(Pair<List<String>, TypeExpression> fp) {
-        this.fieldExprs.add(fp);
-    }
-
-    public IndexType getIndexType() {
-        return indexType;
-    }
-
-    public void setIndexType(IndexType indexType) {
-        this.indexType = indexType;
-    }
-
-    public boolean isEnforced() {
-        return enforced;
-    }
-
-    public void setEnforced(boolean isEnforced) {
-        this.enforced = isEnforced;
-    }
-
-    public void setIfNotExists(boolean ifNotExists) {
-        this.ifNotExists = ifNotExists;
-    }
-
-    public boolean getIfNotExists() {
-        return this.ifNotExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.CREATE_INDEX;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitCreateIndexStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}
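
CreateIndexStatement above is populated through setters rather than a constructor. The sketch below shows how such a statement could be filled in, using only the setters visible above; the dataverse, dataset, and index names are made-up values.

import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
import edu.uci.ics.asterix.aql.expression.Identifier;

// Sketch only: builds a CreateIndexStatement via the setters shown above.
// All values are illustrative, not taken from the patch.
class CreateIndexStatementSketch {

    static CreateIndexStatement buildExample() {
        CreateIndexStatement stmt = new CreateIndexStatement();
        stmt.setDataverseName(new Identifier("ExampleDataverse"));
        stmt.setDatasetName(new Identifier("ExampleDataset"));
        stmt.setIndexName(new Identifier("exampleIndex"));
        stmt.setIfNotExists(true);
        // The index type defaults to BTREE; an NGram index would additionally
        // call setIndexType(...) and setGramLength(...).
        return stmt;
    }
}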

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreatePrimaryFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreatePrimaryFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreatePrimaryFeedStatement.java
deleted file mode 100644
index 5710a23..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreatePrimaryFeedStatement.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-
-public class CreatePrimaryFeedStatement extends CreateFeedStatement implements Statement {
-
-    private final String adaptorName;
-    private final Map<String, String> adaptorConfiguration;
-
-    public CreatePrimaryFeedStatement(Pair<Identifier, Identifier> qName, String adaptorName,
-            Map<String, String> adaptorConfiguration, FunctionSignature appliedFunction, boolean ifNotExists) {
-        super(qName, appliedFunction, ifNotExists);
-        this.adaptorName = adaptorName;
-        this.adaptorConfiguration = adaptorConfiguration;
-    }
-
-    public String getAdaptorName() {
-        return adaptorName;
-    }
-
-    public Map<String, String> getAdaptorConfiguration() {
-        return adaptorConfiguration;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.CREATE_PRIMARY_FEED;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitCreatePrimaryFeedStatement(this, arg);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateSecondaryFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateSecondaryFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateSecondaryFeedStatement.java
deleted file mode 100644
index 16d48f1..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateSecondaryFeedStatement.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-
-/**
- * Represents the AQL statement for creating a secondary feed.
- * A secondary feed is one that derives its data from another (primary/secondary) feed.
- */
-public class CreateSecondaryFeedStatement extends CreateFeedStatement implements Statement {
-
-    /** The source feed that provides data for this secondary feed. */
-    private final Pair<Identifier, Identifier> sourceQName;
-
-    public CreateSecondaryFeedStatement(Pair<Identifier, Identifier> qName, Pair<Identifier, Identifier> sourceQName,
-            FunctionSignature appliedFunction, boolean ifNotExists) {
-        super(qName, appliedFunction, ifNotExists);
-        this.sourceQName = sourceQName;
-    }
-
-    public String getSourceFeedDataverse() {
-        return sourceQName.first != null ? sourceQName.first.toString()
-                : getDataverseName() != null ? getDataverseName().getValue() : null;
-    }
-
-    public String getSourceFeedName() {
-        return sourceQName.second != null ? sourceQName.second.toString() : null;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.CREATE_SECONDARY_FEED;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitCreateSecondaryFeedStatement(this, arg);
-    }
-
-}
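
The nested ternary in getSourceFeedDataverse() above is easy to misread. An equivalent unrolled version (a hypothetical helper, not part of the patch) makes the fallback order explicit: the explicitly qualified source dataverse first, then the secondary feed's own dataverse, then null.

import edu.uci.ics.asterix.aql.expression.Identifier;

// Hypothetical helper mirroring CreateSecondaryFeedStatement.getSourceFeedDataverse().
class SourceFeedDataverseSketch {

    static String resolve(Identifier sourceDataverse, Identifier feedDataverse) {
        if (sourceDataverse != null) {
            return sourceDataverse.toString();
        }
        if (feedDataverse != null) {
            return feedDataverse.getValue();
        }
        return null;
    }
}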

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
deleted file mode 100644
index 9d52e01..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-
-public class DatasetDecl implements Statement {
-    protected final Identifier name;
-    protected final Identifier dataverse;
-    protected final Identifier itemTypeName;
-    protected final Identifier nodegroupName;
-    protected final String compactionPolicy;
-    protected final Map<String, String> compactionPolicyProperties;
-    protected final DatasetType datasetType;
-    protected final IDatasetDetailsDecl datasetDetailsDecl;
-    protected final Map<String, String> hints;
-    protected final boolean ifNotExists;
-
-    public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeName, Identifier nodeGroupName,
-            String compactionPolicy, Map<String, String> compactionPolicyProperties, Map<String, String> hints,
-            DatasetType datasetType, IDatasetDetailsDecl idd, boolean ifNotExists) {
-        this.dataverse = dataverse;
-        this.name = name;
-        this.itemTypeName = itemTypeName;
-        this.nodegroupName = nodeGroupName == null ? new Identifier(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)
-                : nodeGroupName;
-        this.compactionPolicy = compactionPolicy;
-        this.compactionPolicyProperties = compactionPolicyProperties;
-        this.hints = hints;
-        this.ifNotExists = ifNotExists;
-        this.datasetType = datasetType;
-        this.datasetDetailsDecl = idd;
-    }
-
-    public boolean getIfNotExists() {
-        return this.ifNotExists;
-    }
-
-    public DatasetType getDatasetType() {
-        return datasetType;
-    }
-
-    public Identifier getName() {
-        return name;
-    }
-
-    public Identifier getItemTypeName() {
-        return itemTypeName;
-    }
-
-    public Identifier getNodegroupName() {
-        return nodegroupName;
-    }
-
-    public String getCompactionPolicy() {
-        return compactionPolicy;
-    }
-
-    public Map<String, String> getCompactionPolicyProperties() {
-        return compactionPolicyProperties;
-    }
-
-    public Map<String, String> getHints() {
-        return hints;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDatasetDecl(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DATASET_DECL;
-    }
-
-    public IDatasetDetailsDecl getDatasetDetailsDecl() {
-        return datasetDetailsDecl;
-    }
-
-    public Identifier getDataverse() {
-        return dataverse;
-    }
-
-}
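
The relocated AQL AST classes in this part of the commit all follow the same double-dispatch visitor contract seen in DatasetDecl above: each node's accept() hands itself to a visitor method, so callers never switch on concrete node types. Below is a generic, self-contained sketch of that pattern; the names are illustrative only and are not the AsterixDB interfaces.

    interface NodeVisitor<R, T> {
        R visitLeaf(Leaf node, T arg);
    }

    interface Node {
        <R, T> R accept(NodeVisitor<R, T> visitor, T arg);
    }

    class Leaf implements Node {
        final String value;
        Leaf(String value) { this.value = value; }

        @Override
        public <R, T> R accept(NodeVisitor<R, T> visitor, T arg) {
            // The node, not the caller, selects which visitor method runs.
            return visitor.visitLeaf(this, arg);
        }
    }

    public class VisitorDemo {
        public static void main(String[] args) {
            Node n = new Leaf("hello");
            String out = n.accept(new NodeVisitor<String, String>() {
                @Override
                public String visitLeaf(Leaf node, String prefix) {
                    return prefix + node.value;
                }
            }, ">> ");
            System.out.println(out); // prints ">> hello"
        }
    }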

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDecl.java
deleted file mode 100644
index a462b1c..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDecl.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class DataverseDecl implements Statement {
-
-    private Identifier dataverseName;
-
-    public DataverseDecl(Identifier dataverseName) {
-        this.dataverseName = dataverseName;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DATAVERSE_DECL;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        // TODO
-        return null;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDropStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDropStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDropStatement.java
deleted file mode 100644
index 4e193d6..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DataverseDropStatement.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class DataverseDropStatement implements Statement {
-
-    private Identifier dataverseName;
-    private boolean ifExists;
-
-    public DataverseDropStatement(Identifier dataverseName, boolean ifExists) {
-        this.dataverseName = dataverseName;
-        this.ifExists = ifExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DATAVERSE_DROP;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public boolean getIfExists() {
-        return ifExists;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDataverseDropStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
deleted file mode 100644
index bf410b5..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class DeleteStatement implements Statement {
-
-    private VariableExpr vars;
-    private Identifier dataverseName;
-    private Identifier datasetName;
-    private Expression condition;
-    private int varCounter;
-    private List<String> dataverses;
-    private List<String> datasets;
-
-    public DeleteStatement(VariableExpr vars, Identifier dataverseName, Identifier datasetName, Expression condition,
-            int varCounter, List<String> dataverses, List<String> datasets) {
-        this.vars = vars;
-        this.dataverseName = dataverseName;
-        this.datasetName = datasetName;
-        this.condition = condition;
-        this.varCounter = varCounter;
-        this.dataverses = dataverses;
-        this.datasets = datasets;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DELETE;
-    }
-
-    public VariableExpr getVariableExpr() {
-        return vars;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getDatasetName() {
-        return datasetName;
-    }
-
-    public Expression getCondition() {
-        return condition;
-    }
-
-    public int getVarCounter() {
-        return varCounter;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDeleteStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    public List<String> getDataverses() {
-        return dataverses;
-    }
-
-    public List<String> getDatasets() {
-        return datasets;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DisconnectFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DisconnectFeedStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DisconnectFeedStatement.java
deleted file mode 100644
index b140cd6..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DisconnectFeedStatement.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-
-public class DisconnectFeedStatement implements Statement {
-
-    private final Identifier dataverseName;
-    private final Identifier feedName;
-    private final Identifier datasetName;
-
-    public DisconnectFeedStatement(Identifier dataverseName, Identifier feedName, Identifier datasetName) {
-        this.feedName = feedName;
-        this.datasetName = datasetName;
-        this.dataverseName = dataverseName;
-    }
-
-    public DisconnectFeedStatement(Pair<Identifier, Identifier> feedNameComponent,
-            Pair<Identifier, Identifier> datasetNameComponent) {
-        if (feedNameComponent.first != null && datasetNameComponent.first != null
-                && !feedNameComponent.first.getValue().equals(datasetNameComponent.first.getValue())) {
-            throw new IllegalArgumentException("Dataverse for source feed and target dataset do not match");
-        }
-        this.dataverseName = feedNameComponent.first != null ? feedNameComponent.first
-                : datasetNameComponent.first != null ? datasetNameComponent.first : null;
-        this.datasetName = datasetNameComponent.second;
-        this.feedName = feedNameComponent.second;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getFeedName() {
-        return feedName;
-    }
-
-    public Identifier getDatasetName() {
-        return datasetName;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DISCONNECT_FEED;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDisconnectFeedStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public String toString() {
-        return "disconnect feed " + feedName + " from " + datasetName;
-    }
-
-}
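
The two-argument constructor above resolves the statement's dataverse from whichever name component carries one and rejects a mismatch between the feed's and the dataset's dataverses. A self-contained illustration of that resolution rule, with plain strings standing in for Identifier (illustrative code, not part of this commit):

    public class DataverseResolutionDemo {
        static String resolve(String feedDataverse, String datasetDataverse) {
            if (feedDataverse != null && datasetDataverse != null
                    && !feedDataverse.equals(datasetDataverse)) {
                throw new IllegalArgumentException(
                        "Dataverse for source feed and target dataset do not match");
            }
            // Prefer whichever side names a dataverse; null if neither does.
            return feedDataverse != null ? feedDataverse : datasetDataverse;
        }

        public static void main(String[] args) {
            System.out.println(resolve("feeds", null));    // feeds
            System.out.println(resolve(null, "feeds"));    // feeds
            System.out.println(resolve("feeds", "feeds")); // feeds
            // resolve("feeds", "tweets") would throw IllegalArgumentException
        }
    }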

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DistinctClause.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DistinctClause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DistinctClause.java
deleted file mode 100644
index 0b96fbc..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DistinctClause.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class DistinctClause implements Clause {
-
-    private List<Expression> distinctByExprs;
-
-    public DistinctClause(List<Expression> distinctByExpr) {
-        this.distinctByExprs = distinctByExpr;
-    }
-
-    public List<Expression> getDistinctByExpr() {
-        return distinctByExprs;
-    }
-
-    @Override
-    public ClauseType getClauseType() {
-        return ClauseType.DISTINCT_BY_CLAUSE;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDistinctClause(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DropStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DropStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DropStatement.java
deleted file mode 100644
index 2d661d4..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DropStatement.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class DropStatement implements Statement {
-
-    private final Identifier dataverseName;
-    private final Identifier datasetName;
-    private boolean ifExists;
-
-    public DropStatement(Identifier dataverseName, Identifier datasetName, boolean ifExists) {
-        this.dataverseName = dataverseName;
-        this.datasetName = datasetName;
-        this.ifExists = ifExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DATASET_DROP;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getDatasetName() {
-        return datasetName;
-    }
-
-    public boolean getIfExists() {
-        return ifExists;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDropStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java
deleted file mode 100644
index 69d5201..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.Map;
-
-public class ExternalDetailsDecl implements IDatasetDetailsDecl {
-    private Map<String, String> properties;
-    private String adapter;
-
-    public void setAdapter(String adapter) {
-        this.adapter = adapter;
-    }
-
-    public void setProperties(Map<String, String> properties) {
-        this.properties = properties;
-    }
-
-    public String getAdapter() {
-        return adapter;
-    }
-
-    public Map<String, String> getProperties() {
-        return properties;
-    }
-
-    @Override
-    public boolean isTemp() {
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FLWOGRExpression.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FLWOGRExpression.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FLWOGRExpression.java
deleted file mode 100644
index 3a29f25..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FLWOGRExpression.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Clause.ClauseType;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class FLWOGRExpression implements Expression {
-    private List<Clause> clauseList;
-    private Expression returnExpr;
-
-    public FLWOGRExpression() {
-        super();
-    }
-
-    public FLWOGRExpression(List<Clause> clauseList, Expression returnExpr) {
-        super();
-        this.clauseList = clauseList;
-        this.returnExpr = returnExpr;
-    }
-
-    public List<Clause> getClauseList() {
-        return clauseList;
-    }
-
-    public void setClauseList(List<Clause> clauseList) {
-        this.clauseList = clauseList;
-    }
-
-    public Expression getReturnExpr() {
-        return returnExpr;
-    }
-
-    public void setReturnExpr(Expression returnExpr) {
-        this.returnExpr = returnExpr;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.FLWOGR_EXPRESSION;
-    }
-
-    public boolean noForClause() {
-        for (Clause c : clauseList) {
-            if (c.getClauseType() == ClauseType.FOR_CLAUSE) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitFlworExpression(this, arg);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
deleted file mode 100644
index 0c773ba..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public class FeedDetailsDecl extends InternalDetailsDecl {
-    private final Map<String, String> configuration;
-    private final String adapterFactoryClassname;
-    private final FunctionSignature functionSignature;
-
-    public FeedDetailsDecl(String adapterFactoryClassname, Map<String, String> configuration,
-            FunctionSignature signature, List<List<String>> partitioningExpr, List<String> filterField) {
-        super(partitioningExpr, false, filterField, false);
-        this.adapterFactoryClassname = adapterFactoryClassname;
-        this.configuration = configuration;
-        this.functionSignature = signature;
-    }
-
-    public Map<String, String> getConfiguration() {
-        return configuration;
-    }
-
-    public String getAdapterFactoryClassname() {
-        return adapterFactoryClassname;
-    }
-
-    public FunctionSignature getSignature() {
-        return functionSignature;
-    }
-
-    public FunctionSignature getFunctionSignature() {
-        return functionSignature;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDropStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDropStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDropStatement.java
deleted file mode 100644
index 956a5b8..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDropStatement.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class FeedDropStatement implements Statement {
-
-    private final Identifier dataverseName;
-    private final Identifier feedName;
-    private boolean ifExists;
-
-    public FeedDropStatement(Identifier dataverseName, Identifier feedName, boolean ifExists) {
-        this.dataverseName = dataverseName;
-        this.feedName = feedName;
-        this.ifExists = ifExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DROP_FEED;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getFeedName() {
-        return feedName;
-    }
-
-    public boolean getIfExists() {
-        return ifExists;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDropFeedStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedPolicyDropStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedPolicyDropStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedPolicyDropStatement.java
deleted file mode 100644
index bcf216f..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedPolicyDropStatement.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class FeedPolicyDropStatement implements Statement {
-
-    private final Identifier dataverseName;
-    private final Identifier policyName;
-    private boolean ifExists;
-
-    public FeedPolicyDropStatement(Identifier dataverseName, Identifier policyName, boolean ifExists) {
-        this.dataverseName = dataverseName;
-        this.policyName = policyName;
-        this.ifExists = ifExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.DROP_FEED_POLICY;
-    }
-
-    public Identifier getDataverseName() {
-        return dataverseName;
-    }
-
-    public Identifier getPolicyName() {
-        return policyName;
-    }
-
-    public boolean getIfExists() {
-        return ifExists;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitDropFeedPolicyStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldAccessor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldAccessor.java
deleted file mode 100644
index 041dbb7..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldAccessor.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class FieldAccessor extends AbstractAccessor {
-    private Identifier ident;
-
-    public FieldAccessor(Expression expr, Identifier ident) {
-        super(expr);
-        this.ident = ident;
-    }
-
-    public Identifier getIdent() {
-        return ident;
-    }
-
-    public void setIdent(Identifier ident) {
-        this.ident = ident;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.FIELD_ACCESSOR_EXPRESSION;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitFieldAccessor(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldBinding.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldBinding.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldBinding.java
deleted file mode 100644
index 99844ca..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FieldBinding.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-
-public class FieldBinding {
-    private Expression leftExpr;
-    private Expression rightExpr;
-
-    public FieldBinding() {
-    }
-
-    public FieldBinding(Expression leftExpr, Expression rightExpr) {
-        super();
-        this.leftExpr = leftExpr;
-        this.rightExpr = rightExpr;
-    }
-
-    public Expression getLeftExpr() {
-        return leftExpr;
-    }
-
-    public void setLeftExpr(Expression leftExpr) {
-        this.leftExpr = leftExpr;
-    }
-
-    public Expression getRightExpr() {
-        return rightExpr;
-    }
-
-    public void setRightExpr(Expression rightExpr) {
-        this.rightExpr = rightExpr;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ForClause.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ForClause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ForClause.java
deleted file mode 100644
index 2bcb504..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ForClause.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class ForClause implements Clause {
-    private VariableExpr varExpr;
-    private VariableExpr posExpr;
-    private Expression inExpr;
-
-    public ForClause() {
-        super();
-    }
-
-    public ForClause(VariableExpr varExpr, Expression inExpr) {
-        super();
-        this.varExpr = varExpr;
-        this.inExpr = inExpr;
-    }
-
-    public VariableExpr getVarExpr() {
-        return varExpr;
-    }
-
-    public void setVarExpr(VariableExpr varExpr) {
-        this.varExpr = varExpr;
-    }
-
-    public Expression getInExpr() {
-        return inExpr;
-    }
-
-    public void setInExpr(Expression inExpr) {
-        this.inExpr = inExpr;
-    }
-
-    @Override
-    public ClauseType getClauseType() {
-        return ClauseType.FOR_CLAUSE;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitForClause(this, arg);
-    }
-
-    public void setPosExpr(VariableExpr posExpr) {
-        this.posExpr = posExpr;
-    }
-
-    public VariableExpr getPosVarExpr() {
-        return posExpr;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDecl.java
deleted file mode 100644
index c8bebe1..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDecl.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public class FunctionDecl implements Statement {
-    private FunctionSignature signature;
-    private List<VarIdentifier> paramList;
-    private Expression funcBody;
-
-    public FunctionDecl(FunctionSignature signature, List<VarIdentifier> paramList, Expression funcBody) {
-        this.signature = signature;
-        this.paramList = paramList;
-        this.funcBody = funcBody;
-    }
-
-    public FunctionSignature getSignature() {
-        return signature;
-    }
-
-    public List<VarIdentifier> getParamList() {
-        return paramList;
-    }
-
-    public Expression getFuncBody() {
-        return funcBody;
-    }
-
-    public void setFuncBody(Expression funcBody) {
-        this.funcBody = funcBody;
-    }
-
-    public void setSignature(FunctionSignature signature) {
-        this.signature = signature;
-    }
-
-    public void setParamList(List<VarIdentifier> paramList) {
-        this.paramList = paramList;
-    }
-
-    @Override
-    public int hashCode() {
-        return signature.hashCode();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        return (o instanceof FunctionDecl && ((FunctionDecl) o).getSignature().equals(signature));
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.FUNCTION_DECL;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitFunctionDecl(this, arg);
-    }
-}
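
FunctionDecl above bases both hashCode() and equals() solely on the function signature, so hash-keyed collections treat two declarations with the same signature as duplicates even when their bodies differ. A self-contained illustration of that contract (stand-in class, not AsterixDB code):

    import java.util.HashSet;
    import java.util.Set;

    public class SignatureEqualityDemo {
        static final class Decl {
            final String signature; // stands in for FunctionSignature
            final String body;      // the body does not participate in equality
            Decl(String signature, String body) { this.signature = signature; this.body = body; }
            @Override public int hashCode() { return signature.hashCode(); }
            @Override public boolean equals(Object o) {
                return o instanceof Decl && ((Decl) o).signature.equals(signature);
            }
        }

        public static void main(String[] args) {
            Set<Decl> decls = new HashSet<>();
            decls.add(new Decl("ns.len@1", "string-length($s)"));
            decls.add(new Decl("ns.len@1", "count(string-to-codepoints($s))"));
            System.out.println(decls.size()); // 1 -- same signature, so the second add is ignored
        }
    }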

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDropStatement.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDropStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDropStatement.java
deleted file mode 100644
index 3dcf746..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FunctionDropStatement.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Statement;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
-
-public class FunctionDropStatement implements Statement {
-
-    private final FunctionSignature signature;
-    private boolean ifExists;
-
-    public FunctionDropStatement(FunctionSignature signature, boolean ifExists) {
-        this.signature = signature;
-        this.ifExists = ifExists;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.FUNCTION_DROP;
-    }
-
-    public FunctionSignature getFunctionSignature() {
-        return signature;
-    }
-
-    public boolean getIfExists() {
-        return ifExists;
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitFunctionDropStatement(this, arg);
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GbyVariableExpressionPair.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GbyVariableExpressionPair.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GbyVariableExpressionPair.java
deleted file mode 100644
index 3dff292..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GbyVariableExpressionPair.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-
-public class GbyVariableExpressionPair {
-    private VariableExpr var; // can be null
-    private Expression expr;
-
-    public GbyVariableExpressionPair() {
-        super();
-    }
-
-    public GbyVariableExpressionPair(VariableExpr var, Expression expr) {
-        super();
-        this.var = var;
-        this.expr = expr;
-    }
-
-    public VariableExpr getVar() {
-        return var;
-    }
-
-    public void setVar(VariableExpr var) {
-        this.var = var;
-    }
-
-    public Expression getExpr() {
-        return expr;
-    }
-
-    public void setExpr(Expression expr) {
-        this.expr = expr;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GroupbyClause.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GroupbyClause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GroupbyClause.java
deleted file mode 100644
index 331c391..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/GroupbyClause.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class GroupbyClause implements Clause {
-
-    private List<GbyVariableExpressionPair> gbyPairList;
-    private List<GbyVariableExpressionPair> decorPairList;
-    private List<VariableExpr> withVarList;
-    private boolean hashGroupByHint;
-
-    public GroupbyClause() {
-    }
-
-    public GroupbyClause(List<GbyVariableExpressionPair> gbyPairList, List<GbyVariableExpressionPair> decorPairList,
-            List<VariableExpr> withVarList, boolean hashGroupByHint) {
-        this.gbyPairList = gbyPairList;
-        this.setDecorPairList(decorPairList);
-        this.withVarList = withVarList;
-        this.hashGroupByHint = hashGroupByHint;
-    }
-
-    public List<GbyVariableExpressionPair> getGbyPairList() {
-        return gbyPairList;
-    }
-
-    public void setGbyPairList(List<GbyVariableExpressionPair> vePairList) {
-        this.gbyPairList = vePairList;
-    }
-
-    public List<VariableExpr> getWithVarList() {
-        return withVarList;
-    }
-
-    public void setWithVarList(List<VariableExpr> withVarList) {
-        this.withVarList = withVarList;
-    }
-
-    @Override
-    public ClauseType getClauseType() {
-        return ClauseType.GROUP_BY_CLAUSE;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitGroupbyClause(this, arg);
-    }
-
-    public void setDecorPairList(List<GbyVariableExpressionPair> decorPairList) {
-        this.decorPairList = decorPairList;
-    }
-
-    public List<GbyVariableExpressionPair> getDecorPairList() {
-        return decorPairList;
-    }
-
-    public void setHashGroupByHint(boolean hashGroupByHint) {
-        this.hashGroupByHint = hashGroupByHint;
-    }
-
-    public boolean hasHashGroupByHint() {
-        return hashGroupByHint;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/HdfsPathInfo.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/HdfsPathInfo.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/HdfsPathInfo.java
deleted file mode 100644
index 0fab39f..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/HdfsPathInfo.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-public class HdfsPathInfo {
-    public String hdfsPath;
-
-    public HdfsPathInfo(String hdfsPath) {
-        this.hdfsPath = hdfsPath;
-    }
-
-    public String getHdfsPathInfo() {
-        return hdfsPath;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java
deleted file mode 100644
index b86a5f6..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-
-public interface IDatasetDetailsDecl {
-
-    public boolean isTemp();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Identifier.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Identifier.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Identifier.java
deleted file mode 100644
index 0d8dfcb..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/Identifier.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-public class Identifier {
-    protected String value;
-
-    public Identifier() {
-    }
-
-    public Identifier(String value) {
-        this.value = value;
-    }
-
-    public final String getValue() {
-        return value;
-    }
-
-    public final void setValue(String value) {
-        this.value = value;
-    }
-
-    public String toString() {
-        return value;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (!(o instanceof Identifier)) {
-            return false;
-        } else {
-            Identifier i = (Identifier) o;
-            return this.value.equals(i.value);
-        }
-    }
-
-    @Override
-    public int hashCode() {
-        return value.hashCode();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IfExpr.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IfExpr.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IfExpr.java
deleted file mode 100644
index 04814a1..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IfExpr.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class IfExpr implements Expression {
-    private Expression condExpr;
-    private Expression thenExpr;
-    private Expression elseExpr;
-
-    public IfExpr() {
-    }
-
-    public IfExpr(Expression condExpr, Expression thenExpr, Expression elseExpr) {
-        this.condExpr = condExpr;
-        this.thenExpr = thenExpr;
-        this.elseExpr = elseExpr;
-    }
-
-    public Expression getCondExpr() {
-        return condExpr;
-    }
-
-    public void setCondExpr(Expression condExpr) {
-        this.condExpr = condExpr;
-    }
-
-    public Expression getThenExpr() {
-        return thenExpr;
-    }
-
-    public void setThenExpr(Expression thenExpr) {
-        this.thenExpr = thenExpr;
-    }
-
-    public Expression getElseExpr() {
-        return elseExpr;
-    }
-
-    public void setElseExpr(Expression elseExpr) {
-        this.elseExpr = elseExpr;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.IF_EXPRESSION;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitIfExpr(this, arg);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexAccessor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexAccessor.java
deleted file mode 100644
index 0b5e2fd..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexAccessor.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class IndexAccessor extends AbstractAccessor {
-    private boolean any;
-    private Expression indexExpr;
-
-    public final static int ANY = -1;
-
-    public IndexAccessor(Expression expr, Expression indexExpr) {
-        super(expr);
-        if (indexExpr == null)
-            this.any = true;
-        this.indexExpr = indexExpr;
-    }
-
-    public boolean isAny() {
-        return any;
-    }
-
-    public void setAny(boolean any) {
-        this.any = any;
-    }
-
-    public Expression getIndexExpr() {
-        return indexExpr;
-    }
-
-    public void setIndexExpr(Expression indexExpr) {
-        this.indexExpr = indexExpr;
-    }
-
-    @Override
-    public Kind getKind() {
-        return Kind.INDEX_ACCESSOR_EXPRESSION;
-    }
-
-    @Override
-    public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
-        visitor.visit(this, arg);
-    }
-
-    @Override
-    public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
-        return visitor.visitIndexAccessor(this, arg);
-    }
-}
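
The deleted IfExpr and IndexAccessor classes above follow the same double-dispatch pattern as the rest of the AQL expression tree: each node implements accept(...) and forwards to the matching visit method on the visitor. The sketch below mirrors that shape with simplified, hypothetical types (MiniExpression, MiniVisitor, MiniIfExpr, MiniLiteral); it is not the real Expression/IAqlExpressionVisitor API, only a self-contained illustration of how a visitor's result type R flows back through accept.

    // Hypothetical, self-contained mirror of the accept/visit double dispatch shown above.
    interface MiniVisitor<R> {
        R visitIfExpr(MiniIfExpr expr);
        R visitLiteral(MiniLiteral lit);
    }

    interface MiniExpression {
        <R> R accept(MiniVisitor<R> visitor);
    }

    final class MiniLiteral implements MiniExpression {
        final Object value;
        MiniLiteral(Object value) { this.value = value; }
        @Override
        public <R> R accept(MiniVisitor<R> visitor) { return visitor.visitLiteral(this); }
    }

    final class MiniIfExpr implements MiniExpression {
        final MiniExpression cond, thenExpr, elseExpr;
        MiniIfExpr(MiniExpression cond, MiniExpression thenExpr, MiniExpression elseExpr) {
            this.cond = cond; this.thenExpr = thenExpr; this.elseExpr = elseExpr;
        }
        @Override
        public <R> R accept(MiniVisitor<R> visitor) { return visitor.visitIfExpr(this); }
    }

    public class VisitorSketch {
        public static void main(String[] args) {
            MiniExpression e = new MiniIfExpr(new MiniLiteral(true), new MiniLiteral(1), new MiniLiteral(2));
            // A toy "pretty printer" visitor; the real AQL visitors also thread an extra argument T.
            String printed = e.accept(new MiniVisitor<String>() {
                @Override public String visitIfExpr(MiniIfExpr x) {
                    return "if(" + x.cond.accept(this) + ", " + x.thenExpr.accept(this) + ", " + x.elseExpr.accept(this) + ")";
                }
                @Override public String visitLiteral(MiniLiteral x) { return String.valueOf(x.value); }
            });
            System.out.println(printed); // prints: if(true, 1, 2)
        }
    }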

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDecl.java
deleted file mode 100644
index bbeb9c4..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IndexDecl.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.aql.expression;
-
-public class IndexDecl extends CreateIndexStatement {
-    @Override
-    public Kind getKind() {
-        return Kind.INDEX_DECL;
-    }
-}


[05/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
new file mode 100644
index 0000000..d91e820
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
@@ -0,0 +1,408 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.external.IndexingConstants;
+import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.resource.ExternalRTreeLocalResourceMetadata;
+import edu.uci.ics.asterix.transaction.management.resource.LSMRTreeLocalResourceMetadata;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.base.SinkRuntimeFactory;
+import edu.uci.ics.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.storage.common.file.LocalResource;
+
+@SuppressWarnings("rawtypes")
+public class SecondaryRTreeOperationsHelper extends SecondaryIndexOperationsHelper {
+
+    protected IPrimitiveValueProviderFactory[] valueProviderFactories;
+    protected int numNestedSecondaryKeyFields;
+    protected ATypeTag keyType;
+    protected int[] primaryKeyFields;
+    protected int[] rtreeFields;
+
+    protected SecondaryRTreeOperationsHelper(PhysicalOptimizationConfig physOptConf,
+            IAsterixPropertiesProvider propertiesProvider) {
+        super(physOptConf, propertiesProvider);
+    }
+
+    @Override
+    public JobSpecification buildCreationJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        IIndexDataflowHelperFactory indexDataflowHelperFactory;
+        ILocalResourceFactoryProvider localResourceFactoryProvider;
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
+            ILocalResourceMetadata localResourceMetadata = new LSMRTreeLocalResourceMetadata(secondaryTypeTraits,
+                    secondaryComparatorFactories, primaryComparatorFactories, valueProviderFactories,
+                    RTreePolicyType.RTREE, AqlMetadataProvider.proposeLinearizer(keyType,
+                            secondaryComparatorFactories.length), dataset.getDatasetId(), mergePolicyFactory,
+                    mergePolicyFactoryProperties, filterTypeTraits, filterCmpFactories, rtreeFields, primaryKeyFields,
+                    secondaryFilterFields);
+            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
+                    LocalResource.LSMRTreeResource);
+            indexDataflowHelperFactory = new LSMRTreeDataflowHelperFactory(valueProviderFactories,
+                    RTreePolicyType.RTREE, primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(
+                            dataset.getDatasetId()), mergePolicyFactory, mergePolicyFactoryProperties,
+                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
+                    AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
+                    storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, primaryKeyFields,
+                    filterTypeTraits, filterCmpFactories, secondaryFilterFields, !temp);
+        } else {
+            // External dataset
+            // Prepare a LocalResourceMetadata which will be stored in NC's local resource repository
+            ILocalResourceMetadata localResourceMetadata = new ExternalRTreeLocalResourceMetadata(secondaryTypeTraits,
+                    secondaryComparatorFactories, ExternalIndexingOperations.getBuddyBtreeComparatorFactories(),
+                    valueProviderFactories, RTreePolicyType.RTREE, AqlMetadataProvider.proposeLinearizer(keyType,
+                            secondaryComparatorFactories.length), dataset.getDatasetId(), mergePolicyFactory,
+                    mergePolicyFactoryProperties, primaryKeyFields);
+            localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(localResourceMetadata,
+                    LocalResource.ExternalRTreeResource);
+
+            indexDataflowHelperFactory = new ExternalRTreeDataflowHelperFactory(valueProviderFactories,
+                    RTreePolicyType.RTREE, ExternalIndexingOperations.getBuddyBtreeComparatorFactories(),
+                    mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
+                            dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
+                            secondaryComparatorFactories.length), storageProperties.getBloomFilterFalsePositiveRate(),
+                    new int[] { numNestedSecondaryKeyFields },
+                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+        }
+
+        TreeIndexCreateOperatorDescriptor secondaryIndexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                secondaryFileSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, null,
+                indexDataflowHelperFactory, localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
+                secondaryPartitionConstraint);
+        spec.addRoot(secondaryIndexCreateOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    @Override
+    protected int getNumSecondaryKeys() {
+        return numNestedSecondaryKeyFields;
+    }
+
+    @Override
+    protected void setSecondaryRecDescAndComparators(IndexType indexType, List<List<String>> secondaryKeyFields,
+            List<IAType> secondaryKeyTypes, int gramLength, AqlMetadataProvider metadata) throws AlgebricksException,
+            AsterixException {
+        int numSecondaryKeys = secondaryKeyFields.size();
+        if (numSecondaryKeys != 1) {
+            throw new AsterixException(
+                    "Cannot use "
+                            + numSecondaryKeys
+                            + " fields as a key for the R-tree index. There can be only one field as a key for the R-tree index.");
+        }
+        Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
+                secondaryKeyFields.get(0), itemType);
+        IAType spatialType = spatialTypePair.first;
+        anySecondaryKeyIsNullable = spatialTypePair.second;
+        if (spatialType == null) {
+            throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
+        }
+        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+        numNestedSecondaryKeyFields = numDimensions * 2;
+        int recordColumn = dataset.getDatasetType() == DatasetType.INTERNAL ? numPrimaryKeys : 0;
+        secondaryFieldAccessEvalFactories = metadata.getFormat().createMBRFactory(
+                isEnforcingKeyTypes ? enforcedItemType : itemType, secondaryKeyFields.get(0), recordColumn,
+                numDimensions, filterFieldName);
+        secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
+        valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
+        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys
+                + numNestedSecondaryKeyFields + numFilterFields];
+        ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
+        secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
+        ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
+        IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
+        keyType = nestedKeyType.getTypeTag();
+        for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
+            ISerializerDeserializer keySerde = AqlSerializerDeserializerProvider.INSTANCE
+                    .getSerializerDeserializer(nestedKeyType);
+            secondaryRecFields[i] = keySerde;
+            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
+                    nestedKeyType, true);
+            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
+            valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
+
+        }
+        // Add serializers and comparators for primary index fields.
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            for (int i = 0; i < numPrimaryKeys; i++) {
+                secondaryRecFields[numNestedSecondaryKeyFields + i] = primaryRecDesc.getFields()[i];
+                secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryRecDesc.getTypeTraits()[i];
+                enforcedRecFields[i] = primaryRecDesc.getFields()[i];
+                enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i];
+            }
+        } else {
+            for (int i = 0; i < numPrimaryKeys; i++) {
+                secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
+                secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
+                enforcedRecFields[i] = IndexingConstants.getSerializerDeserializer(i);
+                enforcedTypeTraits[i] = IndexingConstants.getTypeTraits(i);
+            }
+        }
+        enforcedRecFields[numPrimaryKeys] = AqlSerializerDeserializerProvider.INSTANCE
+                .getSerializerDeserializer(itemType);
+        enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits);
+        if (numFilterFields > 0) {
+            rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
+            for (int i = 0; i < rtreeFields.length; i++) {
+                rtreeFields[i] = i;
+            }
+
+            Pair<IAType, Boolean> typePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
+            IAType type = typePair.first;
+            ISerializerDeserializer serde = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(type);
+            secondaryRecFields[numPrimaryKeys + numNestedSecondaryKeyFields] = serde;
+        }
+        secondaryRecDesc = new RecordDescriptor(secondaryRecFields);
+        primaryKeyFields = new int[numPrimaryKeys];
+        for (int i = 0; i < primaryKeyFields.length; i++) {
+            primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
+        }
+    }
+
+    @Override
+    public JobSpecification buildLoadingJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            // Create dummy key provider for feeding the primary index scan. 
+            AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
+
+            // Create primary index scan op.
+            BTreeSearchOperatorDescriptor primaryScanOp = createPrimaryIndexScanOp(spec);
+
+            // Assign op.
+            AbstractOperatorDescriptor sourceOp = primaryScanOp;
+            if (isEnforcingKeyTypes) {
+                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
+                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
+            }
+            AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec, sourceOp,
+                    numNestedSecondaryKeyFields);
+
+            // If any of the secondary fields are nullable, then add a select op that filters nulls.
+            AlgebricksMetaOperatorDescriptor selectOp = null;
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                selectOp = createFilterNullsSelectOp(spec, numNestedSecondaryKeyFields);
+            }
+
+            // Sort by secondary keys.
+            ExternalSortOperatorDescriptor sortOp = createSortOp(spec,
+                    new IBinaryComparatorFactory[] { AqlMetadataProvider.proposeLinearizer(keyType,
+                            secondaryComparatorFactories.length) }, secondaryRecDesc);
+
+            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+            // Create secondary RTree bulk load op.
+            TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(
+                    spec,
+                    numNestedSecondaryKeyFields,
+                    new LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
+                            primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                            mergePolicyFactory, mergePolicyFactoryProperties,
+                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
+                                    secondaryComparatorFactories.length), storageProperties
+                                    .getBloomFilterFalsePositiveRate(), rtreeFields, primaryKeyFields,
+                            filterTypeTraits, filterCmpFactories, secondaryFilterFields, !temp),
+                    GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
+            AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
+                    new IPushRuntimeFactory[] { new SinkRuntimeFactory() }, new RecordDescriptor[] {});
+            // Connect the operators.
+            spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
+            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
+                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
+            } else {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
+            }
+            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
+            spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
+            spec.addRoot(metaOp);
+            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        } else {
+            // External dataset
+            /*
+             * In case of external data, this method is used to build loading jobs for both initial load on index creation
+             * and transaction load on dataset refresh
+             */
+            // Create external indexing scan operator
+            ExternalDataScanOperatorDescriptor primaryScanOp = createExternalIndexingOp(spec);
+            AbstractOperatorDescriptor sourceOp = primaryScanOp;
+            if (isEnforcingKeyTypes) {
+                sourceOp = createCastOp(spec, primaryScanOp, numSecondaryKeys, dataset.getDatasetType());
+                spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
+            }
+            // Assign op.
+            AlgebricksMetaOperatorDescriptor asterixAssignOp = createExternalAssignOp(spec, numNestedSecondaryKeyFields);
+
+            // If any of the secondary fields are nullable, then add a select op that filters nulls.
+            AlgebricksMetaOperatorDescriptor selectOp = null;
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                selectOp = createFilterNullsSelectOp(spec, numSecondaryKeys);
+            }
+
+            // Sort by secondary keys.
+            ExternalSortOperatorDescriptor sortOp = createSortOp(spec,
+                    new IBinaryComparatorFactory[] { AqlMetadataProvider.proposeLinearizer(keyType,
+                            secondaryComparatorFactories.length) }, secondaryRecDesc);
+            AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+
+            // Create the dataflow helper factory
+            ExternalRTreeDataflowHelperFactory dataflowHelperFactory = new ExternalRTreeDataflowHelperFactory(
+                    valueProviderFactories, RTreePolicyType.RTREE, primaryComparatorFactories, mergePolicyFactory,
+                    mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
+                    AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
+                    storageProperties.getBloomFilterFalsePositiveRate(), new int[] { numNestedSecondaryKeyFields },
+                    ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+            // Create secondary RTree bulk load op.
+            IOperatorDescriptor root;
+            AbstractTreeIndexOperatorDescriptor secondaryBulkLoadOp;
+            if (externalFiles != null) {
+                // Transaction load
+                secondaryBulkLoadOp = createExternalIndexBulkModifyOp(spec, numNestedSecondaryKeyFields,
+                        dataflowHelperFactory, GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
+                root = secondaryBulkLoadOp;
+            } else {
+                // Initial load
+                secondaryBulkLoadOp = createTreeIndexBulkLoadOp(spec, numNestedSecondaryKeyFields,
+                        dataflowHelperFactory, GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
+                AlgebricksMetaOperatorDescriptor metaOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
+                        new IPushRuntimeFactory[] { new SinkRuntimeFactory() },
+                        new RecordDescriptor[] { secondaryRecDesc });
+                spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBulkLoadOp, 0, metaOp, 0);
+                root = metaOp;
+            }
+
+            spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, asterixAssignOp, 0);
+            if (anySecondaryKeyIsNullable || isEnforcingKeyTypes) {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, selectOp, 0);
+                spec.connect(new OneToOneConnectorDescriptor(spec), selectOp, 0, sortOp, 0);
+            } else {
+                spec.connect(new OneToOneConnectorDescriptor(spec), asterixAssignOp, 0, sortOp, 0);
+            }
+            spec.connect(new OneToOneConnectorDescriptor(spec), sortOp, 0, secondaryBulkLoadOp, 0);
+            spec.addRoot(root);
+            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        }
+        return spec;
+    }
+
+    @Override
+    public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+
+        AsterixStorageProperties storageProperties = propertiesProvider.getStorageProperties();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        LSMTreeIndexCompactOperatorDescriptor compactOp;
+        if (dataset.getDatasetType() == DatasetType.INTERNAL) {
+            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
+                    secondaryComparatorFactories, secondaryBloomFilterKeyFields, new LSMRTreeDataflowHelperFactory(
+                            valueProviderFactories, RTreePolicyType.RTREE, primaryComparatorFactories,
+                            new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), mergePolicyFactory,
+                            mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
+                                    dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
+                                    secondaryComparatorFactories.length),
+                            storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, primaryKeyFields,
+                            filterTypeTraits, filterCmpFactories, secondaryFilterFields, !temp),
+                    NoOpOperationCallbackFactory.INSTANCE);
+        } else {
+            // External dataset
+            compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, secondaryFileSplitProvider, secondaryTypeTraits,
+                    secondaryComparatorFactories, secondaryBloomFilterKeyFields,
+                    new ExternalRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
+                            primaryComparatorFactories, mergePolicyFactory, mergePolicyFactoryProperties,
+                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            LSMRTreeIOOperationCallbackFactory.INSTANCE, AqlMetadataProvider.proposeLinearizer(keyType,
+                                    secondaryComparatorFactories.length), storageProperties
+                                    .getBloomFilterFalsePositiveRate(), new int[] { numNestedSecondaryKeyFields },
+                            ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true),
+                    NoOpOperationCallbackFactory.INSTANCE);
+        }
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                secondaryPartitionConstraint);
+        spec.addRoot(compactOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+}
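
In setSecondaryRecDescAndComparators above, the single spatial key expands to numNestedSecondaryKeyFields = numDimensions * 2 because an R-tree entry stores a minimum bounding rectangle (MBR): one lower and one upper bound per dimension. The standalone sketch below only illustrates that 2*d expansion on plain double arrays; it is a hypothetical illustration, not the format's createMBRFactory evaluators, and the real index's field layout is whatever those evaluators emit.

    // Hypothetical illustration of the 2*d MBR key expansion behind numNestedSecondaryKeyFields.
    public class MbrSketch {
        /** Returns {min0, ..., min(d-1), max0, ..., max(d-1)} over the given points. */
        static double[] computeMbr(double[][] points, int numDimensions) {
            double[] mbr = new double[numDimensions * 2];
            for (int d = 0; d < numDimensions; d++) {
                mbr[d] = Double.POSITIVE_INFINITY;                 // lower bound of dimension d
                mbr[numDimensions + d] = Double.NEGATIVE_INFINITY; // upper bound of dimension d
            }
            for (double[] p : points) {
                for (int d = 0; d < numDimensions; d++) {
                    mbr[d] = Math.min(mbr[d], p[d]);
                    mbr[numDimensions + d] = Math.max(mbr[numDimensions + d], p[d]);
                }
            }
            return mbr;
        }

        public static void main(String[] args) {
            double[][] polygon = { { 1.0, 4.0 }, { 3.0, 2.0 }, { 2.0, 5.0 } };
            // A 2-dimensional value yields 4 nested key fields, matching numDimensions * 2.
            System.out.println(java.util.Arrays.toString(computeMbr(polygon, 2))); // [1.0, 2.0, 3.0, 5.0]
        }
    }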

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
new file mode 100644
index 0000000..8e633a9
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IClusterEventsSubscriber;
+import edu.uci.ics.asterix.common.api.IClusterManagementWork;
+import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
+import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import edu.uci.ics.asterix.feeds.CentralFeedManager;
+import edu.uci.ics.asterix.file.ExternalIndexingOperations;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
+import edu.uci.ics.asterix.metadata.entities.ExternalFile;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.asterix.common.api.IClusterManagementWork.ClusterState;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class AsterixGlobalRecoveryManager implements IClusterEventsSubscriber {
+
+    private static ClusterState state;
+    private static final Logger LOGGER = Logger.getLogger(AsterixGlobalRecoveryManager.class.getName());
+    private HyracksConnection hcc;
+    public static AsterixGlobalRecoveryManager INSTANCE;
+
+    public AsterixGlobalRecoveryManager(HyracksConnection hcc) throws Exception {
+        state = AsterixClusterProperties.INSTANCE.getState();
+        this.hcc = hcc;
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
+        state = AsterixClusterProperties.INSTANCE.getState();
+        AsterixClusterProperties.INSTANCE.setGlobalRecoveryCompleted(false);
+        return null;
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
+        // perform global recovery if state changed to active
+        final ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
+        boolean needToRecover = !newState.equals(state) && (newState == ClusterState.ACTIVE);
+        if (needToRecover) {
+            Thread recoveryThread = new Thread(new Runnable() {
+                @Override
+                public void run() {
+                    LOGGER.info("Starting AsterixDB's Global Recovery");
+                    MetadataTransactionContext mdTxnCtx = null;
+                    try {
+                        Thread.sleep(4000);
+                        MetadataManager.INSTANCE.init();
+                        // Loop over datasets
+                        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                        List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
+                        for (Dataverse dataverse : dataverses) {
+                            if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
+                                AqlMetadataProvider metadataProvider = new AqlMetadataProvider(dataverse, CentralFeedManager.getInstance());
+                                List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
+                                        dataverse.getDataverseName());
+                                for (Dataset dataset : datasets) {
+                                    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+                                        // External dataset
+                                        // Get indexes
+                                        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
+                                                dataset.getDataverseName(), dataset.getDatasetName());
+                                        if (indexes.size() > 0) {
+                                            // Get the state of the dataset
+                                            ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset
+                                                    .getDatasetDetails();
+                                            ExternalDatasetTransactionState datasetState = dsd.getState();
+                                            if (datasetState == ExternalDatasetTransactionState.BEGIN) {
+                                                List<ExternalFile> files = MetadataManager.INSTANCE
+                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
+                                                // if presumed abort, roll backward
+                                                // 1. delete all pending files
+                                                for (ExternalFile file : files) {
+                                                    if (file.getPendingOp() != ExternalFilePendingOp.PENDING_NO_OP) {
+                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                                    }
+                                                }
+                                                // 2. clean artifacts in NCs
+                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                                                JobSpecification jobSpec = ExternalIndexingOperations.buildAbortOp(
+                                                        dataset, indexes, metadataProvider);
+                                                executeHyracksJob(jobSpec);
+                                                // 3. correct the dataset state
+                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
+                                                        .setState(ExternalDatasetTransactionState.COMMIT);
+                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                                            } else if (datasetState == ExternalDatasetTransactionState.READY_TO_COMMIT) {
+                                                List<ExternalFile> files = MetadataManager.INSTANCE
+                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
+                                                // if ready to commit, roll forward
+                                                // 1. commit indexes in NCs
+                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                                                JobSpecification jobSpec = ExternalIndexingOperations.buildRecoverOp(
+                                                        dataset, indexes, metadataProvider);
+                                                executeHyracksJob(jobSpec);
+                                                // 2. add pending files in metadata
+                                                for (ExternalFile file : files) {
+                                                    if (file.getPendingOp() == ExternalFilePendingOp.PENDING_ADD_OP) {
+                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                                        file.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
+                                                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
+                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
+                                                        // find original file
+                                                        for (ExternalFile originalFile : files) {
+                                                            if (originalFile.getFileName().equals(file.getFileName())) {
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        file);
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        originalFile);
+                                                                break;
+                                                            }
+                                                        }
+                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
+                                                        // find original file
+                                                        for (ExternalFile originalFile : files) {
+                                                            if (originalFile.getFileName().equals(file.getFileName())) {
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        file);
+                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+                                                                        originalFile);
+                                                                originalFile.setSize(file.getSize());
+                                                                MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
+                                                                        originalFile);
+                                                            }
+                                                        }
+                                                    }
+                                                }
+                                                // 3. correct the dataset state
+                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
+                                                        .setState(ExternalDatasetTransactionState.COMMIT);
+                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                    } catch (Exception e) {
+                        // This needs to be fixed <-- Needs to shut down the system -->
+                        /*
+                         * Note: Throwing this illegal state exception will terminate this thread
+                         * and feeds listeners will not be notified.
+                         */
+                        LOGGER.severe("Global recovery was not completed successfully: " + e);
+                        try {
+                            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+                        } catch (Exception e1) {
+                            if (LOGGER.isLoggable(Level.SEVERE)) {
+                                LOGGER.severe("Exception in aborting: " + e1.getMessage());
+                            }
+                            throw new IllegalStateException(e1);
+                        }
+                    }
+                    AsterixClusterProperties.INSTANCE.setGlobalRecoveryCompleted(true);
+                    LOGGER.info("Global Recovery Completed");
+                }
+            });
+            state = newState;
+            recoveryThread.start();
+        }
+        return null;
+    }
+
+    private void executeHyracksJob(JobSpecification spec) throws Exception {
+        spec.setMaxReattempts(0);
+        JobId jobId = hcc.startJob(spec);
+        hcc.waitForCompletion(jobId);
+    }
+
+    @Override
+    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
+        // Do nothing
+    }
+
+    @Override
+    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
+        // Do nothing?
+    }
+}
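
The recovery loop above treats an external dataset left in state BEGIN as a presumed abort (drop the pending files, run the abort job, reset the state) and a dataset in READY_TO_COMMIT as a commit to roll forward (run the recover job, apply the pending add/drop/append operations, reset the state); both paths end in COMMIT. The sketch below is a simplified, hypothetical rendering of just that decision: the enum and callback names are placeholders, and the real code additionally rewrites ExternalFile metadata and executes Hyracks jobs inside a metadata transaction.

    // Hypothetical sketch of the per-dataset recovery decision made by the global recovery thread.
    public class ExternalDatasetRecoverySketch {

        enum TxnState { BEGIN, READY_TO_COMMIT, COMMIT }

        interface RecoveryActions {
            void rollBackward();   // e.g. drop pending files and run the abort job
            void rollForward();    // e.g. run the recover job and apply pending add/drop/append ops
        }

        static TxnState recover(TxnState state, RecoveryActions actions) {
            switch (state) {
                case BEGIN:            // presumed abort: undo whatever the refresh started
                    actions.rollBackward();
                    return TxnState.COMMIT;
                case READY_TO_COMMIT:  // indexes were built: finish the commit
                    actions.rollForward();
                    return TxnState.COMMIT;
                default:               // COMMIT: nothing to do
                    return state;
            }
        }

        public static void main(String[] args) {
            TxnState after = recover(TxnState.BEGIN, new RecoveryActions() {
                @Override public void rollBackward() { System.out.println("rolling backward"); }
                @Override public void rollForward() { System.out.println("rolling forward"); }
            });
            System.out.println("dataset state -> " + after); // dataset state -> COMMIT
        }
    }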

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
new file mode 100644
index 0000000..ac760bd
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixStateDumpHandler.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import edu.uci.ics.hyracks.api.application.IStateDumpHandler;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
+
+public class AsterixStateDumpHandler implements IStateDumpHandler {
+    private final String nodeId;
+    private final Path dumpPath;
+    private final ILifeCycleComponentManager lccm;
+
+    public AsterixStateDumpHandler(String nodeId, String dumpPath, ILifeCycleComponentManager lccm) {
+        this.nodeId = nodeId;
+        this.dumpPath = Paths.get(dumpPath);
+        this.lccm = lccm;
+    }
+
+    @Override
+    public void dumpState(OutputStream os) throws IOException {
+        dumpPath.toFile().mkdirs();
+        File df = dumpPath.resolve(nodeId + "-" + System.currentTimeMillis() + ".dump").toFile();
+        try (FileOutputStream fos = new FileOutputStream(df)) {
+            lccm.dumpState(fos);
+        }
+        os.write(df.getAbsolutePath().getBytes("UTF-8"));
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
new file mode 100644
index 0000000..99d883c
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.component.AbstractLifeCycle;
+
+import edu.uci.ics.asterix.api.http.servlet.APIServlet;
+import edu.uci.ics.asterix.api.http.servlet.AQLAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.ConnectorAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.DDLAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.FeedServlet;
+import edu.uci.ics.asterix.api.http.servlet.QueryAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.QueryResultAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.QueryStatusAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.ShutdownAPIServlet;
+import edu.uci.ics.asterix.api.http.servlet.UpdateAPIServlet;
+import edu.uci.ics.asterix.common.api.AsterixThreadFactory;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.feeds.api.ICentralFeedManager;
+import edu.uci.ics.asterix.feeds.CentralFeedManager;
+import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
+import edu.uci.ics.asterix.metadata.bootstrap.AsterixStateProxy;
+import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
+import edu.uci.ics.hyracks.api.application.ICCApplicationEntryPoint;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
+
+public class CCApplicationEntryPoint implements ICCApplicationEntryPoint {
+    private static final Logger LOGGER = Logger.getLogger(CCApplicationEntryPoint.class.getName());
+
+    private static final String HYRACKS_CONNECTION_ATTR = "edu.uci.ics.asterix.HYRACKS_CONNECTION";
+
+    private Server webServer;
+    private Server jsonAPIServer;
+    private Server feedServer;
+    private ICentralFeedManager centralFeedManager;
+
+    private static IAsterixStateProxy proxy;
+    private ICCApplicationContext appCtx;
+
+    @Override
+    public void start(ICCApplicationContext ccAppCtx, String[] args) throws Exception {
+        this.appCtx = ccAppCtx;
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting Asterix cluster controller");
+        }
+
+        appCtx.setThreadFactory(new AsterixThreadFactory(new LifeCycleComponentManager()));
+        AsterixAppContextInfo.initialize(appCtx, getNewHyracksClientConnection());
+
+        proxy = AsterixStateProxy.registerRemoteObject();
+        appCtx.setDistributedState(proxy);
+
+        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
+        MetadataManager.INSTANCE = new MetadataManager(proxy, metadataProperties);
+
+        AsterixAppContextInfo.getInstance().getCCApplicationContext()
+        .addJobLifecycleListener(FeedLifecycleListener.INSTANCE);
+
+        AsterixExternalProperties externalProperties = AsterixAppContextInfo.getInstance().getExternalProperties();
+        setupWebServer(externalProperties);
+        webServer.start();
+
+        setupJSONAPIServer(externalProperties);
+        jsonAPIServer.start();
+        ExternalLibraryBootstrap.setUpExternaLibraries(false);
+
+        setupFeedServer(externalProperties);
+        feedServer.start();
+        centralFeedManager = CentralFeedManager.getInstance(); 
+        centralFeedManager.start();
+
+        waitUntilServerStart(webServer);
+        waitUntilServerStart(jsonAPIServer);
+        waitUntilServerStart(feedServer);
+
+        AsterixGlobalRecoveryManager.INSTANCE = new AsterixGlobalRecoveryManager(
+                (HyracksConnection) getNewHyracksClientConnection());
+        ClusterManager.INSTANCE.registerSubscriber(AsterixGlobalRecoveryManager.INSTANCE);
+
+        ccAppCtx.addClusterLifecycleListener(ClusterLifecycleListener.INSTANCE);
+    }
+
+    private void waitUntilServerStart(AbstractLifeCycle webServer) throws Exception {
+        while (!webServer.isStarted()) {
+            if (webServer.isFailed()) {
+                throw new Exception("Server failed to start");
+            }
+            wait(1000);
+        }
+    }
+
+    @Override
+    public void stop() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Stopping Asterix cluster controller");
+        }
+        AsterixStateProxy.unregisterRemoteObject();
+
+        webServer.stop();
+        jsonAPIServer.stop();
+        feedServer.stop();
+    }
+
+    private IHyracksClientConnection getNewHyracksClientConnection() throws Exception {
+        String strIP = appCtx.getCCContext().getClusterControllerInfo().getClientNetAddress();
+        int port = appCtx.getCCContext().getClusterControllerInfo().getClientNetPort();
+        return new HyracksConnection(strIP, port);
+    }
+
+    private void setupWebServer(AsterixExternalProperties externalProperties) throws Exception {
+
+        webServer = new Server(externalProperties.getWebInterfacePort());
+
+        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+        context.setContextPath("/");
+
+        IHyracksClientConnection hcc = getNewHyracksClientConnection();
+        context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
+
+        webServer.setHandler(context);
+        context.addServlet(new ServletHolder(new APIServlet()), "/*");
+    }
+
+    private void setupJSONAPIServer(AsterixExternalProperties externalProperties) throws Exception {
+        jsonAPIServer = new Server(externalProperties.getAPIServerPort());
+
+        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+        context.setContextPath("/");
+
+        IHyracksClientConnection hcc = getNewHyracksClientConnection();
+        context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
+
+        jsonAPIServer.setHandler(context);
+        context.addServlet(new ServletHolder(new QueryAPIServlet()), "/query");
+        context.addServlet(new ServletHolder(new QueryStatusAPIServlet()), "/query/status");
+        context.addServlet(new ServletHolder(new QueryResultAPIServlet()), "/query/result");
+        context.addServlet(new ServletHolder(new UpdateAPIServlet()), "/update");
+        context.addServlet(new ServletHolder(new DDLAPIServlet()), "/ddl");
+        context.addServlet(new ServletHolder(new AQLAPIServlet()), "/aql");
+        context.addServlet(new ServletHolder(new ConnectorAPIServlet()), "/connector");
+        context.addServlet(new ServletHolder(new ShutdownAPIServlet()), "/admin/shutdown");
+    }
+
+    private void setupFeedServer(AsterixExternalProperties externalProperties) throws Exception {
+        feedServer = new Server(externalProperties.getFeedServerPort());
+
+        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+        context.setContextPath("/");
+
+        IHyracksClientConnection hcc = getNewHyracksClientConnection();
+        context.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
+
+        feedServer.setHandler(context);
+        context.addServlet(new ServletHolder(new FeedServlet()), "/");
+
+        // add paths here
+    }
+}
\ No newline at end of file
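
The three setup*Server methods above all follow the same embedded-Jetty wiring: create a Server on a configured port, attach a ServletContextHandler rooted at "/", stash the Hyracks connection as a context attribute, and register servlets under their paths. As a self-contained point of reference (not AsterixDB code), a minimal sketch of that pattern looks like the following; the port number and HelloServlet are illustrative placeholders.

    import java.io.IOException;

    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.servlet.ServletContextHandler;
    import org.eclipse.jetty.servlet.ServletHolder;

    public class EmbeddedJettySketch {

        // Trivial servlet standing in for APIServlet, QueryAPIServlet, FeedServlet, etc.
        public static class HelloServlet extends HttpServlet {
            @Override
            protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
                resp.setContentType("text/plain");
                resp.getWriter().println("hello");
            }
        }

        public static void main(String[] args) throws Exception {
            Server server = new Server(8080); // hypothetical port; AsterixDB reads its ports from AsterixExternalProperties
            ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
            context.setContextPath("/");
            context.addServlet(new ServletHolder(new HelloServlet()), "/hello");
            server.setHandler(context);
            server.start(); // same start/isStarted/isFailed lifecycle polled by waitUntilServerStart above
            server.join();
        }
    }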

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
new file mode 100644
index 0000000..c7821b2
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IClusterEventsSubscriber;
+import edu.uci.ics.asterix.common.api.IClusterManagementWork;
+import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse;
+import edu.uci.ics.asterix.common.api.IClusterManagementWorkResponse.Status;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWorkResponse;
+import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
+import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWorkResponse;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.hyracks.api.application.IClusterLifecycleListener;
+
+public class ClusterLifecycleListener implements IClusterLifecycleListener {
+
+    private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName());
+
+    private static final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<Set<IClusterManagementWork>>();
+
+    private static ClusterWorkExecutor eventHandler = new ClusterWorkExecutor(workRequestQueue);
+
+    private static List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<IClusterManagementWorkResponse>();
+
+    public static ClusterLifecycleListener INSTANCE = new ClusterLifecycleListener();
+
+    private ClusterLifecycleListener() {
+        Thread t = new Thread(eventHandler);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Starting cluster event handler");
+        }
+        t.start();
+    }
+
+    public enum ClusterEventType {
+        NODE_JOIN,
+        NODE_FAILURE
+    }
+
+    @Override
+    public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("NC: " + nodeId + " joined");
+        }
+        AsterixClusterProperties.INSTANCE.addNCConfiguration(nodeId, ncConfiguration);
+        Set<String> nodeAddition = new HashSet<String>();
+        nodeAddition.add(nodeId);
+        updateProgress(ClusterEventType.NODE_JOIN, nodeAddition);
+        Set<IClusterEventsSubscriber> subscribers = ClusterManager.INSTANCE.getRegisteredClusterEventSubscribers();
+        Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+        for (IClusterEventsSubscriber sub : subscribers) {
+            Set<IClusterManagementWork> workRequest = sub.notifyNodeJoin(nodeId);
+            if (workRequest != null && !workRequest.isEmpty()) {
+                work.addAll(workRequest);
+            }
+        }
+        if (!work.isEmpty()) {
+            executeWorkSet(work);
+        }
+
+    }
+
+    public void notifyNodeFailure(Set<String> deadNodeIds) {
+        for (String deadNode : deadNodeIds) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("NC: " + deadNode + " left");
+            }
+            AsterixClusterProperties.INSTANCE.removeNCConfiguration(deadNode);
+        }
+        updateProgress(ClusterEventType.NODE_FAILURE, deadNodeIds);
+        Set<IClusterEventsSubscriber> subscribers = ClusterManager.INSTANCE.getRegisteredClusterEventSubscribers();
+        Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+        for (IClusterEventsSubscriber sub : subscribers) {
+            Set<IClusterManagementWork> workRequest = sub.notifyNodeFailure(deadNodeIds);
+            if (workRequest != null && !workRequest.isEmpty()) {
+                work.addAll(workRequest);
+            }
+        }
+        if (!work.isEmpty()) {
+            executeWorkSet(work);
+        }
+    }
+
+    private void updateProgress(ClusterEventType eventType, Set<String> nodeIds) {
+        List<IClusterManagementWorkResponse> completedResponses = new ArrayList<IClusterManagementWorkResponse>();
+        boolean isComplete = false;
+        for (IClusterManagementWorkResponse resp : pendingWorkResponses) {
+            switch (eventType) {
+                case NODE_FAILURE:
+                    // guard against responses of the other type that may also be pending
+                    if (resp instanceof RemoveNodeWorkResponse) {
+                        isComplete = ((RemoveNodeWorkResponse) resp).updateProgress(nodeIds);
+                        if (isComplete) {
+                            resp.setStatus(Status.SUCCESS);
+                            resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp);
+                            completedResponses.add(resp);
+                        }
+                    }
+                    break;
+
+                case NODE_JOIN:
+                    if (resp instanceof AddNodeWorkResponse) {
+                        isComplete = ((AddNodeWorkResponse) resp).updateProgress(nodeIds.iterator().next());
+                        if (isComplete) {
+                            resp.setStatus(Status.SUCCESS);
+                            resp.getWork().getSourceSubscriber().notifyRequestCompletion(resp);
+                            completedResponses.add(resp);
+                        }
+                    }
+                    break;
+            }
+        }
+        pendingWorkResponses.removeAll(completedResponses);
+    }
+
+    private void executeWorkSet(Set<IClusterManagementWork> workSet) {
+        int nodesToAdd = 0;
+        Set<String> nodesToRemove = new HashSet<String>();
+        Set<AddNodeWork> nodeAdditionRequests = new HashSet<AddNodeWork>();
+        Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+        for (IClusterManagementWork w : workSet) {
+            switch (w.getClusterManagementWorkType()) {
+                case ADD_NODE:
+                    if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodesRequested()) {
+                        nodesToAdd = ((AddNodeWork) w).getNumberOfNodesRequested();
+                    }
+                    nodeAdditionRequests.add((AddNodeWork) w);
+                    break;
+                case REMOVE_NODE:
+                    nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
+                    nodeRemovalRequests.add(w);
+                    RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
+                    pendingWorkResponses.add(response);
+                    break;
+            }
+        }
+
+        List<String> addedNodes = new ArrayList<String>();
+        String asterixInstanceName = AsterixClusterProperties.INSTANCE.getCluster().getInstanceName();
+        for (int i = 0; i < nodesToAdd; i++) {
+            Node node = AsterixClusterProperties.INSTANCE.getAvailableSubstitutionNode();
+            if (node != null) {
+                try {
+                    ClusterManager.INSTANCE.addNode(node);
+                    addedNodes.add(asterixInstanceName + "_" + node.getId());
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Added NC at:" + node.getId());
+                    }
+                } catch (AsterixException e) {
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Unable to add NC at:" + node.getId());
+                    }
+                    e.printStackTrace();
+                }
+            } else {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unable to add NC: no more available nodes");
+                }
+
+            }
+        }
+
+        for (AddNodeWork w : nodeAdditionRequests) {
+            int n = w.getNumberOfNodesRequested();
+            List<String> nodesToBeAddedForWork = new ArrayList<String>();
+            for (int i = 0; i < n && i < addedNodes.size(); i++) {
+                nodesToBeAddedForWork.add(addedNodes.get(i));
+            }
+            if (nodesToBeAddedForWork.isEmpty()) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Unable to satisfy request by " + w);
+                }
+                AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
+                response.setStatus(Status.FAILURE);
+                w.getSourceSubscriber().notifyRequestCompletion(response);
+
+            } else {
+                AddNodeWorkResponse response = new AddNodeWorkResponse(w, nodesToBeAddedForWork);
+                pendingWorkResponses.add(response);
+            }
+        }
+
+    }
+}
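
The updateProgress method above retires a pending work response once every node it was waiting on has been accounted for. The bookkeeping behind that is simply "remove the node from each pending request's expected set; a request completes when its set drains". A minimal standalone sketch of that pattern, using only java.util and invented names (not the AsterixDB work/response classes), is:

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.Map;
    import java.util.Set;

    public class PendingWorkTracker {

        // request id -> node ids the request is still waiting on
        private final Map<String, Set<String>> pendingNodesByRequest = new HashMap<String, Set<String>>();

        public void register(String requestId, Set<String> expectedNodes) {
            pendingNodesByRequest.put(requestId, new HashSet<String>(expectedNodes));
        }

        // Called on a node join/failure event; returns the ids of requests that
        // just completed, mirroring completedResponses in updateProgress above.
        public Set<String> onNodeEvent(String nodeId) {
            Set<String> completed = new HashSet<String>();
            Iterator<Map.Entry<String, Set<String>>> it = pendingNodesByRequest.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry<String, Set<String>> entry = it.next();
                entry.getValue().remove(nodeId);
                if (entry.getValue().isEmpty()) {
                    completed.add(entry.getKey());
                    it.remove();
                }
            }
            return completed;
        }
    }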

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
new file mode 100644
index 0000000..a4faea7
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.IClusterManagementWork;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.event.schema.cluster.Node;
+import edu.uci.ics.asterix.metadata.cluster.AddNodeWork;
+import edu.uci.ics.asterix.metadata.cluster.ClusterManager;
+import edu.uci.ics.asterix.metadata.cluster.RemoveNodeWork;
+import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+
+public class ClusterWorkExecutor implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(ClusterWorkExecutor.class.getName());
+
+    private final LinkedBlockingQueue<Set<IClusterManagementWork>> inbox;
+
+    public ClusterWorkExecutor(LinkedBlockingQueue<Set<IClusterManagementWork>> inbox) {
+        this.inbox = inbox;
+    }
+
+    @Override
+    public void run() {
+        while (true) {
+            try {
+                Set<IClusterManagementWork> workSet = inbox.take();
+                int nodesToAdd = 0;
+                Set<String> nodesToRemove = new HashSet<String>();
+                Set<IClusterManagementWork> nodeAdditionRequests = new HashSet<IClusterManagementWork>();
+                Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+                for (IClusterManagementWork w : workSet) {
+                    switch (w.getClusterManagementWorkType()) {
+                        case ADD_NODE:
+                            if (nodesToAdd < ((AddNodeWork) w).getNumberOfNodesRequested()) {
+                                nodesToAdd = ((AddNodeWork) w).getNumberOfNodesRequested();
+                            }
+                            nodeAdditionRequests.add(w);
+                            break;
+                        case REMOVE_NODE:
+                            nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
+                            nodeRemovalRequests.add(w);
+                            break;
+                    }
+                }
+
+                Set<Node> addedNodes = new HashSet<Node>();
+                for (int i = 0; i < nodesToAdd; i++) {
+                    Node node = AsterixClusterProperties.INSTANCE.getAvailableSubstitutionNode();
+                    if (node != null) {
+                        try {
+                            ClusterManager.INSTANCE.addNode(node);
+                            addedNodes.add(node);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Added NC at:" + node.getId());
+                            }
+                        } catch (AsterixException e) {
+                            if (LOGGER.isLoggable(Level.WARNING)) {
+                                LOGGER.warning("Unable to add NC at:" + node.getId());
+                            }
+                            e.printStackTrace();
+                        }
+                    } else {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to add NC: no more available nodes");
+                        }
+                    }
+                }
+
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.SEVERE)) {
+                    LOGGER.severe("interruped" + e.getMessage());
+                }
+                throw new IllegalStateException(e);
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.SEVERE)) {
+                    LOGGER.severe("Unexpected exception in handling cluster event" + e.getMessage());
+                }
+            }
+
+        }
+    }
+
+}
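
ClusterWorkExecutor above is a single long-lived consumer thread that blocks on a LinkedBlockingQueue of work sets. For readers unfamiliar with that idiom, here is a minimal producer/consumer sketch using only java.util.concurrent; the String "work items" and class name are placeholders rather than the AsterixDB work types.

    import java.util.concurrent.LinkedBlockingQueue;

    public class QueueConsumerSketch {

        public static void main(String[] args) throws InterruptedException {
            final LinkedBlockingQueue<String> inbox = new LinkedBlockingQueue<String>();

            Thread consumer = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        while (true) {
                            String work = inbox.take(); // blocks until work arrives, like inbox.take() above
                            System.out.println("processing " + work);
                        }
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // restore the flag and let the thread exit
                    }
                }
            });
            consumer.start();

            inbox.put("add-node");
            inbox.put("remove-node");
            Thread.sleep(100);    // give the consumer a moment to drain the queue
            consumer.interrupt(); // unblocks take() via InterruptedException
            consumer.join();
        }
    }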

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
new file mode 100755
index 0000000..31aca3d
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
@@ -0,0 +1,318 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Unmarshaller;
+
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.external.library.ExternalLibrary;
+import edu.uci.ics.asterix.external.library.LibraryAdapter;
+import edu.uci.ics.asterix.external.library.LibraryFunction;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.api.IMetadataEntity;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
+import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter.AdapterType;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.metadata.feeds.AdapterIdentifier;
+import edu.uci.ics.asterix.metadata.functions.ExternalLibraryManager;
+import edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat;
+
+public class ExternalLibraryBootstrap {
+
+    private static Logger LOGGER = Logger.getLogger(ExternalLibraryBootstrap.class.getName());
+
+    public static void setUpExternaLibraries(boolean isMetadataNode) throws Exception {
+
+        Map<String, List<String>> uninstalledLibs = null;
+        if (isMetadataNode) {
+            uninstalledLibs = uninstallLibraries();
+        }
+
+        File installLibDir = getLibraryInstallDir();
+        if (installLibDir.exists()) {
+            for (String dataverse : installLibDir.list()) {
+                File dataverseDir = new File(installLibDir, dataverse);
+                String[] libraries = dataverseDir.list();
+                for (String library : libraries) {
+                    registerLibrary(dataverse, library, isMetadataNode, installLibDir);
+                    if (isMetadataNode) {
+                        File libraryDir = new File(installLibDir.getAbsolutePath() + File.separator + dataverse
+                                + File.separator + library);
+                        installLibraryIfNeeded(dataverse, libraryDir, uninstalledLibs);
+                    }
+                }
+            }
+        }
+    }
+
+    private static Map<String, List<String>> uninstallLibraries() throws Exception {
+        Map<String, List<String>> uninstalledLibs = new HashMap<String, List<String>>();
+        File uninstallLibDir = getLibraryUninstallDir();
+        String[] uninstallLibNames;
+        if (uninstallLibDir.exists()) {
+            uninstallLibNames = uninstallLibDir.list();
+            for (String uninstallLibName : uninstallLibNames) {
+                String[] components = uninstallLibName.split("\\.");
+                String dataverse = components[0];
+                String libName = components[1];
+                uninstallLibrary(dataverse, libName);
+                new File(uninstallLibDir, uninstallLibName).delete();
+                List<String> uinstalledLibsInDv = uninstalledLibs.get(dataverse);
+                if (uinstalledLibsInDv == null) {
+                    uinstalledLibsInDv = new ArrayList<String>();
+                    uninstalledLibs.put(dataverse, uinstalledLibsInDv);
+                }
+                uinstalledLibsInDv.add(libName);
+            }
+        }
+        return uninstalledLibs;
+    }
+
+    private static boolean uninstallLibrary(String dataverse, String libraryName) throws AsterixException,
+            RemoteException, ACIDException {
+        MetadataTransactionContext mdTxnCtx = null;
+        try {
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
+            if (dv == null) {
+                return false;
+            }
+
+            edu.uci.ics.asterix.metadata.entities.Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx,
+                    dataverse, libraryName);
+            if (library == null) {
+                return false;
+            }
+
+            List<edu.uci.ics.asterix.metadata.entities.Function> functions = MetadataManager.INSTANCE
+                    .getDataverseFunctions(mdTxnCtx, dataverse);
+            for (edu.uci.ics.asterix.metadata.entities.Function function : functions) {
+                if (function.getName().startsWith(libraryName + "#")) {
+                    MetadataManager.INSTANCE.dropFunction(mdTxnCtx, new FunctionSignature(dataverse,
+                            function.getName(), function.getArity()));
+                }
+            }
+
+            List<edu.uci.ics.asterix.metadata.entities.DatasourceAdapter> adapters = MetadataManager.INSTANCE
+                    .getDataverseAdapters(mdTxnCtx, dataverse);
+            for (edu.uci.ics.asterix.metadata.entities.DatasourceAdapter adapter : adapters) {
+                if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
+                    MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
+                }
+            }
+
+            MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            throw new AsterixException(e);
+        }
+        return true;
+    }
+
+    // Each element of a library is installed as part of a transaction. Any
+    // failure in installing an element does not affect installation of other
+    // libraries
+    private static void installLibraryIfNeeded(String dataverse, final File libraryDir,
+            Map<String, List<String>> uninstalledLibs) throws Exception {
+
+        String libraryName = libraryDir.getName().trim();
+        List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
+        boolean wasUninstalled = uninstalledLibsInDv != null && uninstalledLibsInDv.contains(libraryName);
+
+        MetadataTransactionContext mdTxnCtx = null;
+        MetadataManager.INSTANCE.acquireWriteLatch();
+        try {
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            edu.uci.ics.asterix.metadata.entities.Library libraryInMetadata = MetadataManager.INSTANCE.getLibrary(
+                    mdTxnCtx, dataverse, libraryName);
+            if (libraryInMetadata != null && !wasUninstalled) {
+                return;
+            }
+
+            String[] libraryDescriptors = libraryDir.list(new FilenameFilter() {
+                @Override
+                public boolean accept(File dir, String name) {
+                    return name.endsWith(".xml");
+                }
+            });
+
+            if (libraryDescriptors.length == 0) {
+                throw new Exception("No library descriptor defined");
+            } else if (libraryDescriptors.length > 1) {
+                throw new Exception("More than one library descriptor defined");
+            }
+
+            ExternalLibrary library = getLibrary(new File(libraryDir + File.separator + libraryDescriptors[0]));
+
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
+            if (dv == null) {
+                MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverse,
+                        NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, IMetadataEntity.PENDING_NO_OP));
+            }
+            if (library.getLibraryFunctions() != null) {
+                for (LibraryFunction function : library.getLibraryFunctions().getLibraryFunction()) {
+                    String[] fargs = function.getArguments().trim().split(",");
+                    List<String> args = new ArrayList<String>();
+                    for (String arg : fargs) {
+                        args.add(arg);
+                    }
+                    edu.uci.ics.asterix.metadata.entities.Function f = new edu.uci.ics.asterix.metadata.entities.Function(
+                            dataverse, libraryName + "#" + function.getName().trim(), args.size(), args, function
+                                    .getReturnType().trim(), function.getDefinition().trim(), library.getLanguage()
+                                    .trim(), function.getFunctionType().trim());
+                    MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Installed function: " + libraryName + "#" + function.getName().trim());
+                    }
+                }
+            }
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Installed functions contain in library :" + libraryName);
+            }
+
+            if (library.getLibraryAdapters() != null) {
+                for (LibraryAdapter adapter : library.getLibraryAdapters().getLibraryAdapter()) {
+                    String adapterFactoryClass = adapter.getFactoryClass().trim();
+                    String adapterName = libraryName + "#" + adapter.getName().trim();
+                    AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
+                    DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass, AdapterType.EXTERNAL);
+                    MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Installed adapter: " + adapterName);
+                    }
+                }
+            }
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Installed adapters contain in library :" + libraryName);
+            }
+
+            MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new edu.uci.ics.asterix.metadata.entities.Library(dataverse,
+                    libraryName));
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Added library " + libraryName + "to Metadata");
+            }
+
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.SEVERE)) {
+                LOGGER.info("Exception in installing library " + libraryName);
+            }
+            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+        } finally {
+            MetadataManager.INSTANCE.releaseWriteLatch();
+        }
+    }
+
+    private static void registerLibrary(String dataverse, String libraryName, boolean isMetadataNode, File installLibDir)
+            throws Exception {
+        ClassLoader classLoader = getLibraryClassLoader(dataverse, libraryName);
+        ExternalLibraryManager.registerLibraryClassLoader(dataverse, libraryName, classLoader);
+    }
+
+    private static ExternalLibrary getLibrary(File libraryXMLPath) throws Exception {
+        JAXBContext configCtx = JAXBContext.newInstance(ExternalLibrary.class);
+        Unmarshaller unmarshaller = configCtx.createUnmarshaller();
+        ExternalLibrary library = (ExternalLibrary) unmarshaller.unmarshal(libraryXMLPath);
+        return library;
+    }
+
+    private static ClassLoader getLibraryClassLoader(String dataverse, String libraryName) throws Exception {
+
+        File installDir = getLibraryInstallDir();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Installing lirbary " + libraryName + " in dataverse " + dataverse + "."
+                    + " Install Directory: " + installDir.getAbsolutePath());
+        }
+
+        File libDir = new File(installDir.getAbsolutePath() + File.separator + dataverse + File.separator + libraryName);
+        FilenameFilter jarFileFilter = new FilenameFilter() {
+            public boolean accept(File dir, String name) {
+                return name.endsWith(".jar");
+            }
+        };
+
+        String[] jarsInLibDir = libDir.list(jarFileFilter);
+        if (jarsInLibDir.length > 1) {
+            throw new Exception("Incorrect library structure: found multiple library jars");
+        }
+        if (jarsInLibDir.length < 0) {
+            throw new Exception("Incorrect library structure: could not find library jar");
+        }
+
+        File libJar = new File(libDir, jarsInLibDir[0]);
+        File libDependencyDir = new File(libDir.getAbsolutePath() + File.separator + "lib");
+        int numDependencies = 1;
+        String[] libraryDependencies = null;
+        if (libDependencyDir.exists()) {
+            libraryDependencies = libDependencyDir.list(jarFileFilter);
+            numDependencies += libraryDependencies.length;
+        }
+
+        ClassLoader parentClassLoader = ExternalLibraryBootstrap.class.getClassLoader();
+        URL[] urls = new URL[numDependencies];
+        int count = 0;
+        urls[count++] = libJar.toURI().toURL();
+
+        if (libraryDependencies != null && libraryDependencies.length > 0) {
+            for (String dependency : libraryDependencies) {
+                File file = new File(libDependencyDir + File.separator + dependency);
+                urls[count++] = file.toURI().toURL();
+            }
+        }
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            StringBuilder logMesg = new StringBuilder("Classpath for library " + libraryName + "\n");
+            for (URL url : urls) {
+                logMesg.append(url.getFile() + "\n");
+            }
+            LOGGER.info(logMesg.toString());
+        }
+
+        ClassLoader classLoader = new URLClassLoader(urls, parentClassLoader);
+        return classLoader;
+    }
+
+    private static File getLibraryInstallDir() {
+        String workingDir = System.getProperty("user.dir");
+        return new File(workingDir + File.separator + "library");
+    }
+
+    private static File getLibraryUninstallDir() {
+        String workingDir = System.getProperty("user.dir");
+        return new File(workingDir + File.separator + "uninstall");
+    }
+
+}
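
ExternalLibraryBootstrap above relies on JAXB (JAXBContext/Unmarshaller) to turn each library's XML descriptor into an ExternalLibrary object. The real descriptor schema (functions, adapters, language) lives in asterix-external-data and is not shown in this diff; the fragment below only illustrates the generic JAXB unmarshalling pattern with a deliberately simplified, hypothetical descriptor class.

    import java.io.File;

    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.Unmarshaller;
    import javax.xml.bind.annotation.XmlElement;
    import javax.xml.bind.annotation.XmlRootElement;

    // Hypothetical minimal descriptor; field names do not reflect the real ExternalLibrary schema.
    @XmlRootElement(name = "externalLibrary")
    public class LibraryDescriptorSketch {

        private String language;

        @XmlElement
        public String getLanguage() {
            return language;
        }

        public void setLanguage(String language) {
            this.language = language;
        }

        public static LibraryDescriptorSketch read(File xmlFile) throws Exception {
            JAXBContext ctx = JAXBContext.newInstance(LibraryDescriptorSketch.class);
            Unmarshaller unmarshaller = ctx.createUnmarshaller();
            return (LibraryDescriptorSketch) unmarshaller.unmarshal(xmlFile);
        }
    }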

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
new file mode 100644
index 0000000..7d22091
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.hyracks.bootstrap;
+
+import edu.uci.ics.asterix.feeds.CentralFeedManager;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+
+public class FeedBootstrap {
+
+    public final static String FEEDS_METADATA_DV = "feeds_metadata";
+    public final static String FAILED_TUPLE_DATASET = "failed_tuple";
+    public final static String FAILED_TUPLE_DATASET_TYPE = "FailedTupleType";
+    public final static String FAILED_TUPLE_DATASET_KEY = "id";
+
+    public static void setUpInitialArtifacts() throws Exception {
+
+        StringBuilder builder = new StringBuilder();
+        try {
+            builder.append("create dataverse " + FEEDS_METADATA_DV + ";" + "\n");
+            builder.append("use dataverse " + FEEDS_METADATA_DV + ";" + "\n");
+
+            builder.append("create type " + FAILED_TUPLE_DATASET_TYPE + " as open { ");
+
+            String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple",
+                    "message", "timestamp" };
+            IAType[] fieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+                    BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
+
+            for (int i = 0; i < fieldNames.length; i++) {
+                if (i > 0) {
+                    builder.append(",");
+                }
+                builder.append(fieldNames[i] + ":");
+                builder.append(fieldTypes[i].getTypeName());
+            }
+            builder.append("}" + ";" + "\n");
+
+            builder.append("create dataset " + FAILED_TUPLE_DATASET + " " + "(" + FAILED_TUPLE_DATASET_TYPE + ")" + " "
+                    + "primary key " + FAILED_TUPLE_DATASET_KEY + " on  " + MetadataConstants.METADATA_NODEGROUP_NAME
+                    + ";");
+
+            CentralFeedManager.AQLExecutor.executeAQL(builder.toString());
+        } catch (Exception e) {
+            e.printStackTrace();
+            System.out.println("Error: " + builder.toString());
+            throw e;
+        }
+    }
+
+}
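
For reference, the DDL string that setUpInitialArtifacts assembles above amounts to roughly the following AQL, assuming ASTRING's type name renders as "string"; the node group is whatever MetadataConstants.METADATA_NODEGROUP_NAME resolves to and is shown here only as a placeholder.

    create dataverse feeds_metadata;
    use dataverse feeds_metadata;
    create type FailedTupleType as open { id:string,dataverseName:string,feedName:string,
        targetDataset:string,tuple:string,message:string,timestamp:string};
    create dataset failed_tuple (FailedTupleType) primary key id on <METADATA_NODEGROUP_NAME>;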


[15/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java
deleted file mode 100644
index 8f555cc..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/ExternalIndexingOperations.java
+++ /dev/null
@@ -1,761 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
-import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
-import edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.HDFSIndexingAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory;
-import edu.uci.ics.asterix.external.indexing.operators.ExternalDatasetIndexesAbortOperatorDescriptor;
-import edu.uci.ics.asterix.external.indexing.operators.ExternalDatasetIndexesCommitOperatorDescriptor;
-import edu.uci.ics.asterix.external.indexing.operators.ExternalDatasetIndexesRecoverOperatorDescriptor;
-import edu.uci.ics.asterix.external.indexing.operators.IndexInfoOperatorDescriptor;
-import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.entities.ExternalFile;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.external.FilesIndexDescription;
-import edu.uci.ics.asterix.metadata.external.IndexingConstants;
-import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.tools.external.data.ExternalFilesIndexOperatorDescriptor;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.resource.ExternalBTreeLocalResourceMetadata;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import edu.uci.ics.hyracks.storage.common.file.LocalResource;
-
-public class ExternalIndexingOperations {
-
-    public static final List<List<String>> FILE_INDEX_FIELD_NAMES = new ArrayList<List<String>>();
-    public static final ArrayList<IAType> FILE_INDEX_FIELD_TYPES = new ArrayList<IAType>();
-    static {
-        FILE_INDEX_FIELD_NAMES.add(new ArrayList<String>(Arrays.asList("")));
-        FILE_INDEX_FIELD_TYPES.add(BuiltinType.ASTRING);
-    }
-
-    public static boolean isIndexible(ExternalDatasetDetails ds) {
-        String adapter = ds.getAdapter();
-        if (adapter.equalsIgnoreCase("hdfs") || adapter.equalsIgnoreCase("hive")
-                || adapter.equalsIgnoreCase("edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter")
-                || adapter.equalsIgnoreCase("edu.uci.ics.asterix.external.dataset.adapter.HIVEAdapter")) {
-            return true;
-        }
-        return false;
-    }
-
-    public static boolean isRefereshActive(ExternalDatasetDetails ds) {
-        return ds.getState() != ExternalDatasetTransactionState.COMMIT;
-    }
-
-    public static boolean datasetUsesHiveAdapter(ExternalDatasetDetails ds) {
-        String adapter = ds.getAdapter();
-        return (adapter.equalsIgnoreCase("hive") || adapter
-                .equalsIgnoreCase("edu.uci.ics.asterix.external.dataset.adapter.HIVEAdapter"));
-    }
-
-    public static boolean isValidIndexName(String datasetName, String indexName) {
-        return (!datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX).equals(indexName));
-    }
-
-    public static String getFilesIndexName(String datasetName) {
-        return datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX);
-    }
-
-    public static int getRIDSize(Dataset dataset) {
-        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
-        return IndexingConstants.getRIDSize(dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT));
-    }
-
-    public static IBinaryComparatorFactory[] getComparatorFactories(Dataset dataset) {
-        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
-        return IndexingConstants.getComparatorFactories((dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT)));
-    }
-
-    public static IBinaryComparatorFactory[] getBuddyBtreeComparatorFactories() {
-        return IndexingConstants.getBuddyBtreeComparatorFactories();
-    }
-
-    public static ArrayList<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset) throws AlgebricksException {
-        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
-        ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
-        try {
-            // Create the file system object
-            FileSystem fs = getFileSystemObject(datasetDetails.getProperties());
-            // If dataset uses hive adapter, add path to the dataset properties
-            if (datasetUsesHiveAdapter(datasetDetails)) {
-                HiveAdapterFactory.populateConfiguration(datasetDetails.getProperties());
-            }
-            // Get paths of dataset
-            String path = datasetDetails.getProperties().get(HDFSAdapterFactory.KEY_PATH);
-            String[] paths = path.split(",");
-
-            // Add fileStatuses to files
-            for (String aPath : paths) {
-                FileStatus[] fileStatuses = fs.listStatus(new Path(aPath));
-                for (int i = 0; i < fileStatuses.length; i++) {
-                    int nextFileNumber = files.size();
-                    if (fileStatuses[i].isDirectory()) {
-                        listSubFiles(dataset, fs, fileStatuses[i], files);
-                    } else {
-                        files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(),
-                                nextFileNumber, fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i]
-                                        .getModificationTime()), fileStatuses[i].getLen(),
-                                ExternalFilePendingOp.PENDING_NO_OP));
-                    }
-                }
-            }
-            // Close file system
-            fs.close();
-            if (files.size() == 0) {
-                throw new AlgebricksException("File Snapshot retrieved from external file system is empty");
-            }
-            return files;
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new AlgebricksException("Unable to get list of HDFS files " + e);
-        }
-    }
-
-    /* list all files under the directory
-     * src is expected to be a folder
-     */
-    private static void listSubFiles(Dataset dataset, FileSystem srcFs, FileStatus src, ArrayList<ExternalFile> files)
-            throws IOException {
-        Path path = src.getPath();
-        FileStatus[] fileStatuses = srcFs.listStatus(path);
-        for (int i = 0; i < fileStatuses.length; i++) {
-            int nextFileNumber = files.size();
-            if (fileStatuses[i].isDirectory()) {
-                listSubFiles(dataset, srcFs, fileStatuses[i], files);
-            } else {
-                files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
-                        fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i].getModificationTime()),
-                        fileStatuses[i].getLen(), ExternalFilePendingOp.PENDING_NO_OP));
-            }
-        }
-    }
-
-    public static FileSystem getFileSystemObject(Map<String, String> map) throws IOException {
-        Configuration conf = new Configuration();
-        conf.set("fs.default.name", map.get(HDFSAdapterFactory.KEY_HDFS_URL).trim());
-        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
-        return FileSystem.get(conf);
-    }
-
-    public static JobSpecification buildFilesIndexReplicationJobSpec(Dataset dataset,
-            ArrayList<ExternalFile> externalFilesSnapshot, AqlMetadataProvider metadataProvider, boolean createIndex)
-            throws MetadataException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
-                        dataset.getDatasetName(), getFilesIndexName(dataset.getDatasetName()), true);
-        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
-        ILocalResourceMetadata localResourceMetadata = new ExternalBTreeLocalResourceMetadata(
-                FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
-                FilesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, false, dataset.getDatasetId(),
-                mergePolicyFactory, mergePolicyFactoryProperties);
-        PersistentLocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
-                localResourceMetadata, LocalResource.ExternalBTreeResource);
-        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
-                mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-        ExternalFilesIndexOperatorDescriptor externalFilesOp = new ExternalFilesIndexOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                secondaryFileSplitProvider, indexDataflowHelperFactory, localResourceFactoryProvider,
-                externalFilesSnapshot, createIndex);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
-                secondarySplitsAndConstraint.second);
-        spec.addRoot(externalFilesOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    /**
-     * This method creates an indexing operator that indexes records in HDFS
-     * 
-     * @param jobSpec
-     * @param itemType
-     * @param dataset
-     * @param format
-     * @param files
-     * @param indexerDesc
-     * @return
-     * @throws Exception
-     */
-    private static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> getExternalDataIndexingOperator(
-            JobSpecification jobSpec, IAType itemType, Dataset dataset, List<ExternalFile> files,
-            RecordDescriptor indexerDesc, AqlMetadataProvider metadataProvider) throws Exception {
-        HDFSIndexingAdapterFactory adapterFactory = new HDFSIndexingAdapterFactory();
-        adapterFactory.setFiles(files);
-        adapterFactory.configure(((ExternalDatasetDetails) dataset.getDatasetDetails()).getProperties(),
-                (ARecordType) itemType);
-        return new Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint>(
-                new ExternalDataScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory),
-                adapterFactory.getPartitionConstraint());
-    }
-
-    public static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> createExternalIndexingOp(
-            JobSpecification spec, AqlMetadataProvider metadataProvider, Dataset dataset, ARecordType itemType,
-            RecordDescriptor indexerDesc, List<ExternalFile> files) throws Exception {
-        if (files == null) {
-            files = MetadataManager.INSTANCE.getDatasetExternalFiles(metadataProvider.getMetadataTxnContext(), dataset);
-        }
-        return getExternalDataIndexingOperator(spec, itemType, dataset, files, indexerDesc, metadataProvider);
-    }
-
-    /**
-     * At the end of this method, we expect to have 4 sets as follows:
-     * metadataFiles should contain only the files that are appended in their original state
-     * addedFiles should contain new files that have numbers assigned starting after the max original file number
-     * deletedFiles should contain files that are no longer present in the file system
-     * appendedFiles should have the new file information of existing files
-     * The method returns true only if there is zero delta (i.e., the dataset is up to date)
-     * 
-     * @param dataset
-     * @param metadataFiles
-     * @param addedFiles
-     * @param deletedFiles
-     * @param appendedFiles
-     * @return
-     * @throws MetadataException
-     * @throws AlgebricksException
-     */
-    public static boolean isDatasetUptodate(Dataset dataset, List<ExternalFile> metadataFiles,
-            List<ExternalFile> addedFiles, List<ExternalFile> deletedFiles, List<ExternalFile> appendedFiles)
-            throws MetadataException, AlgebricksException {
-        boolean uptodate = true;
-        int newFileNumber = metadataFiles.get(metadataFiles.size() - 1).getFileNumber() + 1;
-
-        ArrayList<ExternalFile> fileSystemFiles = getSnapshotFromExternalFileSystem(dataset);
-
-        // Loop over file system files < taking care of added files >
-        for (ExternalFile fileSystemFile : fileSystemFiles) {
-            boolean fileFound = false;
-            Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
-            while (mdFilesIterator.hasNext()) {
-                ExternalFile metadataFile = mdFilesIterator.next();
-                if (fileSystemFile.getFileName().equals(metadataFile.getFileName())) {
-                    // Same file name
-                    if (fileSystemFile.getLastModefiedTime().equals(metadataFile.getLastModefiedTime())) {
-                        // Same timestamp
-                        if (fileSystemFile.getSize() == metadataFile.getSize()) {
-                            // Same size -> no op
-                            mdFilesIterator.remove();
-                            fileFound = true;
-                        } else {
-                            // Different size -> append op
-                            metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
-                            fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
-                            appendedFiles.add(fileSystemFile);
-                            fileFound = true;
-                            uptodate = false;
-                        }
-                    } else {
-                        // Same file name, Different file mod date -> delete and add
-                        metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
-                        deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile
-                                .getDatasetName(), 0, metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
-                                metadataFile.getSize(), ExternalFilePendingOp.PENDING_DROP_OP));
-                        fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
-                        fileSystemFile.setFileNumber(newFileNumber);
-                        addedFiles.add(fileSystemFile);
-                        newFileNumber++;
-                        fileFound = true;
-                        uptodate = false;
-                    }
-                }
-                if (fileFound)
-                    break;
-            }
-            if (!fileFound) {
-                // File not stored previously in metadata -> pending add op
-                fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
-                fileSystemFile.setFileNumber(newFileNumber);
-                addedFiles.add(fileSystemFile);
-                newFileNumber++;
-                uptodate = false;
-            }
-        }
-
-        // Done with files from external file system -> metadata files now contain both deleted files and appended ones
-        // first, correct number assignment to deleted and updated files
-        for (ExternalFile deletedFile : deletedFiles) {
-            deletedFile.setFileNumber(newFileNumber);
-            newFileNumber++;
-        }
-        for (ExternalFile appendedFile : appendedFiles) {
-            appendedFile.setFileNumber(newFileNumber);
-            newFileNumber++;
-        }
-
-        // include the remaining deleted files
-        Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
-        while (mdFilesIterator.hasNext()) {
-            ExternalFile metadataFile = mdFilesIterator.next();
-            if (metadataFile.getPendingOp() == ExternalFilePendingOp.PENDING_NO_OP) {
-                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
-                deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(),
-                        newFileNumber, metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile
-                                .getSize(), metadataFile.getPendingOp()));
-                newFileNumber++;
-                uptodate = false;
-            }
-        }
-        return uptodate;
-    }
-
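The method above is the heart of the external-dataset refresh path: every file seen on the file system is matched by name against the metadata snapshot and classified as unchanged (same name, timestamp and size), appended (same name and timestamp, different size) or replaced (same name, different timestamp, which turns into a drop of the old entry plus an add of the new one); unmatched file-system files become adds and unmatched metadata files become drops. The following self-contained sketch restates only that classification; the FileInfo class, Pending enum and classify method are hypothetical stand-ins rather than AsterixDB types, and file-number assignment is left out.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class SnapshotDiffSketch {

        enum Pending { NO_OP, ADD, APPEND, DROP }

        // Hypothetical stand-in for ExternalFile: name, modification time, size, pending op.
        static final class FileInfo {
            final String name;
            final long modTime;
            final long size;
            Pending pending = Pending.NO_OP;

            FileInfo(String name, long modTime, long size) {
                this.name = name;
                this.modTime = modTime;
                this.size = size;
            }
        }

        // Classifies a file-system snapshot against the metadata snapshot.
        // Returns true when nothing changed, i.e. there is nothing to refresh.
        static boolean classify(List<FileInfo> fileSystem, List<FileInfo> metadata,
                List<FileInfo> added, List<FileInfo> appended, List<FileInfo> dropped) {
            Map<String, FileInfo> unmatched = new HashMap<>();
            for (FileInfo md : metadata) {
                unmatched.put(md.name, md);
            }
            boolean upToDate = true;
            for (FileInfo fs : fileSystem) {
                FileInfo md = unmatched.remove(fs.name);
                if (md == null) {
                    fs.pending = Pending.ADD;      // new file -> add
                    added.add(fs);
                    upToDate = false;
                } else if (md.modTime != fs.modTime) {
                    md.pending = Pending.DROP;     // replaced file -> drop old, add new
                    dropped.add(md);
                    fs.pending = Pending.ADD;
                    added.add(fs);
                    upToDate = false;
                } else if (md.size != fs.size) {
                    fs.pending = Pending.APPEND;   // same file, new length -> append
                    appended.add(fs);
                    upToDate = false;
                }                                  // same name, time and size -> no-op
            }
            for (FileInfo md : unmatched.values()) {
                md.pending = Pending.DROP;         // gone from the file system -> drop
                dropped.add(md);
                upToDate = false;
            }
            return upToDate;
        }

        public static void main(String[] args) {
            List<FileInfo> fs = List.of(new FileInfo("a.json", 10L, 500L), new FileInfo("b.json", 20L, 100L));
            List<FileInfo> md = List.of(new FileInfo("a.json", 10L, 300L), new FileInfo("c.json", 30L, 900L));
            List<FileInfo> added = new ArrayList<>(), appended = new ArrayList<>(), dropped = new ArrayList<>();
            System.out.println("up to date: " + classify(fs, md, added, appended, dropped));
            // a.json grew -> appended, b.json is new -> added, c.json vanished -> dropped
        }
    }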
-    public static Dataset createTransactionDataset(Dataset dataset) {
-        ExternalDatasetDetails originalDsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
-        ExternalDatasetDetails dsd = new ExternalDatasetDetails(originalDsd.getAdapter(), originalDsd.getProperties(),
-                originalDsd.getTimestamp(), ExternalDatasetTransactionState.BEGIN);
-        Dataset transactionDatset = new Dataset(dataset.getDataverseName(), dataset.getDatasetName(),
-                dataset.getItemTypeName(), dataset.getNodeGroupName(), dataset.getCompactionPolicy(),
-                dataset.getCompactionPolicyProperties(), dsd, dataset.getHints(), DatasetType.EXTERNAL,
-                dataset.getDatasetId(), dataset.getPendingOp());
-        return transactionDatset;
-    }
-
-    public static boolean isFileIndex(Index index) {
-        return (index.getIndexName().equals(getFilesIndexName(index.getDatasetName())));
-    }
-
-    public static JobSpecification buildDropFilesIndexJobSpec(CompiledIndexDropStatement indexDropStmt,
-            AqlMetadataProvider metadataProvider, Dataset dataset) throws AlgebricksException, MetadataException {
-        String dataverseName = indexDropStmt.getDataverseName() == null ? metadataProvider.getDefaultDataverseName()
-                : indexDropStmt.getDataverseName();
-        String datasetName = indexDropStmt.getDatasetName();
-        String indexName = indexDropStmt.getIndexName();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataverseName, datasetName, indexName, true);
-        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-        IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
-                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, !temp));
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second);
-        spec.addRoot(btreeDrop);
-
-        return spec;
-    }
-
-    public static JobSpecification buildFilesIndexUpdateOp(Dataset ds, List<ExternalFile> metadataFiles,
-            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
-            AqlMetadataProvider metadataProvider) throws MetadataException, AlgebricksException {
-        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
-        for (ExternalFile file : metadataFiles) {
-            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP)
-                files.add(file);
-            else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
-                for (ExternalFile appendedFile : appendedFiles) {
-                    if (appendedFile.getFileName().equals(file.getFileName())) {
-                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(),
-                                file.getFileNumber(), file.getFileName(), file.getLastModefiedTime(), appendedFile
-                                        .getSize(), ExternalFilePendingOp.PENDING_NO_OP));
-                    }
-                }
-            }
-        }
-        for (ExternalFile file : addedFiles) {
-            files.add(file);
-        }
-        Collections.sort(files);
-        return buildFilesIndexReplicationJobSpec(ds, files, metadataProvider, false);
-    }
-
-    public static JobSpecification buildIndexUpdateOp(Dataset ds, Index index, List<ExternalFile> metadataFiles,
-            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
-            AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
-        // Create files list
-        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
-
-        for (ExternalFile metadataFile : metadataFiles) {
-            if (metadataFile.getPendingOp() != ExternalFilePendingOp.PENDING_APPEND_OP) {
-                files.add(metadataFile);
-            } else {
-                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
-                files.add(metadataFile);
-            }
-        }
-        // add new files
-        for (ExternalFile file : addedFiles) {
-            files.add(file);
-        }
-        // add appended files
-        for (ExternalFile file : appendedFiles) {
-            files.add(file);
-        }
-
-        CompiledCreateIndexStatement ccis = new CompiledCreateIndexStatement(index.getIndexName(),
-                index.getDataverseName(), index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
-                index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
-        return IndexOperations.buildSecondaryIndexLoadingJobSpec(ccis, null, null, metadataProvider, files);
-    }
-
-    public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
-                metadataProvider.getMetadataTxnContext());
-        boolean temp = ds.getDatasetDetails().isTemp();
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                        getFilesIndexName(ds.getDatasetName()), temp);
-        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
-                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
-        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
-
-        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-
-        for (Index index : indexes) {
-            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
-                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
-                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                                index.getIndexName(), temp);
-                if (index.getIndexType() == IndexType.BTREE) {
-                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, spec));
-                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                } else if (index.getIndexType() == IndexType.RTREE) {
-                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
-                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                }
-            }
-        }
-
-        ExternalDatasetIndexesCommitOperatorDescriptor op = new ExternalDatasetIndexesCommitOperatorDescriptor(spec,
-                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
-                rtreeDataflowHelperFactories, rtreeInfos);
-
-        spec.addRoot(op);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
-                filesIndexSplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    private static ExternalBTreeDataflowHelperFactory getFilesIndexDataflowHelperFactory(Dataset ds,
-            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
-            AsterixStorageProperties storageProperties, JobSpecification spec) {
-        return new ExternalBTreeDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
-    }
-
-    private static ExternalBTreeWithBuddyDataflowHelperFactory getBTreeDataflowHelperFactory(Dataset ds, Index index,
-            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
-            AsterixStorageProperties storageProperties, JobSpecification spec) {
-        return new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
-                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
-    }
-
-    @SuppressWarnings("rawtypes")
-    private static ExternalRTreeDataflowHelperFactory getRTreeDataflowHelperFactory(Dataset ds, Index index,
-            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
-            AsterixStorageProperties storageProperties, AqlMetadataProvider metadataProvider, JobSpecification spec)
-            throws AlgebricksException, AsterixException {
-        int numPrimaryKeys = getRIDSize(ds);
-        List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
-        ARecordType itemType = (ARecordType) metadataProvider.findType(ds.getDataverseName(), ds.getItemTypeName());
-        Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0), itemType);
-        IAType spatialType = spatialTypePair.first;
-        if (spatialType == null) {
-            throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
-        }
-        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
-        int numNestedSecondaryKeyFields = numDimensions * 2;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
-        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
-
-        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys
-                + numNestedSecondaryKeyFields];
-        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
-        IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
-        ATypeTag keyType = nestedKeyType.getTypeTag();
-
-        for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
-            ISerializerDeserializer keySerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(nestedKeyType);
-            secondaryRecFields[i] = keySerde;
-
-            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
-                    nestedKeyType, true);
-            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
-            valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
-        }
-        // Add serializers and comparators for primary index fields.
-        for (int i = 0; i < numPrimaryKeys; i++) {
-            secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
-            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
-        }
-        int[] primaryKeyFields = new int[numPrimaryKeys];
-        for (int i = 0; i < primaryKeyFields.length; i++) {
-            primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
-        }
-
-        return new ExternalRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
-                getBuddyBtreeComparatorFactories(), mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
-                AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
-                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
-    }
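The R-tree helper above sizes each per-key array as numDimensions * 2 because an R-tree secondary index stores a minimum bounding rectangle per entry: one low and one high coordinate for every spatial dimension, all of the same nested numeric type. The arithmetic, restated as a toy sketch; the type-to-dimension mapping here is illustrative only (the real mapping lives in NonTaggedFormatUtil.getNumDimensions).

    public class RTreeKeyCountSketch {

        // Illustrative mapping; AsterixDB derives this from the ATypeTag of the indexed field.
        static int dimensionsOf(String spatialTypeTag) {
            switch (spatialTypeTag) {
                case "POINT3D":
                    return 3;
                case "POINT":
                case "LINE":
                case "RECTANGLE":
                case "POLYGON":
                case "CIRCLE":
                    return 2;
                default:
                    throw new IllegalArgumentException("not a spatial type: " + spatialTypeTag);
            }
        }

        public static void main(String[] args) {
            int numDimensions = dimensionsOf("POINT");
            int numNestedSecondaryKeyFields = numDimensions * 2;   // MBR: low + high per dimension
            System.out.println(numNestedSecondaryKeyFields + " nested R-tree key fields");   // prints 4
        }
    }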
-
-    public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-
-        boolean temp = ds.getDatasetDetails().isTemp();
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                        getFilesIndexName(ds.getDatasetName()), temp);
-        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
-                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
-        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
-
-        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-
-        for (Index index : indexes) {
-            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
-                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
-                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                                index.getIndexName(), temp);
-                if (index.getIndexType() == IndexType.BTREE) {
-                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, spec));
-                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                } else if (index.getIndexType() == IndexType.RTREE) {
-                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
-                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                }
-            }
-        }
-
-        ExternalDatasetIndexesAbortOperatorDescriptor op = new ExternalDatasetIndexesAbortOperatorDescriptor(spec,
-                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
-                rtreeDataflowHelperFactories, rtreeInfos);
-
-        spec.addRoot(op);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
-                filesIndexSplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-
-    }
-
-    public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        boolean temp = ds.getDatasetDetails().isTemp();
-
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                        getFilesIndexName(ds.getDatasetName()), temp);
-        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
-                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
-        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
-
-        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-
-        for (Index index : indexes) {
-            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
-                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
-                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                                index.getIndexName(), temp);
-                if (index.getIndexType() == IndexType.BTREE) {
-                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, spec));
-                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                } else if (index.getIndexType() == IndexType.RTREE) {
-                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
-                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                }
-            }
-        }
-
-        ExternalDatasetIndexesRecoverOperatorDescriptor op = new ExternalDatasetIndexesRecoverOperatorDescriptor(spec,
-                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
-                rtreeDataflowHelperFactories, rtreeInfos);
-
-        spec.addRoot(op);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
-                filesIndexSplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    public static JobSpecification compactFilesIndexJobSpec(Dataset dataset, AqlMetadataProvider metadataProvider)
-            throws MetadataException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
-                        dataset.getDatasetName(), getFilesIndexName(dataset.getDatasetName()), true);
-        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
-                mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
-                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-        LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                secondaryFileSplitProvider, FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
-                FilesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, indexDataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-        spec.addRoot(compactOp);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
-                secondarySplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-}
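One observation on the file that just ended: buildCommitJob, buildAbortOp and buildRecoverOp assemble identical per-index scaffolding (a files-index dataflow helper, then one B-tree-with-buddy or R-tree helper per secondary index) and differ only in the operator descriptor they instantiate at the end. Below is a minimal sketch of how that duplication could be factored behind a factory parameter; every type here is a hypothetical stand-in, not a Hyracks class.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.BiFunction;

    public class ExternalJobFactoringSketch {

        // Hypothetical stand-ins for the types used by the deleted builders.
        interface IndexDescriptor { String type(); }   // "BTREE" or "RTREE"
        interface DataflowHelper { }
        interface MaintenanceOperator { }

        static DataflowHelper btreeHelper(IndexDescriptor index) { return new DataflowHelper() { }; }
        static DataflowHelper rtreeHelper(IndexDescriptor index) { return new DataflowHelper() { }; }

        // The per-index loop that commit, abort and recover all repeat, written once;
        // the caller passes a factory producing the one operator that actually differs.
        static MaintenanceOperator buildMaintenanceJob(List<IndexDescriptor> indexes,
                BiFunction<List<DataflowHelper>, List<DataflowHelper>, MaintenanceOperator> opFactory) {
            List<DataflowHelper> btrees = new ArrayList<>();
            List<DataflowHelper> rtrees = new ArrayList<>();
            for (IndexDescriptor index : indexes) {
                if ("BTREE".equals(index.type())) {
                    btrees.add(btreeHelper(index));
                } else if ("RTREE".equals(index.type())) {
                    rtrees.add(rtreeHelper(index));
                }
            }
            return opFactory.apply(btrees, rtrees);
        }
    }

The commit, abort and recover variants would then differ only in the lambda passed as opFactory.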

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
deleted file mode 100644
index 379a4f0..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
+++ /dev/null
@@ -1,251 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import java.util.Collection;
-import java.util.List;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
-import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
-import edu.uci.ics.asterix.common.feeds.FeedConstants;
-import edu.uci.ics.asterix.common.feeds.FeedId;
-import edu.uci.ics.asterix.common.feeds.FeedPolicyAccessor;
-import edu.uci.ics.asterix.common.feeds.FeedTupleCommitResponseMessage;
-import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
-import edu.uci.ics.asterix.common.feeds.api.IFeedMessage;
-import edu.uci.ics.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import edu.uci.ics.asterix.common.feeds.message.EndFeedMessage;
-import edu.uci.ics.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.PrimaryFeed;
-import edu.uci.ics.asterix.metadata.feeds.FeedMessageOperatorDescriptor;
-import edu.uci.ics.asterix.metadata.feeds.IFeedAdapterFactory;
-import edu.uci.ics.asterix.metadata.feeds.PrepareStallMessage;
-import edu.uci.ics.asterix.metadata.feeds.TerminateDataFlowMessage;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-
-/**
- * Provides helper method(s) for creating JobSpec for operations on a feed.
- */
-public class FeedOperations {
-
-    /**
-     * Builds the job spec for ingesting a (primary) feed from its external source via the feed adaptor.
-     * 
-     * @param primaryFeed the primary feed whose adaptor supplies the data
-     * @param metadataProvider the metadata provider used to build the intake runtime
-     * @param policyAccessor accessor for the ingestion policy attached to the feed
-     * @return the Hyracks job specification for receiving data from the external source
-     * @throws Exception
-     */
-    public static Pair<JobSpecification, IFeedAdapterFactory> buildFeedIntakeJobSpec(PrimaryFeed primaryFeed,
-            AqlMetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
-
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        spec.setFrameSize(FeedConstants.JobConstants.DEFAULT_FRAME_SIZE);
-        IFeedAdapterFactory adapterFactory = null;
-        IOperatorDescriptor feedIngestor;
-        AlgebricksPartitionConstraint ingesterPc;
-
-        try {
-            Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IFeedAdapterFactory> t = metadataProvider
-                    .buildFeedIntakeRuntime(spec, primaryFeed, policyAccessor);
-            feedIngestor = t.first;
-            ingesterPc = t.second;
-            adapterFactory = t.third;
-        } catch (AlgebricksException e) {
-            e.printStackTrace();
-            throw new AsterixException(e);
-        }
-
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedIngestor, ingesterPc);
-
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, ingesterPc);
-        spec.connect(new OneToOneConnectorDescriptor(spec), feedIngestor, 0, nullSink, 0);
-        spec.addRoot(nullSink);
-        return new Pair<JobSpecification, IFeedAdapterFactory>(spec, adapterFactory);
-    }
-
-    public static JobSpecification buildDiscontinueFeedSourceSpec(AqlMetadataProvider metadataProvider, FeedId feedId)
-            throws AsterixException, AlgebricksException {
-
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IOperatorDescriptor feedMessenger = null;
-        AlgebricksPartitionConstraint messengerPc = null;
-
-        List<String> locations = FeedLifecycleListener.INSTANCE.getIntakeLocations(feedId);
-        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDiscontinueFeedMessengerRuntime(spec, feedId,
-                locations);
-
-        feedMessenger = p.first;
-        messengerPc = p.second;
-
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
-        spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
-        spec.addRoot(nullSink);
-
-        return spec;
-    }
-
-    /**
-     * Builds the job spec for sending a message to an active feed to disconnect it from
-     * its source.
-     */
-    public static Pair<JobSpecification, Boolean> buildDisconnectFeedJobSpec(AqlMetadataProvider metadataProvider,
-            FeedConnectionId connectionId) throws AsterixException, AlgebricksException {
-
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IOperatorDescriptor feedMessenger;
-        AlgebricksPartitionConstraint messengerPc;
-        List<String> locations = null;
-        FeedRuntimeType sourceRuntimeType;
-        try {
-            FeedConnectJobInfo cInfo = FeedLifecycleListener.INSTANCE.getFeedConnectJobInfo(connectionId);
-            IFeedJoint sourceFeedJoint = cInfo.getSourceFeedJoint();
-            IFeedJoint computeFeedJoint = cInfo.getComputeFeedJoint();
-
-            boolean terminateIntakeJob = false;
-            boolean completeDisconnect = computeFeedJoint == null || computeFeedJoint.getReceivers().isEmpty();
-            if (completeDisconnect) {
-                sourceRuntimeType = FeedRuntimeType.INTAKE;
-                locations = cInfo.getCollectLocations();
-                terminateIntakeJob = sourceFeedJoint.getReceivers().size() == 1;
-            } else {
-                locations = cInfo.getComputeLocations();
-                sourceRuntimeType = FeedRuntimeType.COMPUTE;
-            }
-
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDisconnectFeedMessengerRuntime(spec,
-                    connectionId, locations, sourceRuntimeType, completeDisconnect, sourceFeedJoint.getOwnerFeedId());
-
-            feedMessenger = p.first;
-            messengerPc = p.second;
-
-            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
-            NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
-            spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
-            spec.addRoot(nullSink);
-            return new Pair<JobSpecification, Boolean>(spec, terminateIntakeJob);
-
-        } catch (AlgebricksException e) {
-            throw new AsterixException(e);
-        }
-
-    }
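The disconnect builder above picks the target of the end-feed message as follows: when nothing else consumes the compute joint, the disconnect is complete, the message goes to the intake (collect) locations, and the intake job itself is terminated if this connection was its only receiver; otherwise the message goes to the compute locations and the intake keeps running. The decision, restated as a self-contained sketch with hypothetical names:

    public class DisconnectDecisionSketch {

        static final class Decision {
            final boolean completeDisconnect;
            final boolean terminateIntakeJob;
            final String targetRuntime;   // "INTAKE" or "COMPUTE"

            Decision(boolean completeDisconnect, boolean terminateIntakeJob, String targetRuntime) {
                this.completeDisconnect = completeDisconnect;
                this.terminateIntakeJob = terminateIntakeJob;
                this.targetRuntime = targetRuntime;
            }
        }

        // computeReceivers: feeds still reading from the compute joint;
        // intakeReceivers: feeds still reading from the source (intake) joint.
        static Decision decide(int computeReceivers, int intakeReceivers) {
            boolean complete = computeReceivers == 0;
            if (complete) {
                return new Decision(true, intakeReceivers == 1, "INTAKE");
            }
            return new Decision(false, false, "COMPUTE");
        }

        public static void main(String[] args) {
            Decision d = decide(0, 1);
            System.out.println(d.targetRuntime + ", terminate intake = " + d.terminateIntakeJob);
        }
    }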
-
-    public static JobSpecification buildPrepareStallMessageJob(PrepareStallMessage stallMessage,
-            Collection<String> collectLocations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, stallMessage.getConnectionId(), stallMessage, collectLocations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static JobSpecification buildNotifyThrottlingEnabledMessageJob(
-            ThrottlingEnabledFeedMessage throttlingEnabledMesg, Collection<String> locations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, throttlingEnabledMesg.getConnectionId(), throttlingEnabledMesg, locations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static JobSpecification buildTerminateFlowMessageJob(TerminateDataFlowMessage terminateMessage,
-            List<String> collectLocations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, terminateMessage.getConnectionId(), terminateMessage, collectLocations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static JobSpecification buildCommitAckResponseJob(FeedTupleCommitResponseMessage commitResponseMessage,
-            Collection<String> targetLocations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, commitResponseMessage.getConnectionId(), commitResponseMessage, targetLocations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDiscontinueFeedMessengerRuntime(
-            JobSpecification jobSpec, FeedId feedId, List<String> locations) throws AlgebricksException {
-        FeedConnectionId feedConnectionId = new FeedConnectionId(feedId, null);
-        IFeedMessage feedMessage = new EndFeedMessage(feedConnectionId, FeedRuntimeType.INTAKE,
-                feedConnectionId.getFeedId(), true, EndFeedMessage.EndMessageType.DISCONTINUE_SOURCE);
-        return buildSendFeedMessageRuntime(jobSpec, feedConnectionId, feedMessage, locations);
-    }
-
-    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildSendFeedMessageRuntime(
-            JobSpecification jobSpec, FeedConnectionId feedConenctionId, IFeedMessage feedMessage,
-            Collection<String> locations) throws AlgebricksException {
-        AlgebricksPartitionConstraint partitionConstraint = new AlgebricksAbsolutePartitionConstraint(
-                locations.toArray(new String[] {}));
-        FeedMessageOperatorDescriptor feedMessenger = new FeedMessageOperatorDescriptor(jobSpec, feedConenctionId,
-                feedMessage);
-        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, partitionConstraint);
-    }
-
-    private static JobSpecification buildSendFeedMessageJobSpec(IOperatorDescriptor operatorDescriptor,
-            AlgebricksPartitionConstraint messengerPc, JobSpecification messageJobSpec) {
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, operatorDescriptor,
-                messengerPc);
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(messageJobSpec);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, nullSink, messengerPc);
-        messageJobSpec.connect(new OneToOneConnectorDescriptor(messageJobSpec), operatorDescriptor, 0, nullSink, 0);
-        messageJobSpec.addRoot(nullSink);
-        return messageJobSpec;
-    }
-
-    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDisconnectFeedMessengerRuntime(
-            JobSpecification jobSpec, FeedConnectionId feedConenctionId, List<String> locations,
-            FeedRuntimeType sourceFeedRuntimeType, boolean completeDisconnection, FeedId sourceFeedId)
-            throws AlgebricksException {
-        IFeedMessage feedMessage = new EndFeedMessage(feedConenctionId, sourceFeedRuntimeType, sourceFeedId,
-                completeDisconnection, EndFeedMessage.EndMessageType.DISCONNECT_FEED);
-        return buildSendFeedMessageRuntime(jobSpec, feedConenctionId, feedMessage, locations);
-    }
-}
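Four of the builders above (prepare-stall, throttling-enabled, terminate-flow and commit-ack) share one shape: wrap the message in a feed-messenger operator, constrain it to the target locations, and drain its single output into a null sink, since only the side effect of delivering the message matters. A hedged sketch of that shared shape, with hypothetical stand-ins instead of the real Hyracks operator classes:

    import java.util.Collection;
    import java.util.List;

    public class FeedMessageJobSketch {

        // Hypothetical stand-ins for IFeedMessage and the assembled message job.
        interface FeedMessage { String describe(); }

        static final class MessageJob {
            final FeedMessage message;
            final Collection<String> locations;   // where the messenger runs (partition constraint)

            MessageJob(FeedMessage message, Collection<String> locations) {
                this.message = message;
                this.locations = locations;
            }
        }

        // One generic builder replaces the four near-identical buildXxxJob methods:
        // messenger -> one-to-one connector -> null sink, rooted at the sink.
        static MessageJob buildMessageJob(FeedMessage message, Collection<String> locations) {
            // In the deleted code this is buildSendFeedMessageRuntime(...) followed by
            // buildSendFeedMessageJobSpec(...); the wiring is only implied by this holder.
            return new MessageJob(message, locations);
        }

        public static void main(String[] args) {
            MessageJob job = buildMessageJob(() -> "prepare-stall", List.of("nc1", "nc2"));
            System.out.println(job.message.describe() + " -> " + job.locations);
        }
    }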

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
deleted file mode 100644
index 0642151..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
-import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
-import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
-import edu.uci.ics.asterix.metadata.MetadataException;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.ExternalFile;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
-import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexCompactStatement;
-import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-
-public class IndexOperations {
-
-    private static final PhysicalOptimizationConfig physicalOptimizationConfig = OptimizationConfUtil
-            .getPhysicalOptimizationConfig();
-
-    public static JobSpecification buildSecondaryIndexCreationJobSpec(CompiledCreateIndexStatement createIndexStmt,
-            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
-            throws AsterixException, AlgebricksException {
-        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
-                .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
-                        createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
-                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
-                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
-                        physicalOptimizationConfig, recType, enforcedType);
-        return secondaryIndexHelper.buildCreationJobSpec();
-    }
-
-    public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
-            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
-            throws AsterixException, AlgebricksException {
-        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
-                .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
-                        createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
-                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
-                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
-                        physicalOptimizationConfig, recType, enforcedType);
-        return secondaryIndexHelper.buildLoadingJobSpec();
-    }
-
-    public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
-            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider,
-            List<ExternalFile> files) throws AsterixException, AlgebricksException {
-        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
-                .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
-                        createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
-                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
-                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
-                        physicalOptimizationConfig, recType, enforcedType);
-        secondaryIndexHelper.setExternalFiles(files);
-        return secondaryIndexHelper.buildLoadingJobSpec();
-    }
-
-    public static JobSpecification buildDropSecondaryIndexJobSpec(CompiledIndexDropStatement indexDropStmt,
-            AqlMetadataProvider metadataProvider, Dataset dataset) throws AlgebricksException, MetadataException {
-        String dataverseName = indexDropStmt.getDataverseName() == null ? metadataProvider.getDefaultDataverseName()
-                : indexDropStmt.getDataverseName();
-        String datasetName = indexDropStmt.getDatasetName();
-        String indexName = indexDropStmt.getIndexName();
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, indexName, temp);
-        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-
-        // The index drop operation should be persistent regardless of temp datasets or permanent dataset.
-        IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
-                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, true));
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second);
-        spec.addRoot(btreeDrop);
-
-        return spec;
-    }
-
-    public static JobSpecification buildSecondaryIndexCompactJobSpec(CompiledIndexCompactStatement indexCompactStmt,
-            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider, Dataset dataset)
-            throws AsterixException, AlgebricksException {
-        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
-                .createIndexOperationsHelper(indexCompactStmt.getIndexType(), indexCompactStmt.getDataverseName(),
-                        indexCompactStmt.getDatasetName(), indexCompactStmt.getIndexName(),
-                        indexCompactStmt.getKeyFields(), indexCompactStmt.getKeyTypes(), indexCompactStmt.isEnforced(),
-                        indexCompactStmt.getGramLength(), metadataProvider, physicalOptimizationConfig, recType,
-                        enforcedType);
-        return secondaryIndexHelper.buildCompactJobSpec();
-    }
-}
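The creation, loading and compact builders above all construct the same SecondaryIndexOperationsHelper from the same statement fields and then call a different build method on it. A minimal sketch of pulling that construction into one place; IndexStatement and OpsHelper are hypothetical slices of the real classes, and the helper body is a stand-in.

    public class IndexJobSketch {

        // Hypothetical slices of CompiledCreateIndexStatement and SecondaryIndexOperationsHelper.
        interface IndexStatement { String dataverse(); String dataset(); String index(); }

        interface OpsHelper {
            String buildCreationJobSpec();
            String buildLoadingJobSpec();
            String buildCompactJobSpec();
        }

        // The one place that would gather the long constructor argument list in the real code.
        private static OpsHelper helperFor(IndexStatement stmt) {
            String target = stmt.dataverse() + "." + stmt.dataset() + "." + stmt.index();
            return new OpsHelper() {
                public String buildCreationJobSpec() { return "create " + target; }
                public String buildLoadingJobSpec() { return "load " + target; }
                public String buildCompactJobSpec() { return "compact " + target; }
            };
        }

        static String buildCreationJob(IndexStatement stmt) { return helperFor(stmt).buildCreationJobSpec(); }
        static String buildLoadingJob(IndexStatement stmt) { return helperFor(stmt).buildLoadingJobSpec(); }
        static String buildCompactJob(IndexStatement stmt) { return helperFor(stmt).buildCompactJobSpec(); }
    }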

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/file/JobSpecificationUtils.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/JobSpecificationUtils.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/JobSpecificationUtils.java
deleted file mode 100644
index 5e76fdf..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/JobSpecificationUtils.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.file;
-
-import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-
-public class JobSpecificationUtils {
-    public static JobSpecification createJobSpecification() {
-        AsterixCompilerProperties compilerProperties = AsterixAppContextInfo.getInstance().getCompilerProperties();
-        int frameSize = compilerProperties.getFrameSize();
-        JobSpecification spec = new JobSpecification(frameSize);
-        return spec;
-    }
-}
\ No newline at end of file
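JobSpecificationUtils is tiny, but every builder deleted in this patch starts with it, so generated jobs default to the frame size from the compiler properties rather than a hard-coded constant (the feed intake job above then overrides it with the feed-specific default). A sketch of the same pattern with a hypothetical properties source:

    public class JobSpecSketch {

        // Hypothetical stand-ins for AsterixCompilerProperties and JobSpecification.
        interface CompilerProps { int frameSize(); }

        static final class JobSpec {
            final int frameSizeBytes;
            JobSpec(int frameSizeBytes) { this.frameSizeBytes = frameSizeBytes; }
        }

        // Centralizing creation keeps every job on the configured frame size.
        static JobSpec create(CompilerProps props) {
            return new JobSpec(props.frameSize());
        }

        public static void main(String[] args) {
            JobSpec spec = create(() -> 32768);   // 32 KB frames; illustrative value only
            System.out.println("frame size = " + spec.frameSizeBytes);
        }
    }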


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java
new file mode 100644
index 0000000..05c68f4
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java
@@ -0,0 +1,254 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.ListSet;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnpartitionedPropertyComputer;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/*
+ * Matches the following plan fragment:
+ *
+ *  unnest $x [[ at $p ]] <- $y
+ *    aggregate $y <- function-call: listify@1(unresolved), Args:[$z]
+ *       Rest
+ *
+ * If $y is not used above these operators,
+ * the plan fragment becomes
+ *
+ *  [[ runningaggregate $p <- tid ]]
+ *  assign $x <- $z
+ *       Rest
+ *
+ */
+
+public class RemoveRedundantListifyRule implements IAlgebraicRewriteRule {
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        // apply it only at the top of the plan
+        ILogicalOperator op = opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        Set<LogicalVariable> varSet = new HashSet<LogicalVariable>();
+        return applyRuleDown(opRef, varSet, context);
+    }
+
+    private boolean applyRuleDown(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varSet,
+            IOptimizationContext context) throws AlgebricksException {
+        boolean changed = applies(opRef, varSet, context);
+        changed |= appliesForReverseCase(opRef, varSet, context);
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        VariableUtilities.getUsedVariables(op, varSet);
+        if (op.hasNestedPlans()) {
+            // Variables used by the parent operators should be live at op.
+            Set<LogicalVariable> localLiveVars = new ListSet<LogicalVariable>();
+            VariableUtilities.getLiveVariables(op, localLiveVars);
+            varSet.retainAll(localLiveVars);
+            AbstractOperatorWithNestedPlans aonp = (AbstractOperatorWithNestedPlans) op;
+            for (ILogicalPlan p : aonp.getNestedPlans()) {
+                for (Mutable<ILogicalOperator> r : p.getRoots()) {
+                    if (applyRuleDown(r, varSet, context)) {
+                        changed = true;
+                    }
+                    context.addToDontApplySet(this, r.getValue());
+                }
+            }
+        }
+        for (Mutable<ILogicalOperator> i : op.getInputs()) {
+            if (applyRuleDown(i, varSet, context)) {
+                changed = true;
+            }
+            context.addToDontApplySet(this, i.getValue());
+        }
+        return changed;
+    }
+
+    private boolean applies(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varUsedAbove,
+            IOptimizationContext context) throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+            return false;
+        }
+        UnnestOperator unnest1 = (UnnestOperator) op1;
+        ILogicalExpression expr = unnest1.getExpressionRef().getValue();
+        LogicalVariable unnestedVar;
+        switch (expr.getExpressionTag()) {
+            case VARIABLE:
+                unnestedVar = ((VariableReferenceExpression) expr).getVariableReference();
+                break;
+            case FUNCTION_CALL:
+                if (((AbstractFunctionCallExpression) expr).getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
+                    return false;
+                }
+                AbstractFunctionCallExpression functionCall = (AbstractFunctionCallExpression) expr;
+                ILogicalExpression functionCallArgExpr = functionCall.getArguments().get(0).getValue();
+                if (functionCallArgExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                    return false;
+                }
+                unnestedVar = ((VariableReferenceExpression) functionCallArgExpr).getVariableReference();
+                break;
+            default:
+                return false;
+        }
+        if (varUsedAbove.contains(unnestedVar)) {
+            return false;
+        }
+
+        Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
+        AbstractLogicalOperator r = (AbstractLogicalOperator) opRef2.getValue();
+
+        if (r.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AggregateOperator agg = (AggregateOperator) r;
+        if (agg.getVariables().size() > 1) {
+            return false;
+        }
+        LogicalVariable aggVar = agg.getVariables().get(0);
+        ILogicalExpression aggFun = agg.getExpressions().get(0).getValue();
+        if (!aggVar.equals(unnestedVar)
+                || ((AbstractLogicalExpression) aggFun).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) aggFun;
+        if (!AsterixBuiltinFunctions.LISTIFY.equals(f.getFunctionIdentifier())) {
+            return false;
+        }
+        if (f.getArguments().size() != 1) {
+            return false;
+        }
+        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
+        if (((AbstractLogicalExpression) arg0).getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+        LogicalVariable paramVar = ((VariableReferenceExpression) arg0).getVariableReference();
+
+        ArrayList<LogicalVariable> assgnVars = new ArrayList<LogicalVariable>(1);
+        assgnVars.add(unnest1.getVariable());
+        ArrayList<Mutable<ILogicalExpression>> assgnExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
+        assgnExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(paramVar)));
+        AssignOperator assign = new AssignOperator(assgnVars, assgnExprs);
+        assign.getInputs().add(agg.getInputs().get(0));
+        context.computeAndSetTypeEnvironmentForOperator(assign);
+        LogicalVariable posVar = unnest1.getPositionalVariable();
+
+        if (posVar == null) {
+            opRef.setValue(assign);
+        } else {
+            ArrayList<LogicalVariable> raggVars = new ArrayList<LogicalVariable>(1);
+            raggVars.add(posVar);
+            ArrayList<Mutable<ILogicalExpression>> rAggExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
+            StatefulFunctionCallExpression tidFun = new StatefulFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TID), UnpartitionedPropertyComputer.INSTANCE);
+            rAggExprs.add(new MutableObject<ILogicalExpression>(tidFun));
+            RunningAggregateOperator rAgg = new RunningAggregateOperator(raggVars, rAggExprs);
+            rAgg.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+            opRef.setValue(rAgg);
+            context.computeAndSetTypeEnvironmentForOperator(rAgg);
+        }
+
+        return true;
+    }
+
+    private boolean appliesForReverseCase(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varUsedAbove,
+            IOptimizationContext context) throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AggregateOperator agg = (AggregateOperator) op1;
+        if (agg.getVariables().size() > 1 || agg.getVariables().size() <= 0) {
+            return false;
+        }
+        LogicalVariable aggVar = agg.getVariables().get(0);
+        ILogicalExpression aggFun = agg.getExpressions().get(0).getValue();
+        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) aggFun;
+        if (!AsterixBuiltinFunctions.LISTIFY.equals(f.getFunctionIdentifier())) {
+            return false;
+        }
+        if (f.getArguments().size() != 1) {
+            return false;
+        }
+        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
+        if (((AbstractLogicalExpression) arg0).getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+        LogicalVariable aggInputVar = ((VariableReferenceExpression) arg0).getVariableReference();
+
+        if (agg.getInputs().size() == 0) {
+            return false;
+        }
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) agg.getInputs().get(0).getValue();
+        if (op2.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+            return false;
+        }
+        UnnestOperator unnest = (UnnestOperator) op2;
+        if (unnest.getPositionalVariable() != null) {
+            return false;
+        }
+        if (!unnest.getVariable().equals(aggInputVar)) {
+            return false;
+        }
+        List<LogicalVariable> unnestSource = new ArrayList<LogicalVariable>();
+        VariableUtilities.getUsedVariables(unnest, unnestSource);
+        if (unnestSource.size() > 1 || unnestSource.size() <= 0) {
+            return false;
+        }
+        ArrayList<LogicalVariable> assgnVars = new ArrayList<LogicalVariable>(1);
+        assgnVars.add(aggVar);
+        ArrayList<Mutable<ILogicalExpression>> assgnExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
+        assgnExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(unnestSource.get(0))));
+        AssignOperator assign = new AssignOperator(assgnVars, assgnExprs);
+        assign.getInputs().add(unnest.getInputs().get(0));
+        context.computeAndSetTypeEnvironmentForOperator(assign);
+        opRef.setValue(assign);
+        return true;
+    }
+}
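
The rewrite above exploits the identity that unnesting a collection which was just listified yields the original items back unchanged, which is why the aggregate/unnest pair can be collapsed into a plain assign when the listified variable is not needed elsewhere. Below is a minimal standalone sketch of that identity in plain Java; the listify/unnest helpers are made-up stand-ins for the AGGREGATE and UNNEST operators, not the Algebricks API.

    import java.util.ArrayList;
    import java.util.List;

    public class ListifyUnnestIdentity {
        // listify: collect a stream of items into a list (the role of the AGGREGATE/listify op).
        static <T> List<T> listify(List<T> items) {
            return new ArrayList<>(items);
        }

        // unnest: iterate the list back out item by item (the role of the UNNEST op).
        static <T> List<T> unnest(List<T> list) {
            List<T> out = new ArrayList<>();
            for (T t : list) {
                out.add(t);
            }
            return out;
        }

        public static void main(String[] args) {
            List<String> source = List.of("a", "b", "c");
            // unnest(listify(x)) == x, so the pair can be replaced by a simple assign.
            System.out.println(unnest(listify(source)).equals(source)); // prints: true
        }
    }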

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java
new file mode 100644
index 0000000..2ed2d2e
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule removes redundant select operators, e.g., select operators
+ * whose condition is always TRUE.
+ * Note that the ConstantFoldingRule will evaluate the condition expression
+ * at compile time when possible.
+ *
+ * @author yingyib
+ */
+public class RemoveRedundantSelectRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator select = (SelectOperator) op;
+        ILogicalExpression cond = select.getCondition().getValue();
+        if (alwaysHold(cond)) {
+            opRef.setValue(select.getInputs().get(0).getValue());
+            return true;
+        }
+        return false;
+    }
+
+    /**
+     * Whether the condition expression always returns true.
+     * 
+     * @param cond
+     * @return true if the condition always holds; false otherwise.
+     */
+    private boolean alwaysHold(ILogicalExpression cond) {
+        if (cond.equals(ConstantExpression.TRUE)) {
+            return true;
+        }
+        if (cond.equals(new ConstantExpression(new AsterixConstantValue(ABoolean.TRUE)))) {
+            return true;
+        }
+        return false;
+    }
+}
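
To illustrate the effect of the rule: a select whose condition is the constant TRUE filters nothing, so the operator can be bypassed and its input branch passed straight through. The following standalone sketch mirrors that short-circuit with hypothetical names and plain JDK classes only (it is not the Algebricks operator model).

    import java.util.List;
    import java.util.function.Predicate;
    import java.util.stream.Collectors;

    public class DropTrueFilterSketch {
        // A shared constant-true predicate plays the role of ConstantExpression.TRUE.
        static final Predicate<Integer> ALWAYS_TRUE = x -> true;

        // "Rewrite": if the predicate is the known constant-true, skip the filter stage entirely.
        static List<Integer> applyFilter(List<Integer> input, Predicate<Integer> pred) {
            if (pred == ALWAYS_TRUE) {
                return input; // the SELECT is redundant; pass its input branch through
            }
            return input.stream().filter(pred).collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<Integer> data = List.of(1, 2, 3);
            System.out.println(applyFilter(data, ALWAYS_TRUE)); // [1, 2, 3], no filtering work done
        }
    }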

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
new file mode 100644
index 0000000..08daa40
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class RemoveSortInFeedIngestionRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+            return false;
+        }
+
+        AbstractLogicalOperator insertOp = op;
+        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        boolean isSourceAFeed = false;
+        while (descendantOp != null) {
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+                AqlDataSource dataSource = (AqlDataSource) ((DataSourceScanOperator) descendantOp).getDataSource();
+                if (dataSource.getDatasourceType().equals(AqlDataSourceType.FEED)) {
+                    isSourceAFeed = true;
+                }
+                break;
+            }
+            if (descendantOp.getInputs().isEmpty()) {
+                break;
+            }
+            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+        }
+
+        if (isSourceAFeed) {
+            AbstractLogicalOperator prevOp = (AbstractLogicalOperator) insertOp.getInputs().get(0).getValue();
+            if (prevOp.getOperatorTag() == LogicalOperatorTag.ORDER) {
+                insertOp.getInputs().set(0, prevOp.getInputs().get(0));
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+}
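
The actual rewrite is the single splice insertOp.getInputs().set(0, prevOp.getInputs().get(0)), which bypasses the ORDER operator sitting between the insert and the feed scan. A minimal standalone sketch of that splice on a toy operator chain follows; the Op class is a made-up stand-in, not an Algebricks type.

    import java.util.ArrayList;
    import java.util.List;

    public class SpliceOutOrderSketch {
        // Minimal stand-in for an operator with a single input chain.
        static class Op {
            final String name;
            final List<Op> inputs = new ArrayList<>();
            Op(String name) { this.name = name; }
        }

        public static void main(String[] args) {
            Op scan = new Op("FEED_SCAN");
            Op order = new Op("ORDER");
            Op insert = new Op("INSERT");
            order.inputs.add(scan);
            insert.inputs.add(order);

            // Same splice as insertOp.getInputs().set(0, prevOp.getInputs().get(0)):
            // the ORDER below the insert is bypassed and the insert reads the feed scan directly.
            if (insert.inputs.get(0).name.equals("ORDER")) {
                insert.inputs.set(0, insert.inputs.get(0).inputs.get(0));
            }
            System.out.println(insert.inputs.get(0).name); // FEED_SCAN
        }
    }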

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
new file mode 100644
index 0000000..d8a8a02
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Removes join operators for which all of the following conditions are true:
+ * 1. The live variables of one input branch of the join are not used in the upstream plan
+ * 2. The join is an inner equi join
+ * 3. The join condition only uses variables that correspond to primary keys of the same dataset
+ * Notice that the last condition implies a 1:1 join, i.e., the join does not change the result cardinality.
+ * Joins that satisfy the above conditions may be introduced by other rules
+ * which use surrogate optimizations. Such an optimization aims to reduce data copies and communication costs by
+ * using the primary keys as surrogates for the desired data items. Typically,
+ * such a surrogate-based plan introduces a top-level join to finally resolve
+ * the surrogates to the desired data items.
+ * In case the upstream plan does not require the original data items at all, such a top-level join is unnecessary.
+ * The purpose of this rule is to remove such unnecessary joins.
+ */
+public class RemoveUnusedOneToOneEquiJoinRule implements IAlgebraicRewriteRule {
+
+    private final Set<LogicalVariable> parentsUsedVars = new HashSet<LogicalVariable>();
+    private final List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+    private final List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
+    private final List<LogicalVariable> pkVars = new ArrayList<LogicalVariable>();
+    private final List<DataSourceScanOperator> dataScans = new ArrayList<DataSourceScanOperator>();
+    private boolean hasRun = false;
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        if (hasRun) {
+            return false;
+        }
+        hasRun = true;
+        if (removeUnusedJoin(opRef)) {
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        return false;
+    }
+
+    private boolean removeUnusedJoin(Mutable<ILogicalOperator> opRef) throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        boolean modified = false;
+
+        usedVars.clear();
+        VariableUtilities.getUsedVariables(op, usedVars);
+        // Propagate used variables from parents downwards.
+        parentsUsedVars.addAll(usedVars);
+
+        int numInputs = op.getInputs().size();
+        for (int i = 0; i < numInputs; i++) {
+            Mutable<ILogicalOperator> childOpRef = op.getInputs().get(i);
+            int unusedJoinBranchIndex = removeJoinFromInputBranch(childOpRef);
+            if (unusedJoinBranchIndex >= 0) {
+                int usedBranchIndex = (unusedJoinBranchIndex == 0) ? 1 : 0;
+                // Remove the join at input index i by hooking up op's input i with
+                // the join's surviving branch at usedBranchIndex.
+                AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) childOpRef.getValue();
+                op.getInputs().set(i, joinOp.getInputs().get(usedBranchIndex));
+                modified = true;
+            }
+            // Descend into children.
+            if (removeUnusedJoin(childOpRef)) {
+                modified = true;
+            }
+        }
+        return modified;
+    }
+
+    private int removeJoinFromInputBranch(Mutable<ILogicalOperator> opRef) throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.INNERJOIN) {
+            return -1;
+        }
+
+        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
+        // Make sure the join is an equi-join.
+        if (!isEquiJoin(joinOp.getCondition())) {
+            return -1;
+        }
+
+        int unusedJoinBranchIndex = -1;
+        for (int i = 0; i < joinOp.getInputs().size(); i++) {
+            liveVars.clear();
+            VariableUtilities.getLiveVariables(joinOp.getInputs().get(i).getValue(), liveVars);
+            liveVars.retainAll(parentsUsedVars);
+            if (liveVars.isEmpty()) {
+                // None of the live variables from this branch are used by its parents.
+                unusedJoinBranchIndex = i;
+                break;
+            }
+        }
+        if (unusedJoinBranchIndex < 0) {
+            // The variables from both branches are used in the upstream plan. We cannot remove this join.
+            return -1;
+        }
+
+        // Collect the variables used in the join condition.
+        usedVars.clear();
+        VariableUtilities.getUsedVariables(joinOp, usedVars);
+
+        // Check whether all used variables originate from primary keys of exactly the same dataset.
+        // Collect a list of datascans whose primary key variables are used in the join condition.
+        gatherProducingDataScans(opRef, usedVars, dataScans);
+
+        // Check that all datascans scan the same dataset, and that the join condition
+        // only used primary key variables of those datascans.
+        for (int i = 0; i < dataScans.size(); i++) {
+            if (i > 0) {
+                DatasetDataSource prevAqlDataSource = (DatasetDataSource) dataScans.get(i - 1).getDataSource();
+                DatasetDataSource currAqlDataSource = (DatasetDataSource) dataScans.get(i).getDataSource();
+                if (!prevAqlDataSource.getDataset().equals(currAqlDataSource.getDataset())) {
+                    return -1;
+                }
+            }
+            // Remove from the used variables all the primary key vars of this dataset.
+            fillPKVars(dataScans.get(i), pkVars);
+            usedVars.removeAll(pkVars);
+        }
+        if (!usedVars.isEmpty()) {
+            // The join condition also uses some other variables that are not primary
+            // keys from datasource scans of the same dataset.
+            return -1;
+        }
+        return unusedJoinBranchIndex;
+    }
+
+    private void gatherProducingDataScans(Mutable<ILogicalOperator> opRef, List<LogicalVariable> joinUsedVars,
+            List<DataSourceScanOperator> dataScans) {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN) {
+            for (Mutable<ILogicalOperator> inputOp : op.getInputs()) {
+                gatherProducingDataScans(inputOp, joinUsedVars, dataScans);
+            }
+            return;
+        }
+        DataSourceScanOperator dataScan = (DataSourceScanOperator) op;
+        fillPKVars(dataScan, pkVars);
+        // Check if join uses all PK vars.
+        if (joinUsedVars.containsAll(pkVars)) {
+            dataScans.add(dataScan);
+        }
+    }
+
+    private void fillPKVars(DataSourceScanOperator dataScan, List<LogicalVariable> pkVars) {
+        pkVars.clear();
+        DatasetDataSource datasetDataSource = (DatasetDataSource) dataScan.getDataSource();
+        if (datasetDataSource.getDataset().getDatasetDetails() instanceof InternalDatasetDetails) {
+            int numPKs = DatasetUtils.getPartitioningKeys(datasetDataSource.getDataset()).size();
+            for (int i = 0; i < numPKs; i++) {
+                pkVars.add(dataScan.getVariables().get(i));
+            }
+        }
+    }
+
+    private boolean isEquiJoin(Mutable<ILogicalExpression> conditionExpr) {
+        AbstractLogicalExpression expr = (AbstractLogicalExpression) conditionExpr.getValue();
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+            if (funcIdent != AlgebricksBuiltinFunctions.AND && funcIdent != AlgebricksBuiltinFunctions.EQ) {
+                return false;
+            }
+        }
+        return true;
+    }
+}
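
The reason the join can be dropped is that an inner equi-join of a dataset with itself on its complete primary key is 1:1: it neither adds nor removes rows, so when the other branch's variables are unused the join contributes nothing to the result. A standalone sketch of that cardinality argument in plain Java follows; the Row record and helper are hypothetical illustrations, not part of the rule.

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class OneToOneJoinRemovalSketch {
        record Row(int pk, String payload) {}

        // Self equi-join on the primary key, keeping only the left side's columns.
        static List<Row> joinOnPkProjectLeft(List<Row> left, List<Row> right) {
            Map<Integer, Row> rightByPk = right.stream().collect(Collectors.toMap(Row::pk, r -> r));
            return left.stream().filter(l -> rightByPk.containsKey(l.pk())).collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<Row> dataset = List.of(new Row(1, "a"), new Row(2, "b"));
            // A 1:1 primary-key join against the same dataset preserves the rows exactly,
            // so if the other branch's variables are unused the join can simply be removed.
            System.out.println(joinOnPkProjectLeft(dataset, dataset).equals(dataset)); // true
        }
    }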

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
new file mode 100644
index 0000000..8de761a
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.algebra.operators.CommitOperator;
+import edu.uci.ics.asterix.algebra.operators.physical.CommitPOperator;
+import edu.uci.ics.asterix.common.transactions.JobId;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class ReplaceSinkOpWithCommitOpRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        // TODO Auto-generated method stub
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.SINK) {
+            return false;
+        }
+        SinkOperator sinkOperator = (SinkOperator) op;
+
+        List<Mutable<ILogicalExpression>> primaryKeyExprs = null;
+        int datasetId = 0;
+        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) sinkOperator.getInputs().get(0).getValue();
+        while (descendantOp != null) {
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.INDEX_INSERT_DELETE) {
+                IndexInsertDeleteOperator indexInsertDeleteOperator = (IndexInsertDeleteOperator) descendantOp;
+                if (!indexInsertDeleteOperator.isBulkload()) {
+                    primaryKeyExprs = indexInsertDeleteOperator.getPrimaryKeyExpressions();
+                    datasetId = ((DatasetDataSource) indexInsertDeleteOperator.getDataSourceIndex().getDataSource())
+                            .getDataset().getDatasetId();
+                    break;
+                }
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
+                InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) descendantOp;
+                if (!insertDeleteOperator.isBulkload()) {
+                    primaryKeyExprs = insertDeleteOperator.getPrimaryKeyExpressions();
+                    datasetId = ((DatasetDataSource) insertDeleteOperator.getDataSource()).getDataset().getDatasetId();
+                    break;
+                }
+            }
+            if (descendantOp.getInputs().size() < 1) {
+                break;
+            }
+            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+        }
+
+        if (primaryKeyExprs == null) {
+            return false;
+        }
+
+        //copy primaryKeyExprs
+        List<LogicalVariable> primaryKeyLogicalVars = new ArrayList<LogicalVariable>();
+        for (Mutable<ILogicalExpression> expr : primaryKeyExprs) {
+            VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr.getValue();
+            primaryKeyLogicalVars.add(new LogicalVariable(varRefExpr.getVariableReference().getId()));
+        }
+
+        //get the JobId (transaction id)
+        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+        JobId jobId = mp.getJobId();
+
+        //create the logical and physical operator
+        CommitOperator commitOperator = new CommitOperator(primaryKeyLogicalVars);
+        CommitPOperator commitPOperator = new CommitPOperator(jobId, datasetId, primaryKeyLogicalVars);
+        commitOperator.setPhysicalOperator(commitPOperator);
+
+        //create ExtensionOperator and put the commitOperator in it.
+        ExtensionOperator extensionOperator = new ExtensionOperator(commitOperator);
+        extensionOperator.setPhysicalOperator(commitPOperator);
+
+        //update plan link
+        extensionOperator.getInputs().add(sinkOperator.getInputs().get(0));
+        context.computeAndSetTypeEnvironmentForOperator(extensionOperator);
+        opRef.setValue(extensionOperator);
+        return true;
+    }
+
+}
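
The shape of the replacement is a thin wrapper: a generic extension operator that simply carries the commit's physical operator (job id, dataset id, primary keys) so the runtime can later be generated from it. Below is a minimal standalone sketch of that delegation pattern using made-up types, not the real ExtensionOperator/CommitPOperator classes.

    public class CommitExtensionSketch {
        interface PhysicalOp {
            void contribute();
        }

        // Stand-in for CommitPOperator: carries the transaction/job id and dataset id.
        record CommitPhysical(long jobId, int datasetId) implements PhysicalOp {
            @Override
            public void contribute() {
                System.out.println("commit runtime for job " + jobId + ", dataset " + datasetId);
            }
        }

        // Stand-in for ExtensionOperator: a generic logical node that delegates
        // runtime generation to whatever physical operator it wraps.
        record Extension(PhysicalOp physical) {
            void generateRuntime() {
                physical.contribute();
            }
        }

        public static void main(String[] args) {
            new Extension(new CommitPhysical(42L, 7)).generateRuntime();
        }
    }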

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
new file mode 100644
index 0000000..65c9da8
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
@@ -0,0 +1,300 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.operators.physical.BTreeSearchPOperator;
+import edu.uci.ics.asterix.algebra.operators.physical.InvertedIndexPOperator;
+import edu.uci.ics.asterix.algebra.operators.physical.RTreeSearchPOperator;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
+import edu.uci.ics.asterix.optimizer.rules.am.BTreeJobGenParams;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.ExternalGroupByPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.PreclusteredGroupByPOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.algebricks.rewriter.util.JoinUtils;
+
+public class SetAsterixPhysicalOperatorsRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+
+        computeDefaultPhysicalOp(op, context);
+        context.addToDontApplySet(this, op);
+        return true;
+    }
+
+    private static void setPhysicalOperators(ILogicalPlan plan, IOptimizationContext context)
+            throws AlgebricksException {
+        for (Mutable<ILogicalOperator> root : plan.getRoots()) {
+            computeDefaultPhysicalOp((AbstractLogicalOperator) root.getValue(), context);
+        }
+    }
+
+    private static void computeDefaultPhysicalOp(AbstractLogicalOperator op, IOptimizationContext context)
+            throws AlgebricksException {
+        PhysicalOptimizationConfig physicalOptimizationConfig = context.getPhysicalOptimizationConfig();
+        if (op.getOperatorTag().equals(LogicalOperatorTag.GROUP)) {
+            GroupByOperator gby = (GroupByOperator) op;
+            if (gby.getNestedPlans().size() == 1) {
+                ILogicalPlan p0 = gby.getNestedPlans().get(0);
+                if (p0.getRoots().size() == 1) {
+                    Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
+                    if (((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().equals(
+                            LogicalOperatorTag.AGGREGATE)) {
+                        AggregateOperator aggOp = (AggregateOperator) r0.getValue();
+                        boolean serializable = true;
+                        for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) {
+                            AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) exprRef.getValue();
+                            if (!AsterixBuiltinFunctions.isAggregateFunctionSerializable(expr.getFunctionIdentifier())) {
+                                serializable = false;
+                                break;
+                            }
+                        }
+
+                        if ((gby.getAnnotations().get(OperatorAnnotations.USE_HASH_GROUP_BY) == Boolean.TRUE || gby
+                                .getAnnotations().get(OperatorAnnotations.USE_EXTERNAL_GROUP_BY) == Boolean.TRUE)) {
+                            boolean setToExternalGby = false;
+                            if (serializable) {
+                                // if serializable, use external group-by
+                                // now check whether the serialized version of the aggregation function has a corresponding intermediate aggregate
+                                boolean hasIntermediateAgg = true;
+                                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
+                                        .getMergeAggregationExpressionFactory();
+                                List<LogicalVariable> originalVariables = aggOp.getVariables();
+                                List<Mutable<ILogicalExpression>> aggExprs = aggOp.getExpressions();
+                                int aggNum = aggExprs.size();
+                                for (int i = 0; i < aggNum; i++) {
+                                    AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs
+                                            .get(i).getValue();
+                                    AggregateFunctionCallExpression serialAggExpr = AsterixBuiltinFunctions
+                                            .makeSerializableAggregateFunctionExpression(expr.getFunctionIdentifier(),
+                                                    expr.getArguments());
+                                    if (mergeAggregationExpressionFactory.createMergeAggregation(
+                                            originalVariables.get(i), serialAggExpr, context) == null) {
+                                        hasIntermediateAgg = false;
+                                        break;
+                                    }
+                                }
+
+                                if (hasIntermediateAgg) {
+                                    for (int i = 0; i < aggNum; i++) {
+                                        AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs
+                                                .get(i).getValue();
+                                        AggregateFunctionCallExpression serialAggExpr = AsterixBuiltinFunctions
+                                                .makeSerializableAggregateFunctionExpression(
+                                                        expr.getFunctionIdentifier(), expr.getArguments());
+                                        aggOp.getExpressions().get(i).setValue(serialAggExpr);
+                                    }
+                                    ExternalGroupByPOperator externalGby = new ExternalGroupByPOperator(
+                                            gby.getGroupByList(),
+                                            physicalOptimizationConfig.getMaxFramesExternalGroupBy(),
+                                            physicalOptimizationConfig.getExternalGroupByTableSize());
+                                    generateMergeAggregationExpressions(gby, context);
+                                    op.setPhysicalOperator(externalGby);
+                                    setToExternalGby = true;
+                                }
+                            }
+
+                            if (!setToExternalGby) {
+                                // if not serializable or no intermediate agg, use pre-clustered group-by
+                                List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gbyList = gby.getGroupByList();
+                                List<LogicalVariable> columnList = new ArrayList<LogicalVariable>(gbyList.size());
+                                for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyList) {
+                                    ILogicalExpression expr = p.second.getValue();
+                                    if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                                        VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
+                                        columnList.add(varRef.getVariableReference());
+                                    }
+                                }
+                                op.setPhysicalOperator(new PreclusteredGroupByPOperator(columnList));
+                            }
+                        }
+                    } else if (((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().equals(
+                            LogicalOperatorTag.RUNNINGAGGREGATE)) {
+                        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gbyList = gby.getGroupByList();
+                        List<LogicalVariable> columnList = new ArrayList<LogicalVariable>(gbyList.size());
+                        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyList) {
+                            ILogicalExpression expr = p.second.getValue();
+                            if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                                VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
+                                columnList.add(varRef.getVariableReference());
+                            }
+                        }
+                        op.setPhysicalOperator(new PreclusteredGroupByPOperator(columnList));
+                    } else {
+                        throw new AlgebricksException("Unsupported nested operator within a group-by: "
+                                + ((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().name());
+                    }
+                }
+            }
+        }
+        if (op.getPhysicalOperator() == null) {
+            switch (op.getOperatorTag()) {
+                case INNERJOIN: {
+                    JoinUtils.setJoinAlgorithmAndExchangeAlgo((InnerJoinOperator) op, context);
+                    break;
+                }
+                case LEFTOUTERJOIN: {
+                    JoinUtils.setJoinAlgorithmAndExchangeAlgo((LeftOuterJoinOperator) op, context);
+                    break;
+                }
+                case UNNEST_MAP: {
+                    UnnestMapOperator unnestMap = (UnnestMapOperator) op;
+                    ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
+                    if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                        FunctionIdentifier fid = f.getFunctionIdentifier();
+                        if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                            throw new IllegalStateException();
+                        }
+                        AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                        jobGenParams.readFromFuncArgs(f.getArguments());
+                        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
+                        AqlSourceId dataSourceId = new AqlSourceId(jobGenParams.getDataverseName(),
+                                jobGenParams.getDatasetName());
+                        IDataSourceIndex<String, AqlSourceId> dsi = mp.findDataSourceIndex(jobGenParams.getIndexName(),
+                                dataSourceId);
+                        if (dsi == null) {
+                            throw new AlgebricksException("Could not find index " + jobGenParams.getIndexName()
+                                    + " for dataset " + dataSourceId);
+                        }
+                        IndexType indexType = jobGenParams.getIndexType();
+                        boolean requiresBroadcast = jobGenParams.getRequiresBroadcast();
+                        switch (indexType) {
+                            case BTREE: {
+                                BTreeJobGenParams btreeJobGenParams = new BTreeJobGenParams();
+                                btreeJobGenParams.readFromFuncArgs(f.getArguments());
+                                op.setPhysicalOperator(new BTreeSearchPOperator(dsi, requiresBroadcast,
+                                        btreeJobGenParams.isPrimaryIndex(), btreeJobGenParams.isEqCondition(),
+                                        btreeJobGenParams.getLowKeyVarList(), btreeJobGenParams.getHighKeyVarList()));
+                                break;
+                            }
+                            case RTREE: {
+                                op.setPhysicalOperator(new RTreeSearchPOperator(dsi, requiresBroadcast));
+                                break;
+                            }
+                            case SINGLE_PARTITION_WORD_INVIX:
+                            case SINGLE_PARTITION_NGRAM_INVIX: {
+                                op.setPhysicalOperator(new InvertedIndexPOperator(dsi, requiresBroadcast, false));
+                                break;
+                            }
+                            case LENGTH_PARTITIONED_WORD_INVIX:
+                            case LENGTH_PARTITIONED_NGRAM_INVIX: {
+                                op.setPhysicalOperator(new InvertedIndexPOperator(dsi, requiresBroadcast, true));
+                                break;
+                            }
+                            default: {
+                                throw new NotImplementedException(indexType + " indexes are not implemented.");
+                            }
+                        }
+                    }
+                    break;
+                }
+            }
+        }
+        if (op.hasNestedPlans()) {
+            AbstractOperatorWithNestedPlans nested = (AbstractOperatorWithNestedPlans) op;
+            for (ILogicalPlan p : nested.getNestedPlans()) {
+                setPhysicalOperators(p, context);
+            }
+        }
+        for (Mutable<ILogicalOperator> opRef : op.getInputs()) {
+            computeDefaultPhysicalOp((AbstractLogicalOperator) opRef.getValue(), context);
+        }
+    }
+
+    private static void generateMergeAggregationExpressions(GroupByOperator gby, IOptimizationContext context)
+            throws AlgebricksException {
+        if (gby.getNestedPlans().size() != 1) {
+            throw new AlgebricksException(
+                    "External group-by currently works only for one nested plan with one root containing"
+                            + "an aggregate and a nested-tuple-source.");
+        }
+        ILogicalPlan p0 = gby.getNestedPlans().get(0);
+        if (p0.getRoots().size() != 1) {
+            throw new AlgebricksException(
+                    "External group-by currently works only for one nested plan with one root containing"
+                            + "an aggregate and a nested-tuple-source.");
+        }
+        IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
+                .getMergeAggregationExpressionFactory();
+        Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
+        AbstractLogicalOperator r0Logical = (AbstractLogicalOperator) r0.getValue();
+        if (r0Logical.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            throw new AlgebricksException("The merge aggregation expression generation should not process a "
+                    + r0Logical.getOperatorTag() + " operator.");
+        }
+        AggregateOperator aggOp = (AggregateOperator) r0.getValue();
+        List<Mutable<ILogicalExpression>> aggFuncRefs = aggOp.getExpressions();
+        List<LogicalVariable> aggProducedVars = aggOp.getVariables();
+        int n = aggOp.getExpressions().size();
+        List<Mutable<ILogicalExpression>> mergeExpressionRefs = new ArrayList<Mutable<ILogicalExpression>>();
+        for (int i = 0; i < n; i++) {
+            ILogicalExpression mergeExpr = mergeAggregationExpressionFactory.createMergeAggregation(
+                    aggProducedVars.get(i), aggFuncRefs.get(i).getValue(), context);
+            if (mergeExpr == null) {
+                throw new AlgebricksException("The aggregation function " + aggFuncRefs.get(i).getValue()
+                        + " does not have a registered intermediate aggregation function.");
+            }
+            mergeExpressionRefs.add(new MutableObject<ILogicalExpression>(mergeExpr));
+        }
+        aggOp.setMergeExpressions(mergeExpressionRefs);
+    }
+
+}
\ No newline at end of file
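
For the GROUP operator, the physical choice above boils down to: if the hash/external group-by hint is set and every aggregate both has a serializable form and a registered intermediate (merge) aggregate, use external group-by; otherwise fall back to pre-clustered group-by. The standalone decision sketch below uses hypothetical inputs; AggInfo is a made-up stand-in for the checks the rule performs against AsterixBuiltinFunctions.

    import java.util.List;

    public class GroupByPhysicalChoiceSketch {
        enum PhysicalGroupBy { EXTERNAL, PRE_CLUSTERED }

        // Hypothetical per-aggregate capabilities: serializable form available,
        // and an intermediate (merge) aggregate registered for it.
        record AggInfo(boolean serializable, boolean hasIntermediateMerge) {}

        static PhysicalGroupBy choose(boolean hashGroupByHintSet, List<AggInfo> aggs) {
            boolean allExternalizable = hashGroupByHintSet
                    && aggs.stream().allMatch(a -> a.serializable() && a.hasIntermediateMerge());
            return allExternalizable ? PhysicalGroupBy.EXTERNAL : PhysicalGroupBy.PRE_CLUSTERED;
        }

        public static void main(String[] args) {
            System.out.println(choose(true, List.of(new AggInfo(true, true))));   // EXTERNAL
            System.out.println(choose(true, List.of(new AggInfo(true, false))));  // PRE_CLUSTERED
        }
    }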

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
new file mode 100644
index 0000000..4b9a947
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.AbstractConstVarFunVisitor;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * open-record-constructor() becomes closed-record-constructor() if all the
+ * branches below lead to dataset scans for closed record types
+ */
+
+public class SetClosedRecordConstructorsRule implements IAlgebraicRewriteRule {
+
+    private SettingClosedRecordVisitor recordVisitor;
+
+    public SetClosedRecordConstructorsRule() {
+        this.recordVisitor = new SettingClosedRecordVisitor();
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext ctx) throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (ctx.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        ctx.addToDontApplySet(this, op);
+        this.recordVisitor.setOptimizationContext(ctx, op.computeInputTypeEnvironment(ctx));
+        boolean res = op.acceptExpressionTransform(recordVisitor);
+        if (res) {
+            ctx.computeAndSetTypeEnvironmentForOperator(op);
+        }
+        return res;
+    }
+
+    private static class SettingClosedRecordVisitor extends AbstractConstVarFunVisitor<ClosedDataInfo, Void> implements
+            ILogicalExpressionReferenceTransform {
+
+        private IOptimizationContext context;
+        private IVariableTypeEnvironment env;
+
+        public void setOptimizationContext(IOptimizationContext context, IVariableTypeEnvironment env) {
+            this.context = context;
+            this.env = env;
+        }
+
+        @Override
+        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
+            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
+            ClosedDataInfo cdi = expr.accept(this, null);
+            if (cdi.expressionChanged) {
+                exprRef.setValue(cdi.expression);
+            }
+            return cdi.expressionChanged;
+        }
+
+        @Override
+        public ClosedDataInfo visitConstantExpression(ConstantExpression expr, Void arg) throws AlgebricksException {
+            return new ClosedDataInfo(false, hasClosedType(expr), expr);
+        }
+
+        @Override
+        public ClosedDataInfo visitFunctionCallExpression(AbstractFunctionCallExpression expr, Void arg)
+                throws AlgebricksException {
+            boolean allClosed = true;
+            boolean changed = false;
+            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
+                ARecordType reqType = (ARecordType) TypeComputerUtilities.getRequiredType(expr);
+                if (reqType == null || !reqType.isOpen()) {
+                    int n = expr.getArguments().size();
+                    if (n % 2 > 0) {
+                        throw new AlgebricksException(
+                                "Record constructor expected to have an even number of arguments: " + expr);
+                    }
+                    for (int i = 0; i < n / 2; i++) {
+                        ILogicalExpression a0 = expr.getArguments().get(2 * i).getValue();
+                        if (a0.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                            allClosed = false;
+                        }
+                        Mutable<ILogicalExpression> aRef1 = expr.getArguments().get(2 * i + 1);
+                        ILogicalExpression a1 = aRef1.getValue();
+                        ClosedDataInfo cdi = a1.accept(this, arg);
+                        if (!cdi.dataIsClosed) {
+                            allClosed = false;
+                        }
+                        if (cdi.expressionChanged) {
+                            aRef1.setValue(cdi.expression);
+                            changed = true;
+                        }
+                    }
+                    if (allClosed) {
+                        expr.setFunctionInfo(FunctionUtils
+                                .getFunctionInfo(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR));
+                        GlobalConfig.ASTERIX_LOGGER.finest("Switching to CLOSED record constructor in " + expr + ".\n");
+                        changed = true;
+                    }
+                }
+            } else {
+                boolean rewrite = true;
+                if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)
+                        || (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR))) {
+                    IAType reqType = TypeComputerUtilities.getRequiredType(expr);
+                    if (reqType == null) {
+                        rewrite = false;
+                    }
+                }
+                if (rewrite) {
+                    for (Mutable<ILogicalExpression> e : expr.getArguments()) {
+                        ILogicalExpression ale = e.getValue();
+                        ClosedDataInfo cdi = ale.accept(this, arg);
+                        if (!cdi.dataIsClosed) {
+                            allClosed = false;
+                        }
+                        if (cdi.expressionChanged) {
+                            e.setValue(cdi.expression);
+                            changed = true;
+                        }
+                    }
+                }
+            }
+            return new ClosedDataInfo(changed, hasClosedType(expr), expr);
+        }
+
+        @Override
+        public ClosedDataInfo visitVariableReferenceExpression(VariableReferenceExpression expr, Void arg)
+                throws AlgebricksException {
+            Object varType = env.getVarType(expr.getVariableReference());
+            if (varType == null) {
+                throw new AlgebricksException("Could not infer type for variable '" + expr.getVariableReference()
+                        + "'.");
+            }
+            boolean dataIsClosed = isClosedRec((IAType) varType);
+            return new ClosedDataInfo(false, dataIsClosed, expr);
+        }
+
+        private boolean hasClosedType(ILogicalExpression expr) throws AlgebricksException {
+            IAType t = (IAType) context.getExpressionTypeComputer().getType(expr, context.getMetadataProvider(), env);
+            return isClosedRec(t);
+        }
+
+        private static boolean isClosedRec(IAType t) throws AlgebricksException {
+            switch (t.getTypeTag()) {
+                case ANY: {
+                    return false;
+                }
+                case CIRCLE:
+                case INT8:
+                case INT16:
+                case INT32:
+                case INT64:
+                case BINARY:
+                case BITARRAY:
+                case FLOAT:
+                case DOUBLE:
+                case STRING:
+                case LINE:
+                case NULL:
+                case BOOLEAN:
+                case DATETIME:
+                case DATE:
+                case TIME:
+                case UUID:
+                case DURATION:
+                case YEARMONTHDURATION:
+                case DAYTIMEDURATION:
+                case INTERVAL:
+                case POINT:
+                case POINT3D:
+                case POLYGON:
+                case RECTANGLE:
+                case SHORTWITHOUTTYPEINFO: {
+                    return true;
+                }
+                case RECORD: {
+                    return !((ARecordType) t).isOpen();
+                }
+                case UNION: {
+                    AUnionType ut = (AUnionType) t;
+                    for (IAType t2 : ut.getUnionList()) {
+                        if (!isClosedRec(t2)) {
+                            return false;
+                        }
+                    }
+                    return true;
+                }
+                case ORDEREDLIST: {
+                    AOrderedListType ol = (AOrderedListType) t;
+                    return isClosedRec(ol.getItemType());
+                }
+                case UNORDEREDLIST: {
+                    AUnorderedListType ul = (AUnorderedListType) t;
+                    return isClosedRec(ul.getItemType());
+                }
+                default: {
+                    throw new IllegalStateException("Closed type analysis not implemented for type " + t);
+                }
+            }
+        }
+    }
+
+    private static class ClosedDataInfo {
+        boolean expressionChanged;
+        boolean dataIsClosed;
+        ILogicalExpression expression;
+
+        public ClosedDataInfo(boolean expressionChanged, boolean dataIsClosed, ILogicalExpression expression) {
+            this.expressionChanged = expressionChanged;
+            this.dataIsClosed = dataIsClosed;
+            this.expression = expression;
+        }
+    }
+}
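The visitor above decides closed-ness by recursing through the type: scalar types are closed, ANY is not, lists and unions inherit from their member types, and a record must be declared closed and contain only closed field types. A minimal standalone sketch of that recursion over a toy type model (illustrative only, not the Asterix IAType hierarchy; union types are omitted):

    import java.util.Arrays;
    import java.util.List;

    public class ClosedTypeCheckSketch {

        interface Type { }

        // INT32, STRING, DATE, ... : always closed.
        static class ScalarType implements Type { }

        // ANY: never closed.
        static class AnyType implements Type { }

        // Ordered/unordered list: closed-ness follows the item type.
        static class ListType implements Type {
            final Type itemType;
            ListType(Type itemType) { this.itemType = itemType; }
        }

        // Record: must be declared closed and contain only closed field types.
        static class RecordType implements Type {
            final boolean open;
            final List<Type> fieldTypes;
            RecordType(boolean open, List<Type> fieldTypes) {
                this.open = open;
                this.fieldTypes = fieldTypes;
            }
        }

        static boolean isClosed(Type t) {
            if (t instanceof ScalarType) {
                return true;
            }
            if (t instanceof AnyType) {
                return false;
            }
            if (t instanceof ListType) {
                return isClosed(((ListType) t).itemType);
            }
            RecordType r = (RecordType) t;
            if (r.open) {
                return false;
            }
            for (Type field : r.fieldTypes) {
                if (!isClosed(field)) {
                    return false;
                }
            }
            return true;
        }

        public static void main(String[] args) {
            Type allClosed = new RecordType(false, Arrays.asList(new ScalarType(), new ListType(new ScalarType())));
            Type hasAnyField = new RecordType(false, Arrays.asList(new AnyType()));
            System.out.println(isClosed(allClosed));   // true  -> constructor may be switched to closed
            System.out.println(isClosed(hasAnyField)); // false -> constructor stays open
        }
    }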

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java
new file mode 100644
index 0000000..2d03d69
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java
@@ -0,0 +1,336 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.ADouble;
+import edu.uci.ics.asterix.om.base.AFloat;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Looks for a select operator, containing a condition:
+ * similarity-function GE/GT/LE/LE constant/variable
+ * Rewrites the select condition (and possibly the assign expr) with the equivalent similarity-check function.

+ */
+public class SimilarityCheckRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        // Look for select.
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        SelectOperator select = (SelectOperator) op;
+        Mutable<ILogicalExpression> condExpr = select.getCondition();
+
+        // Gather assigns below this select.
+        List<AssignOperator> assigns = new ArrayList<AssignOperator>();
+        AbstractLogicalOperator childOp = (AbstractLogicalOperator) select.getInputs().get(0).getValue();
+        // Skip selects.
+        while (childOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
+            childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
+        }
+        while (childOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            assigns.add((AssignOperator) childOp);
+            childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
+        }
+        return replaceSelectConditionExprs(condExpr, assigns, context);
+    }
+
+    private boolean replaceSelectConditionExprs(Mutable<ILogicalExpression> expRef, List<AssignOperator> assigns,
+            IOptimizationContext context) throws AlgebricksException {
+        ILogicalExpression expr = expRef.getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+        // Recursively traverse conjuncts.
+        // TODO: Ignore disjuncts for now, because some replacements may be invalid.
+        // For example, if the result of the similarity function is used somewhere upstream,
+        // then we may still need the true similarity value even if the GE/GT/LE/LT comparison returns false.
+        if (funcIdent == AlgebricksBuiltinFunctions.AND) {
+            boolean found = true;
+            for (int i = 0; i < funcExpr.getArguments().size(); ++i) {
+                found = found && replaceSelectConditionExprs(funcExpr.getArguments().get(i), assigns, context);
+            }
+            return found;
+        }
+
+        // Look for GE/GT/LE/LT.
+        if (funcIdent != AlgebricksBuiltinFunctions.GE && funcIdent != AlgebricksBuiltinFunctions.GT
+                && funcIdent != AlgebricksBuiltinFunctions.LE && funcIdent != AlgebricksBuiltinFunctions.LT) {
+            return false;
+        }
+
+        // One arg should be a function call or a variable, the other a constant.
+        AsterixConstantValue constVal = null;
+        ILogicalExpression nonConstExpr = null;
+        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
+        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
+        // Normalized GE/GT/LE/LT as if constant was on the right hand side.
+        FunctionIdentifier normFuncIdent = null;
+        // One of the args must be a constant.
+        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+            ConstantExpression constExpr = (ConstantExpression) arg1;
+            constVal = (AsterixConstantValue) constExpr.getValue();
+            nonConstExpr = arg2;
+            // Get func ident as if swapping lhs and rhs.
+            normFuncIdent = getLhsAndRhsSwappedFuncIdent(funcIdent);
+        } else if (arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+            ConstantExpression constExpr = (ConstantExpression) arg2;
+            constVal = (AsterixConstantValue) constExpr.getValue();
+            nonConstExpr = arg1;
+            // Constant is already on rhs, so nothing to be done for normalizedFuncIdent.
+            normFuncIdent = funcIdent;
+        } else {
+            return false;
+        }
+
+        // The other arg is a function call. We can directly replace the select condition with an equivalent similarity check expression.
+        if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            return replaceWithFunctionCallArg(expRef, normFuncIdent, constVal,
+                    (AbstractFunctionCallExpression) nonConstExpr);
+        }
+        // The other arg is a variable. We may have to introduce an assign operator that assigns the result of a similarity-check function to a variable.
+        if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+            return replaceWithVariableArg(expRef, normFuncIdent, constVal, (VariableReferenceExpression) nonConstExpr,
+                    assigns, context);
+        }
+        return false;
+    }
+
+    private boolean replaceWithVariableArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
+            AsterixConstantValue constVal, VariableReferenceExpression varRefExpr, List<AssignOperator> assigns,
+            IOptimizationContext context) throws AlgebricksException {
+
+        // Find variable in assigns to determine its originating function.
+        LogicalVariable var = varRefExpr.getVariableReference();
+        Mutable<ILogicalExpression> simFuncExprRef = null;
+        ScalarFunctionCallExpression simCheckFuncExpr = null;
+        AssignOperator matchingAssign = null;
+        for (int i = 0; i < assigns.size(); i++) {
+            AssignOperator assign = assigns.get(i);
+            for (int j = 0; j < assign.getVariables().size(); j++) {
+                // Check if variables match.
+                if (var != assign.getVariables().get(j)) {
+                    continue;
+                }
+                // Check if corresponding expr is a function call.
+                if (assign.getExpressions().get(j).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                    continue;
+                }
+                simFuncExprRef = assign.getExpressions().get(j);
+                // Analyze function expression and get equivalent similarity check function.
+                simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal,
+                        (AbstractFunctionCallExpression) simFuncExprRef.getValue());
+                matchingAssign = assign;
+                break;
+            }
+            if (simCheckFuncExpr != null) {
+                break;
+            }
+        }
+
+        // Only non-null if we found that varRefExpr refers to an optimizable similarity function call.
+        if (simCheckFuncExpr != null) {
+            // Create a new assign under matchingAssign which assigns the result of our similarity-check function to a variable.
+            LogicalVariable newVar = context.newVar();
+            AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(
+                    simCheckFuncExpr));
+            // Hook up inputs.
+            newAssign.getInputs()
+                    .add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
+            matchingAssign.getInputs().get(0).setValue(newAssign);
+
+            // Replace select condition with a get-item on newVar.
+            List<Mutable<ILogicalExpression>> selectGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            // First arg is a variable reference expr on newVar.
+            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
+            // Second arg is the item index to be accessed, here 0.
+            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+                    new AsterixConstantValue(new AInt32(0)))));
+            ILogicalExpression selectGetItemExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), selectGetItemArgs);
+            // Replace the old similarity function call with the new getItemExpr.
+            expRef.setValue(selectGetItemExpr);
+
+            // Replace expr corresponding to original variable in the original assign with a get-item on newVar.
+            List<Mutable<ILogicalExpression>> assignGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            // First arg is a variable reference expr on newVar.
+            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
+            // Second arg is the item index to be accessed, here 1.
+            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+                    new AsterixConstantValue(new AInt32(1)))));
+            ILogicalExpression assignGetItemExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), assignGetItemArgs);
+            // Replace the original assign expr with the get-item expr.
+            simFuncExprRef.setValue(assignGetItemExpr);
+
+            context.computeAndSetTypeEnvironmentForOperator(newAssign);
+            context.computeAndSetTypeEnvironmentForOperator(matchingAssign);
+
+            return true;
+        }
+
+        return false;
+    }
+
+    private boolean replaceWithFunctionCallArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
+            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) throws AlgebricksException {
+        // Analyze func expr to see if it is an optimizable similarity function.
+        ScalarFunctionCallExpression simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, funcExpr);
+
+        // Replace the expr in the select condition.
+        if (simCheckFuncExpr != null) {
+            // Get item 0 from var.
+            List<Mutable<ILogicalExpression>> getItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            // First arg is the similarity-check function call.
+            getItemArgs.add(new MutableObject<ILogicalExpression>(simCheckFuncExpr));
+            // Second arg is the item index to be accessed.
+            getItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                    new AInt32(0)))));
+            ILogicalExpression getItemExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), getItemArgs);
+            // Replace the old similarity function call with the new getItemExpr.
+            expRef.setValue(getItemExpr);
+            return true;
+        }
+
+        return false;
+    }
+
+    private ScalarFunctionCallExpression getSimilarityCheckExpr(FunctionIdentifier normFuncIdent,
+            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) throws AlgebricksException {
+        // Remember args from original similarity function to add them to the similarity-check function later.
+        ArrayList<Mutable<ILogicalExpression>> similarityArgs = null;
+        ScalarFunctionCallExpression simCheckFuncExpr = null;
+        // Look for jaccard function call, and GE or GT.
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD) {
+            IAObject jaccThresh;
+            if (normFuncIdent == AlgebricksBuiltinFunctions.GE) {
+                if (constVal.getObject() instanceof AFloat) {
+                    jaccThresh = constVal.getObject();
+                } else {
+                    jaccThresh = new AFloat((float) ((ADouble) constVal.getObject()).getDoubleValue());
+                }
+            } else if (normFuncIdent == AlgebricksBuiltinFunctions.GT) {
+                float threshVal = 0.0f;
+                if (constVal.getObject() instanceof AFloat) {
+                    threshVal = ((AFloat) constVal.getObject()).getFloatValue();
+                } else {
+                    threshVal = (float) ((ADouble) constVal.getObject()).getDoubleValue();
+                }
+                float f = threshVal + Float.MIN_VALUE;
+                if (f > 1.0f)
+                    f = 1.0f;
+                jaccThresh = new AFloat(f);
+            } else {
+                return null;
+            }
+            similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            similarityArgs.addAll(funcExpr.getArguments());
+            similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                    jaccThresh))));
+            simCheckFuncExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK), similarityArgs);
+        }
+
+        // Look for edit-distance function call, and LE or LT.
+        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE) {
+            AInt32 aInt = new AInt32(0);
+            try {
+                aInt = (AInt32) ATypeHierarchy.convertNumericTypeObject(constVal.getObject(), ATypeTag.INT32);
+            } catch (AsterixException e) {
+                throw new AlgebricksException(e);
+            }
+
+            AInt32 edThresh;
+            if (normFuncIdent == AlgebricksBuiltinFunctions.LE) {
+                edThresh = aInt;
+            } else if (normFuncIdent == AlgebricksBuiltinFunctions.LT) {
+                int ed = aInt.getIntegerValue() - 1;
+                if (ed < 0)
+                    ed = 0;
+                edThresh = new AInt32(ed);
+            } else {
+                return null;
+            }
+            similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            similarityArgs.addAll(funcExpr.getArguments());
+            similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                    edThresh))));
+            simCheckFuncExpr = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK), similarityArgs);
+        }
+        // Preserve all annotations.
+        if (simCheckFuncExpr != null) {
+            simCheckFuncExpr.getAnnotations().putAll(funcExpr.getAnnotations());
+        }
+        return simCheckFuncExpr;
+    }
+
+    private FunctionIdentifier getLhsAndRhsSwappedFuncIdent(FunctionIdentifier oldFuncIdent) {
+        if (oldFuncIdent == AlgebricksBuiltinFunctions.GE) {
+            return AlgebricksBuiltinFunctions.LE;
+        }
+        if (oldFuncIdent == AlgebricksBuiltinFunctions.GT) {
+            return AlgebricksBuiltinFunctions.LT;
+        }
+        if (oldFuncIdent == AlgebricksBuiltinFunctions.LE) {
+            return AlgebricksBuiltinFunctions.GE;
+        }
+        if (oldFuncIdent == AlgebricksBuiltinFunctions.LT) {
+            return AlgebricksBuiltinFunctions.GT;
+        }
+        throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+}
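getSimilarityCheckExpr() above folds the comparison threshold into the last argument of the *-check function, tightening strict bounds into inclusive ones. A standalone sketch of just that threshold arithmetic (illustrative only; Math.nextUp stands in for the tiny increment the rule adds via Float.MIN_VALUE):

    public class SimilarityThresholdSketch {

        // Jaccard with GE keeps the threshold; GT is tightened to an inclusive bound
        // just above it, capped at 1.0 (Jaccard similarity never exceeds 1).
        static float jaccardCheckThreshold(float threshold, boolean strict) {
            if (!strict) {
                return threshold;
            }
            return Math.min(Math.nextUp(threshold), 1.0f);
        }

        // Edit distance with LE keeps the threshold; LT becomes "<= threshold - 1",
        // floored at 0 since a distance cannot be negative.
        static int editDistanceCheckThreshold(int threshold, boolean strict) {
            if (!strict) {
                return threshold;
            }
            return Math.max(threshold - 1, 0);
        }

        public static void main(String[] args) {
            System.out.println(jaccardCheckThreshold(0.8f, true));   // slightly above 0.8
            System.out.println(jaccardCheckThreshold(1.0f, true));   // capped at 1.0
            System.out.println(editDistanceCheckThreshold(3, true)); // 2
            System.out.println(editDistanceCheckThreshold(0, true)); // 0
        }
    }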



[41/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
deleted file mode 100644
index 22f3b80..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeAccessMethod.java
+++ /dev/null
@@ -1,660 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.annotations.SkipSecondaryIndexSearchExpressionAnnotation;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.optimizer.rules.util.EquivalenceClassUtils;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions.ComparisonKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractDataSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-
-/**
- * Class for helping rewrite rules to choose and apply BTree indexes.
- */
-public class BTreeAccessMethod implements IAccessMethod {
-
-    // Describes whether a search predicate is an open/closed interval.
-    private enum LimitType {
-        LOW_INCLUSIVE,
-        LOW_EXCLUSIVE,
-        HIGH_INCLUSIVE,
-        HIGH_EXCLUSIVE,
-        EQUAL
-    }
-
-    // TODO: There is some redundancy here, since these are listed in AlgebricksBuiltinFunctions as well.
-    private static List<FunctionIdentifier> funcIdents = new ArrayList<FunctionIdentifier>();
-    static {
-        funcIdents.add(AlgebricksBuiltinFunctions.EQ);
-        funcIdents.add(AlgebricksBuiltinFunctions.LE);
-        funcIdents.add(AlgebricksBuiltinFunctions.GE);
-        funcIdents.add(AlgebricksBuiltinFunctions.LT);
-        funcIdents.add(AlgebricksBuiltinFunctions.GT);
-    }
-
-    public static BTreeAccessMethod INSTANCE = new BTreeAccessMethod();
-
-    @Override
-    public List<FunctionIdentifier> getOptimizableFunctions() {
-        return funcIdents;
-    }
-
-    @Override
-    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
-        boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
-        if (!matches) {
-            matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVars(funcExpr, analysisCtx);
-        }
-        return matches;
-    }
-
-    @Override
-    public boolean matchAllIndexExprs() {
-        return false;
-    }
-
-    @Override
-    public boolean matchPrefixIndexExprs() {
-        return true;
-    }
-
-    @Override
-    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
-            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
-            IOptimizationContext context) throws AlgebricksException {
-        SelectOperator select = (SelectOperator) selectRef.getValue();
-        Mutable<ILogicalExpression> conditionRef = select.getCondition();
-        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(selectRef, conditionRef, subTree, null,
-                chosenIndex, analysisCtx, false, false, false, context);
-        if (primaryIndexUnnestOp == null) {
-            return false;
-        }
-        Mutable<ILogicalOperator> opRef = (subTree.assignsAndUnnestsRefs.isEmpty()) ? null
-                : subTree.assignsAndUnnestsRefs.get(0);
-        ILogicalOperator op = null;
-        if (opRef != null) {
-            op = opRef.getValue();
-        }
-        // Generate new select using the new condition.
-        if (conditionRef.getValue() != null) {
-            select.getInputs().clear();
-            if (op != null) {
-                subTree.dataSourceRef.setValue(primaryIndexUnnestOp);
-                select.getInputs().add(new MutableObject<ILogicalOperator>(op));
-            } else {
-                select.getInputs().add(new MutableObject<ILogicalOperator>(primaryIndexUnnestOp));
-            }
-        } else {
-            ((AbstractLogicalOperator) primaryIndexUnnestOp).setExecutionMode(ExecutionMode.PARTITIONED);
-            if (op != null) {
-                subTree.dataSourceRef.setValue(primaryIndexUnnestOp);
-                selectRef.setValue(op);
-            } else {
-                selectRef.setValue(primaryIndexUnnestOp);
-            }
-        }
-        return true;
-    }
-
-    @Override
-    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
-            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
-            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
-            boolean hasGroupBy) throws AlgebricksException {
-        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
-        Mutable<ILogicalExpression> conditionRef = joinOp.getCondition();
-        // Determine if the index is applicable on the left or right side (if both, we arbitrarily prefer the left side).
-        Dataset dataset = analysisCtx.indexDatasetMap.get(chosenIndex);
-        // Determine probe and index subtrees based on chosen index.
-        OptimizableOperatorSubTree indexSubTree = null;
-        OptimizableOperatorSubTree probeSubTree = null;
-        if (!isLeftOuterJoin && leftSubTree.hasDataSourceScan()
-                && dataset.getDatasetName().equals(leftSubTree.dataset.getDatasetName())) {
-            indexSubTree = leftSubTree;
-            probeSubTree = rightSubTree;
-        } else if (rightSubTree.hasDataSourceScan()
-                && dataset.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
-            indexSubTree = rightSubTree;
-            probeSubTree = leftSubTree;
-        }
-        if (indexSubTree == null) {
-            //This may happen for left outer join case
-            return false;
-        }
-
-        LogicalVariable newNullPlaceHolderVar = null;
-        if (isLeftOuterJoin) {
-            //get a new null place holder variable that is the first field variable of the primary key
-            //from the indexSubTree's datasourceScanOp
-            newNullPlaceHolderVar = indexSubTree.getDataSourceVariables().get(0);
-        }
-
-        ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(joinRef, conditionRef, indexSubTree,
-                probeSubTree, chosenIndex, analysisCtx, true, isLeftOuterJoin, true, context);
-        if (primaryIndexUnnestOp == null) {
-            return false;
-        }
-
-        if (isLeftOuterJoin && hasGroupBy) {
-            //reset the null place holder variable
-            AccessMethodUtils.resetLOJNullPlaceholderVariableInGroupByOp(analysisCtx, newNullPlaceHolderVar, context);
-        }
-
-        // If there are conditions left, add a new select operator on top.
-        indexSubTree.dataSourceRef.setValue(primaryIndexUnnestOp);
-        if (conditionRef.getValue() != null) {
-            SelectOperator topSelect = new SelectOperator(conditionRef, isLeftOuterJoin, newNullPlaceHolderVar);
-            topSelect.getInputs().add(indexSubTree.rootRef);
-            topSelect.setExecutionMode(ExecutionMode.LOCAL);
-            context.computeAndSetTypeEnvironmentForOperator(topSelect);
-            // Replace the original join with the new subtree rooted at the select op.
-            joinRef.setValue(topSelect);
-        } else {
-            joinRef.setValue(indexSubTree.rootRef.getValue());
-        }
-        return true;
-    }
-
-    private ILogicalOperator createSecondaryToPrimaryPlan(Mutable<ILogicalOperator> topOpRef,
-            Mutable<ILogicalExpression> conditionRef, OptimizableOperatorSubTree indexSubTree,
-            OptimizableOperatorSubTree probeSubTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
-            boolean retainInput, boolean retainNull, boolean requiresBroadcast, IOptimizationContext context)
-            throws AlgebricksException {
-        Dataset dataset = indexSubTree.dataset;
-        ARecordType recordType = indexSubTree.recordType;
-        // we made sure indexSubTree has datasource scan
-        AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.dataSourceRef.getValue();
-        List<Pair<Integer, Integer>> exprAndVarList = analysisCtx.indexExprsAndVars.get(chosenIndex);
-        List<IOptimizableFuncExpr> matchedFuncExprs = analysisCtx.matchedFuncExprs;
-        int numSecondaryKeys = analysisCtx.indexNumMatchedKeys.get(chosenIndex);
-        // List of function expressions that will be replaced by the secondary-index search.
-        // These func exprs will be removed from the select condition at the very end of this method.
-        Set<ILogicalExpression> replacedFuncExprs = new HashSet<ILogicalExpression>();
-
-        // Info on high and low keys for the BTree search predicate.
-        ILogicalExpression[] lowKeyExprs = new ILogicalExpression[numSecondaryKeys];
-        ILogicalExpression[] highKeyExprs = new ILogicalExpression[numSecondaryKeys];
-        LimitType[] lowKeyLimits = new LimitType[numSecondaryKeys];
-        LimitType[] highKeyLimits = new LimitType[numSecondaryKeys];
-        boolean[] lowKeyInclusive = new boolean[numSecondaryKeys];
-        boolean[] highKeyInclusive = new boolean[numSecondaryKeys];
-
-        // TODO: For now we don't do any sophisticated analysis of the func exprs to come up with "the best" range predicate.
-        // If we can't figure out how to integrate a certain funcExpr into the current predicate, we just bail by setting this flag.
-        boolean couldntFigureOut = false;
-        boolean doneWithExprs = false;
-        boolean isEqCondition = false;
-        // TODO: For now don't consider prefix searches.
-        BitSet setLowKeys = new BitSet(numSecondaryKeys);
-        BitSet setHighKeys = new BitSet(numSecondaryKeys);
-        // Go through the func exprs listed as optimizable by the chosen index,
-        // and formulate a range predicate on the secondary-index keys.
-
-        // Checks whether a type cast happened from a real (FLOAT, DOUBLE) value to an INT value,
-        // since we can have rounding issues when dealing with the LT(<) or GT(>) operators.
-        boolean realTypeConvertedToIntegerType = false;
-
-        for (Pair<Integer, Integer> exprIndex : exprAndVarList) {
-            // Position of the field of matchedFuncExprs.get(exprIndex) in the chosen index's indexed exprs.
-            IOptimizableFuncExpr optFuncExpr = matchedFuncExprs.get(exprIndex.first);
-            int keyPos = indexOf(optFuncExpr.getFieldName(0), chosenIndex.getKeyFieldNames());
-            if (keyPos < 0) {
-                if (optFuncExpr.getNumLogicalVars() > 1) {
-                    // If we are optimizing a join, the matching field may be the second field name.
-                    keyPos = indexOf(optFuncExpr.getFieldName(1), chosenIndex.getKeyFieldNames());
-                }
-            }
-            if (keyPos < 0) {
-                throw new AlgebricksException(
-                        "Could not match optimizable function expression to any index field name.");
-            }
-            Pair<ILogicalExpression, Boolean> returnedSearchKeyExpr = AccessMethodUtils.createSearchKeyExpr(
-                    optFuncExpr, indexSubTree, probeSubTree);
-            ILogicalExpression searchKeyExpr = returnedSearchKeyExpr.first;
-            realTypeConvertedToIntegerType = returnedSearchKeyExpr.second;
-
-            LimitType limit = getLimitType(optFuncExpr, probeSubTree);
-
-            // If a DOUBLE or FLOAT constant is converted to an INT type value,
-            // we need to check a corner case where two real values are located between an INT value.
-            // For example, for the following query,
-            //
-            // for $emp in dataset empDataset
-            // where $emp.age > double("2.3") and $emp.age < double("3.3")
-            // return $emp.id;
-            //
-            // It should generate a result if there is a tuple that satisfies the condition (e.g., age 3);
-            // however, without the conversion it does not, since finding candidates
-            // fails after truncating the fraction part (there is no INT value greater than 2 and less than 3).
-            //
-            // Therefore, we convert LT(<) to LE(<=) and GT(>) to GE(>=) to find candidates.
-            // This does not change the result of an actual comparison since this conversion is only applied
-            // for finding candidates from an index.
-            //
-            if (realTypeConvertedToIntegerType) {
-                if (limit == LimitType.HIGH_EXCLUSIVE) {
-                    limit = LimitType.HIGH_INCLUSIVE;
-                } else if (limit == LimitType.LOW_EXCLUSIVE) {
-                    limit = LimitType.LOW_INCLUSIVE;
-                }
-            }
-
-            switch (limit) {
-                case EQUAL: {
-                    if (lowKeyLimits[keyPos] == null && highKeyLimits[keyPos] == null) {
-                        lowKeyLimits[keyPos] = highKeyLimits[keyPos] = limit;
-                        lowKeyInclusive[keyPos] = highKeyInclusive[keyPos] = true;
-                        lowKeyExprs[keyPos] = highKeyExprs[keyPos] = searchKeyExpr;
-                        setLowKeys.set(keyPos);
-                        setHighKeys.set(keyPos);
-                        isEqCondition = true;
-                    } else {
-                        // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
-                        // (once from analyzing each side of the join)
-                        if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true
-                                && lowKeyExprs[keyPos].equals(searchKeyExpr) && highKeyLimits[keyPos] == limit
-                                && highKeyInclusive[keyPos] == true && highKeyExprs[keyPos].equals(searchKeyExpr)) {
-                            isEqCondition = true;
-                            break;
-                        }
-                        couldntFigureOut = true;
-                    }
-                    // TODO: For now don't consider prefix searches.
-                    // If high and low keys are set, we exit for now.
-                    if (setLowKeys.cardinality() == numSecondaryKeys && setHighKeys.cardinality() == numSecondaryKeys) {
-                        doneWithExprs = true;
-                    }
-                    break;
-                }
-                case HIGH_EXCLUSIVE: {
-                    if (highKeyLimits[keyPos] == null || (highKeyLimits[keyPos] != null && highKeyInclusive[keyPos])) {
-                        highKeyLimits[keyPos] = limit;
-                        highKeyExprs[keyPos] = searchKeyExpr;
-                        highKeyInclusive[keyPos] = false;
-                    } else {
-                        // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
-                        // (once from analyzing each side of the join)
-                        if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == false
-                                && highKeyExprs[keyPos].equals(searchKeyExpr)) {
-                            break;
-                        }
-                        couldntFigureOut = true;
-                        doneWithExprs = true;
-                    }
-                    break;
-                }
-                case HIGH_INCLUSIVE: {
-                    if (highKeyLimits[keyPos] == null) {
-                        highKeyLimits[keyPos] = limit;
-                        highKeyExprs[keyPos] = searchKeyExpr;
-                        highKeyInclusive[keyPos] = true;
-                    } else {
-                        // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
-                        // (once from analyzing each side of the join)
-                        if (highKeyLimits[keyPos] == limit && highKeyInclusive[keyPos] == true
-                                && highKeyExprs[keyPos].equals(searchKeyExpr)) {
-                            break;
-                        }
-                        couldntFigureOut = true;
-                        doneWithExprs = true;
-                    }
-                    break;
-                }
-                case LOW_EXCLUSIVE: {
-                    if (lowKeyLimits[keyPos] == null || (lowKeyLimits[keyPos] != null && lowKeyInclusive[keyPos])) {
-                        lowKeyLimits[keyPos] = limit;
-                        lowKeyExprs[keyPos] = searchKeyExpr;
-                        lowKeyInclusive[keyPos] = false;
-                    } else {
-                        // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
-                        // (once from analyzing each side of the join)
-                        if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == false
-                                && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
-                            break;
-                        }
-                        couldntFigureOut = true;
-                        doneWithExprs = true;
-                    }
-                    break;
-                }
-                case LOW_INCLUSIVE: {
-                    if (lowKeyLimits[keyPos] == null) {
-                        lowKeyLimits[keyPos] = limit;
-                        lowKeyExprs[keyPos] = searchKeyExpr;
-                        lowKeyInclusive[keyPos] = true;
-                    } else {
-                        // Has already been set to the identical values. When optimizing join we may encounter the same optimizable expression twice
-                        // (once from analyzing each side of the join)
-                        if (lowKeyLimits[keyPos] == limit && lowKeyInclusive[keyPos] == true
-                                && lowKeyExprs[keyPos].equals(searchKeyExpr)) {
-                            break;
-                        }
-                        couldntFigureOut = true;
-                        doneWithExprs = true;
-                    }
-                    break;
-                }
-                default: {
-                    throw new IllegalStateException();
-                }
-            }
-            if (!couldntFigureOut) {
-                // Remember to remove this funcExpr later.
-                replacedFuncExprs.add(matchedFuncExprs.get(exprIndex.first).getFuncExpr());
-            }
-            if (doneWithExprs) {
-                break;
-            }
-        }
-        if (couldntFigureOut) {
-            return null;
-        }
-
-        // If the select condition contains mixed open/closed intervals on multiple keys, then we make all intervals closed to obtain a superset of answers and leave the original selection in place.
-        boolean primaryIndexPostProccessingIsNeeded = false;
-        for (int i = 1; i < numSecondaryKeys; ++i) {
-            if (lowKeyInclusive[i] != lowKeyInclusive[0]) {
-                Arrays.fill(lowKeyInclusive, true);
-                primaryIndexPostProccessingIsNeeded = true;
-                break;
-            }
-        }
-        for (int i = 1; i < numSecondaryKeys; ++i) {
-            if (highKeyInclusive[i] != highKeyInclusive[0]) {
-                Arrays.fill(highKeyInclusive, true);
-                primaryIndexPostProccessingIsNeeded = true;
-                break;
-            }
-        }
-
-        // determine cases when prefix search could be applied
-        for (int i = 1; i < lowKeyExprs.length; i++) {
-            if (lowKeyLimits[0] == null && lowKeyLimits[i] != null || lowKeyLimits[0] != null
-                    && lowKeyLimits[i] == null || highKeyLimits[0] == null && highKeyLimits[i] != null
-                    || highKeyLimits[0] != null && highKeyLimits[i] == null) {
-                numSecondaryKeys--;
-                primaryIndexPostProccessingIsNeeded = true;
-            }
-        }
-        if (lowKeyLimits[0] == null) {
-            lowKeyInclusive[0] = true;
-        }
-        if (highKeyLimits[0] == null) {
-            highKeyInclusive[0] = true;
-        }
-
-        // Here we generate vars and funcs for assigning the secondary-index keys to be fed into the secondary-index search.
-        // List of variables for the assign.
-        ArrayList<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
-        // List of variables and expressions for the assign.
-        ArrayList<LogicalVariable> assignKeyVarList = new ArrayList<LogicalVariable>();
-        ArrayList<Mutable<ILogicalExpression>> assignKeyExprList = new ArrayList<Mutable<ILogicalExpression>>();
-        int numLowKeys = createKeyVarsAndExprs(numSecondaryKeys, lowKeyLimits, lowKeyExprs, assignKeyVarList,
-                assignKeyExprList, keyVarList, context);
-        int numHighKeys = createKeyVarsAndExprs(numSecondaryKeys, highKeyLimits, highKeyExprs, assignKeyVarList,
-                assignKeyExprList, keyVarList, context);
-
-        BTreeJobGenParams jobGenParams = new BTreeJobGenParams(chosenIndex.getIndexName(), IndexType.BTREE,
-                dataset.getDataverseName(), dataset.getDatasetName(), retainInput, retainNull, requiresBroadcast);
-        jobGenParams.setLowKeyInclusive(lowKeyInclusive[0]);
-        jobGenParams.setHighKeyInclusive(highKeyInclusive[0]);
-        jobGenParams.setIsEqCondition(isEqCondition);
-        jobGenParams.setLowKeyVarList(keyVarList, 0, numLowKeys);
-        jobGenParams.setHighKeyVarList(keyVarList, numLowKeys, numHighKeys);
-
-        ILogicalOperator inputOp = null;
-        if (!assignKeyVarList.isEmpty()) {
-            // Assign operator that sets the constant secondary-index search-key fields if necessary.
-            AssignOperator assignConstantSearchKeys = new AssignOperator(assignKeyVarList, assignKeyExprList);
-            // Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
-            assignConstantSearchKeys.getInputs().add(dataSourceOp.getInputs().get(0));
-            assignConstantSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
-            inputOp = assignConstantSearchKeys;
-        } else {
-            // All index search keys are variables.
-            inputOp = probeSubTree.root;
-        }
-
-        UnnestMapOperator secondaryIndexUnnestOp = AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
-                chosenIndex, inputOp, jobGenParams, context, false, retainInput);
-
-        // Generate the rest of the upstream plan which feeds the search results into the primary index.
-        UnnestMapOperator primaryIndexUnnestOp = null;
-        boolean isPrimaryIndex = chosenIndex.isPrimaryIndex();
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            // External dataset
-            ExternalDataLookupOperator externalDataAccessOp = AccessMethodUtils.createExternalDataLookupUnnestMap(
-                    dataSourceOp, dataset, recordType, secondaryIndexUnnestOp, context, chosenIndex, retainInput,
-                    retainNull);
-            indexSubTree.dataSourceRef.setValue(externalDataAccessOp);
-            return externalDataAccessOp;
-        } else if (!isPrimaryIndex) {
-            primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceOp, dataset, recordType,
-                    secondaryIndexUnnestOp, context, true, retainInput, retainNull, false);
-
-            // Replace the datasource scan with the new plan rooted at
-            // primaryIndexUnnestMap.
-            indexSubTree.dataSourceRef.setValue(primaryIndexUnnestOp);
-        } else {
-            List<Object> primaryIndexOutputTypes = new ArrayList<Object>();
-            try {
-                AccessMethodUtils.appendPrimaryIndexTypes(dataset, recordType, primaryIndexOutputTypes);
-            } catch (IOException e) {
-                throw new AlgebricksException(e);
-            }
-            List<LogicalVariable> scanVariables = dataSourceOp.getVariables();
-            primaryIndexUnnestOp = new UnnestMapOperator(scanVariables, secondaryIndexUnnestOp.getExpressionRef(),
-                    primaryIndexOutputTypes, retainInput);
-            primaryIndexUnnestOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
-
-            if (!primaryIndexPostProccessingIsNeeded) {
-                List<Mutable<ILogicalExpression>> remainingFuncExprs = new ArrayList<Mutable<ILogicalExpression>>();
-                getNewConditionExprs(conditionRef, replacedFuncExprs, remainingFuncExprs);
-                // Generate new condition.
-                if (!remainingFuncExprs.isEmpty()) {
-                    ILogicalExpression pulledCond = createSelectCondition(remainingFuncExprs);
-                    conditionRef.setValue(pulledCond);
-                } else {
-                    conditionRef.setValue(null);
-                }
-            }
-
-            // Adds equivalence classes --- one equivalence class between a primary key
-            // variable and a record field-access expression.
-            EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(primaryIndexUnnestOp, scanVariables,
-                    recordType, dataset, context);
-        }
-
-        return primaryIndexUnnestOp;
-    }
-
-    private int createKeyVarsAndExprs(int numKeys, LimitType[] keyLimits, ILogicalExpression[] searchKeyExprs,
-            ArrayList<LogicalVariable> assignKeyVarList, ArrayList<Mutable<ILogicalExpression>> assignKeyExprList,
-            ArrayList<LogicalVariable> keyVarList, IOptimizationContext context) {
-        if (keyLimits[0] == null) {
-            return 0;
-        }
-        for (int i = 0; i < numKeys; i++) {
-            ILogicalExpression searchKeyExpr = searchKeyExprs[i];
-            LogicalVariable keyVar = null;
-            if (searchKeyExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-                keyVar = context.newVar();
-                assignKeyExprList.add(new MutableObject<ILogicalExpression>(searchKeyExpr));
-                assignKeyVarList.add(keyVar);
-            } else {
-                keyVar = ((VariableReferenceExpression) searchKeyExpr).getVariableReference();
-            }
-            keyVarList.add(keyVar);
-        }
-        return numKeys;
-    }
-
-    private void getNewConditionExprs(Mutable<ILogicalExpression> conditionRef,
-            Set<ILogicalExpression> replacedFuncExprs, List<Mutable<ILogicalExpression>> remainingFuncExprs) {
-        remainingFuncExprs.clear();
-        if (replacedFuncExprs.isEmpty()) {
-            return;
-        }
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) conditionRef.getValue();
-        if (replacedFuncExprs.size() == 1) {
-            Iterator<ILogicalExpression> it = replacedFuncExprs.iterator();
-            if (!it.hasNext()) {
-                return;
-            }
-            if (funcExpr == it.next()) {
-                // There are no remaining function exprs.
-                return;
-            }
-        }
-        // The original select cond must be an AND. Check it just to be sure.
-        if (funcExpr.getFunctionIdentifier() != AlgebricksBuiltinFunctions.AND) {
-            throw new IllegalStateException();
-        }
-        // Clean the conjuncts.
-        for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
-            ILogicalExpression argExpr = arg.getValue();
-            if (argExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                continue;
-            }
-            // If the function expression was not replaced by the new index
-            // plan, then add it to the list of remaining function expressions.
-            if (!replacedFuncExprs.contains(argExpr)) {
-                remainingFuncExprs.add(arg);
-            }
-        }
-    }
-
-    private <T> int indexOf(T value, List<T> coll) {
-        int i = 0;
-        for (T member : coll) {
-            if (member.equals(value)) {
-                return i;
-            }
-            i++;
-        }
-        return -1;
-    }
-
-    private LimitType getLimitType(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree probeSubTree) {
-        ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(optFuncExpr.getFuncExpr()
-                .getFunctionIdentifier());
-        LimitType limit = null;
-        switch (ck) {
-            case EQ: {
-                limit = LimitType.EQUAL;
-                break;
-            }
-            case GE: {
-                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.HIGH_INCLUSIVE : LimitType.LOW_INCLUSIVE;
-                break;
-            }
-            case GT: {
-                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.HIGH_EXCLUSIVE : LimitType.LOW_EXCLUSIVE;
-                break;
-            }
-            case LE: {
-                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.LOW_INCLUSIVE : LimitType.HIGH_INCLUSIVE;
-                break;
-            }
-            case LT: {
-                limit = probeIsOnLhs(optFuncExpr, probeSubTree) ? LimitType.LOW_EXCLUSIVE : LimitType.HIGH_EXCLUSIVE;
-                break;
-            }
-            case NEQ: {
-                limit = null;
-                break;
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-        return limit;
-    }
-
-    private boolean probeIsOnLhs(IOptimizableFuncExpr optFuncExpr, OptimizableOperatorSubTree probeSubTree) {
-        if (probeSubTree == null) {
-            // We are optimizing a selection query. Search key is a constant. Return true if constant is on lhs.
-            return optFuncExpr.getFuncExpr().getArguments().get(0) == optFuncExpr.getConstantVal(0);
-        } else {
-            // We are optimizing a join query. Determine whether the feeding variable is on the lhs.
-            return (optFuncExpr.getOperatorSubTree(0) == null || optFuncExpr.getOperatorSubTree(0) == probeSubTree);
-        }
-    }
-
-    private ILogicalExpression createSelectCondition(List<Mutable<ILogicalExpression>> predList) {
-        if (predList.size() > 1) {
-            IFunctionInfo finfo = FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.AND);
-            return new ScalarFunctionCallExpression(finfo, predList);
-        }
-        return predList.get(0).getValue();
-    }
-
-    @Override
-    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
-        // If we are optimizing a join, check for the indexed nested-loop join hint.
-        if (optFuncExpr.getNumLogicalVars() == 2) {
-            if (!optFuncExpr.getFuncExpr().getAnnotations().containsKey(IndexedNLJoinExpressionAnnotation.INSTANCE)) {
-                return false;
-            }
-        }
-        if (!index.isPrimaryIndex()
-                && optFuncExpr.getFuncExpr().getAnnotations()
-                        .containsKey(SkipSecondaryIndexSearchExpressionAnnotation.INSTANCE)) {
-            return false;
-        }
-        // No additional analysis required for BTrees.
-        return true;
-    }
-}
\ No newline at end of file

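For readers tracing the BTree access-method code removed above: the switch in getLimitType() is what decides whether a predicate bounds the secondary key from below or from above, depending on which side of the comparison the probe (or constant) sits on. The following is a minimal standalone sketch of that mapping only; ComparisonKind, LimitType, and the class name are illustrative stand-ins, not the Algebricks/AsterixDB types.

    // Illustrative sketch: restates the getLimitType() mapping above with local stand-in types.
    enum ComparisonKind { EQ, GE, GT, LE, LT, NEQ }
    enum LimitType { EQUAL, LOW_INCLUSIVE, LOW_EXCLUSIVE, HIGH_INCLUSIVE, HIGH_EXCLUSIVE }

    class LimitTypeSketch {
        // probeIsOnLhs == true means the probe side (or the constant, for a selection) is the
        // left argument, so e.g. "probe >= indexedField" bounds the indexed field from above.
        static LimitType limitFor(ComparisonKind ck, boolean probeIsOnLhs) {
            switch (ck) {
                case EQ:  return LimitType.EQUAL;
                case GE:  return probeIsOnLhs ? LimitType.HIGH_INCLUSIVE : LimitType.LOW_INCLUSIVE;
                case GT:  return probeIsOnLhs ? LimitType.HIGH_EXCLUSIVE : LimitType.LOW_EXCLUSIVE;
                case LE:  return probeIsOnLhs ? LimitType.LOW_INCLUSIVE : LimitType.HIGH_INCLUSIVE;
                case LT:  return probeIsOnLhs ? LimitType.LOW_EXCLUSIVE : LimitType.HIGH_EXCLUSIVE;
                case NEQ: return null; // a != predicate cannot drive a BTree range search
                default:  throw new IllegalStateException();
            }
        }
    }
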
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java
deleted file mode 100644
index a3df9d3..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/BTreeJobGenParams.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-
-/**
- * Helper class for reading and writing job-gen parameters for BTree access methods to
- * and from a list of function arguments, typically of an unnest-map.
- */
-public class BTreeJobGenParams extends AccessMethodJobGenParams {
-
-    protected List<LogicalVariable> lowKeyVarList;
-    protected List<LogicalVariable> highKeyVarList;
-
-    protected boolean lowKeyInclusive;
-    protected boolean highKeyInclusive;
-    protected boolean isEqCondition;
-
-    public BTreeJobGenParams() {
-        super();
-    }
-
-    public BTreeJobGenParams(String indexName, IndexType indexType, String dataverseName, String datasetName,
-            boolean retainInput, boolean retainNull, boolean requiresBroadcast) {
-        super(indexName, indexType, dataverseName, datasetName, retainInput, retainNull, requiresBroadcast);
-    }
-
-    public void setLowKeyVarList(List<LogicalVariable> keyVarList, int startIndex, int numKeys) {
-        lowKeyVarList = new ArrayList<LogicalVariable>(numKeys);
-        setKeyVarList(keyVarList, lowKeyVarList, startIndex, numKeys);
-    }
-
-    public void setHighKeyVarList(List<LogicalVariable> keyVarList, int startIndex, int numKeys) {
-        highKeyVarList = new ArrayList<LogicalVariable>(numKeys);
-        setKeyVarList(keyVarList, highKeyVarList, startIndex, numKeys);
-    }
-
-    private void setKeyVarList(List<LogicalVariable> src, List<LogicalVariable> dest, int startIndex, int numKeys) {
-        for (int i = 0; i < numKeys; i++) {
-            dest.add(src.get(startIndex + i));
-        }
-    }
-
-    public void setLowKeyInclusive(boolean lowKeyInclusive) {
-        this.lowKeyInclusive = lowKeyInclusive;
-    }
-
-    public void setHighKeyInclusive(boolean highKeyInclusive) {
-        this.highKeyInclusive = highKeyInclusive;
-    }
-
-    public void setIsEqCondition(boolean isEqCondition) {
-        this.isEqCondition = isEqCondition;
-    }
-
-    public void writeToFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        super.writeToFuncArgs(funcArgs);
-        writeVarList(lowKeyVarList, funcArgs);
-        writeVarList(highKeyVarList, funcArgs);
-        writeBoolean(lowKeyInclusive, funcArgs);
-        writeBoolean(highKeyInclusive, funcArgs);
-        writeBoolean(isEqCondition, funcArgs);
-    }
-
-    public void readFromFuncArgs(List<Mutable<ILogicalExpression>> funcArgs) {
-        super.readFromFuncArgs(funcArgs);
-        int index = super.getNumParams();
-        lowKeyVarList = new ArrayList<LogicalVariable>();
-        highKeyVarList = new ArrayList<LogicalVariable>();
-        int nextIndex = readVarList(funcArgs, index, lowKeyVarList);
-        nextIndex = readVarList(funcArgs, nextIndex, highKeyVarList);
-        nextIndex = readKeyInclusives(funcArgs, nextIndex);
-        readIsEqCondition(funcArgs, nextIndex);
-    }
-
-    private int readKeyInclusives(List<Mutable<ILogicalExpression>> funcArgs, int index) {
-        lowKeyInclusive = ((ConstantExpression) funcArgs.get(index).getValue()).getValue().isTrue();
-        // Read the next function argument at index + 1.
-        highKeyInclusive = ((ConstantExpression) funcArgs.get(index + 1).getValue()).getValue().isTrue();
-        // We have read two of the function arguments, so the next index is at index + 2.
-        return index + 2;
-    }
-
-    private void readIsEqCondition(List<Mutable<ILogicalExpression>> funcArgs, int index) {
-        isEqCondition = ((ConstantExpression) funcArgs.get(index).getValue()).getValue().isTrue();
-    }
-
-    private void writeBoolean(boolean val, List<Mutable<ILogicalExpression>> funcArgs) {
-        ILogicalExpression keyExpr = val ? ConstantExpression.TRUE : ConstantExpression.FALSE;
-        funcArgs.add(new MutableObject<ILogicalExpression>(keyExpr));
-    }
-
-    public List<LogicalVariable> getLowKeyVarList() {
-        return lowKeyVarList;
-    }
-
-    public List<LogicalVariable> getHighKeyVarList() {
-        return highKeyVarList;
-    }
-
-    public boolean isEqCondition() {
-        return isEqCondition;
-    }
-
-    public boolean isLowKeyInclusive() {
-        return lowKeyInclusive;
-    }
-
-    public boolean isHighKeyInclusive() {
-        return highKeyInclusive;
-    }
-}

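BTreeJobGenParams, removed above, is the carrier that the BTree access method uses to pass search-key information through the unnest-map's function arguments via the writeToFuncArgs/readFromFuncArgs pair. Below is a hedged usage sketch of that round trip, assuming the imports shown at the top of the class, a caller-built key-variable list, and an initially empty argument list; the helper method, its arguments, and the index/dataverse/dataset literals are hypothetical, while every BTreeJobGenParams call is taken from the class above.

    // Sketch of the write/read round trip; only the BTreeJobGenParams calls come from the class above.
    static void roundTripSketch(List<LogicalVariable> keyVars,                 // at least two key variables
            List<Mutable<ILogicalExpression>> funcArgs) {                      // an empty, mutable list
        BTreeJobGenParams written = new BTreeJobGenParams("ix_orders_date", IndexType.BTREE,
                "tpch", "Orders", false, false, false);
        written.setLowKeyVarList(keyVars, 0, 1);   // first variable is the low bound of the range
        written.setHighKeyVarList(keyVars, 1, 1);  // second variable is the high bound
        written.setLowKeyInclusive(true);
        written.setHighKeyInclusive(false);
        written.setIsEqCondition(false);
        written.writeToFuncArgs(funcArgs);         // appends the base params, key vars, then the three flags

        BTreeJobGenParams read = new BTreeJobGenParams();
        read.readFromFuncArgs(funcArgs);           // consumes the arguments in the same order
        assert read.isLowKeyInclusive() && !read.isHighKeyInclusive() && !read.isEqCondition();
    }
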
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IAccessMethod.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IAccessMethod.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IAccessMethod.java
deleted file mode 100644
index d03d1a0..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IAccessMethod.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-
-/**
- * Interface that an access method should implement to work with the rewrite
- * rules to apply it for join and/or selection queries. This interface provides
- * methods for analyzing a select/join condition, and for rewriting the plan
- * with a given index.
- */
-public interface IAccessMethod {
-
-    /**
-     * @return A list of function identifiers that are optimizable by this
-     *         access method.
-     */
-    public List<FunctionIdentifier> getOptimizableFunctions();
-
-    /**
-     * Analyzes the arguments of a given optimizable funcExpr to see if this
-     * access method is applicable (e.g., one arg is a constant and one is a
-     * var). We assume that the funcExpr has already been determined to be
-     * optimizable by this access method based on its function identifier. If
-     * funcExpr has been found to be optimizable, this method adds an
-     * OptimizableFunction to analysisCtx.matchedFuncExprs for further analysis.
-     *
-     * @return true if funcExpr is optimizable by this access method, false
-     *         otherwise
-     */
-    public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
-            List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx);
-
-    /**
-     * Indicates whether all index expressions must be matched in order for this
-     * index to be applicable.
-     *
-     * @return boolean
-     */
-    public boolean matchAllIndexExprs();
-
-    /**
-     * Indicates whether this index is applicable if only a prefix of the index
-     * expressions are matched.
-     *
-     * @return boolean
-     */
-    public boolean matchPrefixIndexExprs();
-
-    /**
-     * Applies the plan transformation to use chosenIndex to optimize a selection query.
-     */
-    public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
-            OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
-            IOptimizationContext context) throws AlgebricksException;
-
-    /**
-     * Applies the plan transformation to use chosenIndex to optimize a join query.
-     * In the case of a LeftOuterJoin, there may or may not be a needed group-by operation.
-     * If there is, we will need to include it in the transformation.
-     */
-    public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
-            OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
-            AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
-            boolean hasGroupBy) throws AlgebricksException;
-
-    /**
-     * Analyzes expr to see whether it is optimizable by the given concrete index.
-     * 
-     * @throws AlgebricksException
-     */
-    public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) throws AlgebricksException;
-}

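IAccessMethod, removed above, is the extension point that the rewrite rules dispatch to. As rough orientation only, a do-nothing implementation of the contract might look like the sketch below; the class name and every method body are placeholders rather than repository code, and the imports are the same as those of the interface above plus java.util.Collections. The real implementations registered elsewhere in this commit (BTreeAccessMethod, RTreeAccessMethod, InvertedIndexAccessMethod) are considerably more involved.

    // Placeholder implementation of the IAccessMethod contract above; every body is a stub.
    public class NoOpAccessMethod implements IAccessMethod {

        @Override
        public List<FunctionIdentifier> getOptimizableFunctions() {
            return Collections.emptyList();   // advertises no optimizable functions
        }

        @Override
        public boolean analyzeFuncExprArgs(AbstractFunctionCallExpression funcExpr,
                List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx) {
            return false;                     // never matches, so nothing is added to analysisCtx
        }

        @Override
        public boolean matchAllIndexExprs() {
            return false;
        }

        @Override
        public boolean matchPrefixIndexExprs() {
            return true;
        }

        @Override
        public boolean applySelectPlanTransformation(Mutable<ILogicalOperator> selectRef,
                OptimizableOperatorSubTree subTree, Index chosenIndex, AccessMethodAnalysisContext analysisCtx,
                IOptimizationContext context) throws AlgebricksException {
            return false;                     // leaves the selection plan untouched
        }

        @Override
        public boolean applyJoinPlanTransformation(Mutable<ILogicalOperator> joinRef,
                OptimizableOperatorSubTree leftSubTree, OptimizableOperatorSubTree rightSubTree, Index chosenIndex,
                AccessMethodAnalysisContext analysisCtx, IOptimizationContext context, boolean isLeftOuterJoin,
                boolean hasGroupBy) throws AlgebricksException {
            return false;                     // leaves the join plan untouched
        }

        @Override
        public boolean exprIsOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) throws AlgebricksException {
            return false;
        }
    }
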
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
deleted file mode 100644
index 326d38b..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IOptimizableFuncExpr.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
-
-/**
- * Describes a function expression that is optimizable by an access method.
- * Provides convenient methods for accessing arguments (constants, variables)
- * and metadata of such a function.
- */
-public interface IOptimizableFuncExpr {
-    public AbstractFunctionCallExpression getFuncExpr();
-
-    public int getNumLogicalVars();
-
-    public int getNumConstantVals();
-
-    public LogicalVariable getLogicalVar(int index);
-
-    public void setLogicalExpr(int index, ILogicalExpression logExpr);
-
-    public ILogicalExpression getLogicalExpr(int index);
-
-    public void setFieldName(int index, List<String> fieldName);
-
-    public List<String> getFieldName(int index);
-
-    public void setFieldType(int index, IAType fieldType);
-
-    public IAType getFieldType(int index);
-
-    public void setOptimizableSubTree(int index, OptimizableOperatorSubTree subTree);
-
-    public OptimizableOperatorSubTree getOperatorSubTree(int index);
-
-    public IAlgebricksConstantValue getConstantVal(int index);
-
-    public int findLogicalVar(LogicalVariable var);
-
-    public int findFieldName(List<String> fieldName);
-
-    public void substituteVar(LogicalVariable original, LogicalVariable substitution);
-
-    public void setPartialField(boolean partialField);
-
-    public boolean containsPartialField();
-
-    public void setSourceVar(int index, LogicalVariable var);
-
-    public LogicalVariable getSourceVar(int index);
-}

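IOptimizableFuncExpr, removed above, is essentially a record that the analysis phase fills in while walking the plan: a rule looks up which argument position of the function call a plan variable corresponds to and then attaches the resolved field name (and type) at that position. A tiny hypothetical helper showing that pattern follows; the helper itself is illustrative, while the two interface calls are from the interface above.

    // Hypothetical helper mirroring how the rewrite rules bind a resolved field name to the
    // argument position of a matched variable.
    static void bindFieldName(IOptimizableFuncExpr optFuncExpr, LogicalVariable var, List<String> fieldName) {
        int pos = optFuncExpr.findLogicalVar(var);    // -1 if var is not an argument of the func expr
        if (pos != -1) {
            optFuncExpr.setFieldName(pos, fieldName); // record the field the variable originates from
        }
    }
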
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
deleted file mode 100644
index 7d93ecc..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
+++ /dev/null
@@ -1,246 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-
-/**
- * This rule optimizes a join with secondary indexes into an indexed nested-loop join.
- * Matches the following operator pattern:
- * (join) <-- (select)? <-- (assign | unnest)+ <-- (datasource scan)
- * <-- (select)? <-- (assign | unnest)+ <-- (datasource scan | unnest-map)
- * The order of the join inputs does not matter.
- * Replaces the above pattern with the following simplified plan:
- * (select) <-- (assign) <-- (btree search) <-- (sort) <-- (unnest(index search)) <-- (assign) <-- (datasource scan | unnest-map)
- * The sort is optional, and some access methods may choose not to sort.
- * Note that for some index-based optimizations we do not remove the triggering
- * condition from the join, since the secondary index may only act as a filter, and the
- * final verification must still be done with the original join condition.
- * The basic outline of this rule is:
- * 1. Match operator pattern.
- * 2. Analyze join condition to see if there are optimizable functions (delegated to IAccessMethods).
- * 3. Check metadata to see if there are applicable indexes.
- * 4. Choose an index to apply (for now only a single index will be chosen).
- * 5. Rewrite plan using index (delegated to IAccessMethods).
- * For left-outer-join, additional patterns are checked and additional treatment is needed as follows:
- * 1. First it checks if there is a groupByOp above the join: (groupby) <-- (leftouterjoin)
- * 2. Inherently, only the right-subtree of the lojOp can be used as indexSubtree.
- * So, the right-subtree must have at least one applicable index on join field(s)
- * 3. If there is a groupByOp, the null placeholder variable introduced in groupByOp should be taken care of correctly.
- * Here, the primary key variable from datasourceScanOp replaces the introduced null placeholder variable.
- * If the primary key is a composite key, then the first of the primary key variables becomes the
- * null placeholder variable. This null placeholder variable works for all three types of indexes.
- */
-public class IntroduceJoinAccessMethodRule extends AbstractIntroduceAccessMethodRule {
-
-    protected Mutable<ILogicalOperator> joinRef = null;
-    protected AbstractBinaryJoinOperator join = null;
-    protected AbstractFunctionCallExpression joinCond = null;
-    protected final OptimizableOperatorSubTree leftSubTree = new OptimizableOperatorSubTree();
-    protected final OptimizableOperatorSubTree rightSubTree = new OptimizableOperatorSubTree();
-    protected boolean isLeftOuterJoin = false;
-    protected boolean hasGroupBy = true;
-
-    // Register access methods.
-    protected static Map<FunctionIdentifier, List<IAccessMethod>> accessMethods = new HashMap<FunctionIdentifier, List<IAccessMethod>>();
-    static {
-        registerAccessMethod(BTreeAccessMethod.INSTANCE, accessMethods);
-        registerAccessMethod(RTreeAccessMethod.INSTANCE, accessMethods);
-        registerAccessMethod(InvertedIndexAccessMethod.INSTANCE, accessMethods);
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        clear();
-        setMetadataDeclarations(context);
-
-        // Match operator pattern and initialize optimizable sub trees.
-        if (!matchesOperatorPattern(opRef, context)) {
-            return false;
-        }
-        // Analyze condition on those optimizable subtrees that have a datasource scan.
-        Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs = new HashMap<IAccessMethod, AccessMethodAnalysisContext>();
-        boolean matchInLeftSubTree = false;
-        boolean matchInRightSubTree = false;
-        if (leftSubTree.hasDataSource()) {
-            matchInLeftSubTree = analyzeCondition(joinCond, leftSubTree.assignsAndUnnests, analyzedAMs);
-        }
-        if (rightSubTree.hasDataSource()) {
-            matchInRightSubTree = analyzeCondition(joinCond, rightSubTree.assignsAndUnnests, analyzedAMs);
-        }
-        if (!matchInLeftSubTree && !matchInRightSubTree) {
-            return false;
-        }
-
-        // Set dataset and type metadata.
-        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-        boolean checkLeftSubTreeMetadata = false;
-        boolean checkRightSubTreeMetadata = false;
-        if (matchInLeftSubTree) {
-            checkLeftSubTreeMetadata = leftSubTree.setDatasetAndTypeMetadata(metadataProvider);
-        }
-        if (matchInRightSubTree) {
-            checkRightSubTreeMetadata = rightSubTree.setDatasetAndTypeMetadata(metadataProvider);
-        }
-        if (!checkLeftSubTreeMetadata && !checkRightSubTreeMetadata) {
-            return false;
-        }
-        if (checkLeftSubTreeMetadata) {
-            fillSubTreeIndexExprs(leftSubTree, analyzedAMs, context);
-        }
-        if (checkRightSubTreeMetadata) {
-            fillSubTreeIndexExprs(rightSubTree, analyzedAMs, context);
-        }
-        pruneIndexCandidates(analyzedAMs);
-
-        // For a left outer join, only the right subtree can act as the index subtree, so remove any candidate indexes chosen from the left subtree.
-        if (isLeftOuterJoin) {
-            Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
-            // Check applicability of indexes by access method type.
-            while (amIt.hasNext()) {
-                Map.Entry<IAccessMethod, AccessMethodAnalysisContext> entry = amIt.next();
-                AccessMethodAnalysisContext amCtx = entry.getValue();
-                Iterator<Map.Entry<Index, List<Pair<Integer, Integer>>>> indexIt = amCtx.indexExprsAndVars.entrySet()
-                        .iterator();
-                while (indexIt.hasNext()) {
-                    Map.Entry<Index, List<Pair<Integer, Integer>>> indexEntry = indexIt.next();
-
-                    Index chosenIndex = indexEntry.getKey();
-                    if (!chosenIndex.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
-                        indexIt.remove();
-                    }
-                }
-            }
-        }
-
-        // Choose index to be applied.
-        Pair<IAccessMethod, Index> chosenIndex = chooseIndex(analyzedAMs);
-        if (chosenIndex == null) {
-            context.addToDontApplySet(this, join);
-            return false;
-        }
-
-        // Apply plan transformation using chosen index.
-        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(chosenIndex.first);
-
-        //For LOJ with GroupBy, prepare objects to reset LOJ nullPlaceHolderVariable in GroupByOp
-        if (isLeftOuterJoin && hasGroupBy) {
-            analysisCtx.setLOJGroupbyOpRef(opRef);
-            ScalarFunctionCallExpression isNullFuncExpr = AccessMethodUtils
-                    .findLOJIsNullFuncInGroupBy((GroupByOperator) opRef.getValue());
-            analysisCtx.setLOJIsNullFuncInGroupBy(isNullFuncExpr);
-        }
-        boolean res = chosenIndex.first.applyJoinPlanTransformation(joinRef, leftSubTree, rightSubTree,
-                chosenIndex.second, analysisCtx, context, isLeftOuterJoin, hasGroupBy);
-        if (res) {
-            OperatorPropertiesUtil.typeOpRec(opRef, context);
-        }
-        context.addToDontApplySet(this, join);
-        return res;
-    }
-
-    protected boolean matchesOperatorPattern(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        // First check that the operator is a join and its condition is a function call.
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op1)) {
-            return false;
-        }
-
-        boolean isInnerJoin = isInnerJoin(op1);
-        isLeftOuterJoin = isLeftOuterJoin(op1);
-
-        if (!isInnerJoin && !isLeftOuterJoin) {
-            return false;
-        }
-
-        // Set and analyze the join.
-        if (isInnerJoin) {
-            joinRef = opRef;
-            join = (InnerJoinOperator) op1;
-        } else {
-            joinRef = op1.getInputs().get(0);
-            join = (LeftOuterJoinOperator) joinRef.getValue();
-        }
-
-        // Check that the join's condition is a function call.
-        ILogicalExpression condExpr = join.getCondition().getValue();
-        if (condExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        joinCond = (AbstractFunctionCallExpression) condExpr;
-        leftSubTree.initFromSubTree(join.getInputs().get(0));
-        rightSubTree.initFromSubTree(join.getInputs().get(1));
-        // One of the subtrees must have a datasource scan.
-        if (leftSubTree.hasDataSourceScan() || rightSubTree.hasDataSourceScan()) {
-            return true;
-        }
-        return false;
-    }
-
-    private boolean isLeftOuterJoin(AbstractLogicalOperator op1) {
-        if (op1.getInputs().size() != 1) {
-            return false;
-        }
-        if (((AbstractLogicalOperator) op1.getInputs().get(0).getValue()).getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
-            return false;
-        }
-        if (op1.getOperatorTag() == LogicalOperatorTag.GROUP) {
-            return true;
-        }
-        hasGroupBy = false;
-        return true;
-    }
-
-    private boolean isInnerJoin(AbstractLogicalOperator op1) {
-        return op1.getOperatorTag() == LogicalOperatorTag.INNERJOIN;
-    }
-
-    @Override
-    public Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods() {
-        return accessMethods;
-    }
-
-    private void clear() {
-        joinRef = null;
-        join = null;
-        joinCond = null;
-        isLeftOuterJoin = false;
-    }
-}

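The class comment of IntroduceJoinAccessMethodRule above lists the rule's five steps, while the rewritePost() body interleaves them with metadata checks and the left-outer-join handling. As a simplified orientation only, the control flow reduces to roughly the sketch below; it is not the repository code (error handling, the hasDataSource/metadata guards, and the LOJ/group-by special cases are dropped), the wrapper method name is made up, and the imports are assumed to be those already present in the rule.

    // Simplified outline of rewritePost() above. All methods called here appear in the rule or its
    // abstract base class as shown in this patch; the wrapper itself is illustrative.
    boolean rewriteJoinWithIndexSketch(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        if (!matchesOperatorPattern(opRef, context)) {                            // 1. match (group-by)? <- join <- subtrees
            return false;
        }
        Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs =
                new HashMap<IAccessMethod, AccessMethodAnalysisContext>();
        analyzeCondition(joinCond, leftSubTree.assignsAndUnnests, analyzedAMs);   // 2. find optimizable func exprs
        analyzeCondition(joinCond, rightSubTree.assignsAndUnnests, analyzedAMs);
        fillSubTreeIndexExprs(leftSubTree, analyzedAMs, context);                 // 3. consult index metadata
        fillSubTreeIndexExprs(rightSubTree, analyzedAMs, context);
        pruneIndexCandidates(analyzedAMs);
        Pair<IAccessMethod, Index> chosen = chooseIndex(analyzedAMs);             // 4. pick a single index
        if (chosen == null) {
            return false;
        }
        return chosen.first.applyJoinPlanTransformation(joinRef, leftSubTree,     // 5. rewrite the plan
                rightSubTree, chosen.second, analyzedAMs.get(chosen.first), context,
                isLeftOuterJoin, hasGroupBy);
    }
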
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
deleted file mode 100644
index e441a20..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
+++ /dev/null
@@ -1,451 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules.am;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions.ComparisonKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IntroduceLSMComponentFilterRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        if (!checkIfRuleIsApplicable(opRef, context)) {
-            return false;
-        }
-
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        ILogicalExpression condExpr = ((SelectOperator) op).getCondition().getValue();
-        AccessMethodAnalysisContext analysisCtx = analyzeCondition(condExpr);
-        if (analysisCtx.matchedFuncExprs.isEmpty()) {
-            return false;
-        }
-
-        Dataset dataset = getDataset(op, context);
-        List<String> filterFieldName = null;
-        ARecordType recType = null;
-        if (dataset != null && dataset.getDatasetType() == DatasetType.INTERNAL) {
-            filterFieldName = DatasetUtils.getFilterField(dataset);
-            IAType itemType = ((AqlMetadataProvider) context.getMetadataProvider()).findType(
-                    dataset.getDataverseName(), dataset.getItemTypeName());
-            if (itemType.getTypeTag() == ATypeTag.RECORD) {
-                recType = (ARecordType) itemType;
-            }
-        }
-        if (filterFieldName == null || recType == null) {
-            return false;
-        }
-        List<Index> datasetIndexes = ((AqlMetadataProvider) context.getMetadataProvider()).getDatasetIndexes(
-                dataset.getDataverseName(), dataset.getDatasetName());
-
-        List<IOptimizableFuncExpr> optFuncExprs = new ArrayList<IOptimizableFuncExpr>();
-
-        for (int i = 0; i < analysisCtx.matchedFuncExprs.size(); i++) {
-            IOptimizableFuncExpr optFuncExpr = analysisCtx.matchedFuncExprs.get(i);
-            boolean found = findMatchedExprFieldName(optFuncExpr, op, dataset, recType, datasetIndexes);
-            if (found && optFuncExpr.getFieldName(0).equals(filterFieldName)) {
-                optFuncExprs.add(optFuncExpr);
-            }
-        }
-        if (optFuncExprs.isEmpty()) {
-            return false;
-        }
-        changePlan(optFuncExprs, op, dataset, context);
-
-        OperatorPropertiesUtil.typeOpRec(opRef, context);
-        context.addToDontApplySet(this, op);
-        return true;
-    }
-
-    private AssignOperator createAssignOperator(List<IOptimizableFuncExpr> optFuncExprs,
-            List<LogicalVariable> minFilterVars, List<LogicalVariable> maxFilterVars, IOptimizationContext context) {
-        List<LogicalVariable> assignKeyVarList = new ArrayList<LogicalVariable>();
-        List<Mutable<ILogicalExpression>> assignKeyExprList = new ArrayList<Mutable<ILogicalExpression>>();
-
-        for (IOptimizableFuncExpr optFuncExpr : optFuncExprs) {
-            ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(optFuncExpr.getFuncExpr()
-                    .getFunctionIdentifier());
-            ILogicalExpression searchKeyExpr = new ConstantExpression(optFuncExpr.getConstantVal(0));
-            LogicalVariable var = context.newVar();
-            assignKeyExprList.add(new MutableObject<ILogicalExpression>(searchKeyExpr));
-            assignKeyVarList.add(var);
-            if (ck == ComparisonKind.GE || ck == ComparisonKind.GT) {
-                minFilterVars.add(var);
-            } else if (ck == ComparisonKind.LE || ck == ComparisonKind.LT) {
-                maxFilterVars.add(var);
-            } else if (ck == ComparisonKind.EQ) {
-                minFilterVars.add(var);
-                maxFilterVars.add(var);
-            }
-        }
-        return new AssignOperator(assignKeyVarList, assignKeyExprList);
-    }
-
-    private void changePlan(List<IOptimizableFuncExpr> optFuncExprs, AbstractLogicalOperator op, Dataset dataset,
-            IOptimizationContext context) throws AlgebricksException {
-
-        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        while (descendantOp != null) {
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
-                AqlDataSource ds = (AqlDataSource) dataSourceScanOp.getDataSource();
-                if (dataset.getDatasetName().compareTo(((DatasetDataSource) ds).getDataset().getDatasetName()) == 0) {
-                    List<LogicalVariable> minFilterVars = new ArrayList<LogicalVariable>();
-                    List<LogicalVariable> maxFilterVars = new ArrayList<LogicalVariable>();
-
-                    AssignOperator assignOp = createAssignOperator(optFuncExprs, minFilterVars, maxFilterVars, context);
-
-                    dataSourceScanOp.setMinFilterVars(minFilterVars);
-                    dataSourceScanOp.setMaxFilterVars(maxFilterVars);
-
-                    List<Mutable<ILogicalExpression>> additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-                    for (LogicalVariable var : assignOp.getVariables()) {
-                        additionalFilteringExpressions.add(new MutableObject<ILogicalExpression>(
-                                new VariableReferenceExpression(var)));
-                    }
-
-                    dataSourceScanOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-
-                    assignOp.getInputs().add(
-                            new MutableObject<ILogicalOperator>(dataSourceScanOp.getInputs().get(0).getValue()));
-                    dataSourceScanOp.getInputs().get(0).setValue(assignOp);
-                }
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
-                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
-                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                    FunctionIdentifier fid = f.getFunctionIdentifier();
-                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-                        throw new IllegalStateException();
-                    }
-                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                    jobGenParams.readFromFuncArgs(f.getArguments());
-                    if (dataset.getDatasetName().compareTo(jobGenParams.datasetName) == 0) {
-                        List<LogicalVariable> minFilterVars = new ArrayList<LogicalVariable>();
-                        List<LogicalVariable> maxFilterVars = new ArrayList<LogicalVariable>();
-
-                        AssignOperator assignOp = createAssignOperator(optFuncExprs, minFilterVars, maxFilterVars,
-                                context);
-
-                        unnestMapOp.setMinFilterVars(minFilterVars);
-                        unnestMapOp.setMaxFilterVars(maxFilterVars);
-
-                        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-                        for (LogicalVariable var : assignOp.getVariables()) {
-                            additionalFilteringExpressions.add(new MutableObject<ILogicalExpression>(
-                                    new VariableReferenceExpression(var)));
-                        }
-                        unnestMapOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-                        assignOp.getInputs().add(
-                                new MutableObject<ILogicalOperator>(unnestMapOp.getInputs().get(0).getValue()));
-                        unnestMapOp.getInputs().get(0).setValue(assignOp);
-                    }
-                }
-            }
-            if (descendantOp.getInputs().isEmpty()) {
-                break;
-            }
-            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
-        }
-    }
-
-    private Dataset getDataset(AbstractLogicalOperator op, IOptimizationContext context) throws AlgebricksException {
-        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        while (descendantOp != null) {
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
-                AqlDataSource ds = (AqlDataSource) dataSourceScanOp.getDataSource();
-                return ((DatasetDataSource) ds).getDataset();
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
-                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
-                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                    FunctionIdentifier fid = f.getFunctionIdentifier();
-                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-                        throw new IllegalStateException();
-                    }
-                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                    jobGenParams.readFromFuncArgs(f.getArguments());
-                    return ((AqlMetadataProvider) context.getMetadataProvider()).findDataset(
-                            jobGenParams.dataverseName, jobGenParams.datasetName);
-                }
-            }
-            if (descendantOp.getInputs().isEmpty()) {
-                break;
-            }
-            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
-        }
-        return null;
-    }
-
-    private boolean checkIfRuleIsApplicable(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        // First check that the operator is a select and its condition is a function call.
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-
-        ILogicalExpression condExpr = ((SelectOperator) op).getCondition().getValue();
-        if (condExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        return true;
-    }
-
-    private AccessMethodAnalysisContext analyzeCondition(ILogicalExpression cond) {
-        AccessMethodAnalysisContext analysisCtx = new AccessMethodAnalysisContext();
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) cond;
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-        if (funcIdent != AlgebricksBuiltinFunctions.OR) {
-            analyzeFunctionExpr(funcExpr, analysisCtx);
-            for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
-                ILogicalExpression argExpr = arg.getValue();
-                if (argExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                    continue;
-                }
-                analyzeFunctionExpr((AbstractFunctionCallExpression) argExpr, analysisCtx);
-            }
-        }
-        return analysisCtx;
-    }
-
-    private void analyzeFunctionExpr(AbstractFunctionCallExpression funcExpr, AccessMethodAnalysisContext analysisCtx) {
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-        if (funcIdent == AlgebricksBuiltinFunctions.LE || funcIdent == AlgebricksBuiltinFunctions.GE
-                || funcIdent == AlgebricksBuiltinFunctions.LT || funcIdent == AlgebricksBuiltinFunctions.GT
-                || funcIdent == AlgebricksBuiltinFunctions.EQ) {
-            AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
-        }
-    }
-
-    private boolean findMatchedExprFieldName(IOptimizableFuncExpr optFuncExpr, AbstractLogicalOperator op,
-            Dataset dataset, ARecordType recType, List<Index> datasetIndexes) throws AlgebricksException {
-        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        while (descendantOp != null) {
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                AssignOperator assignOp = (AssignOperator) descendantOp;
-                List<LogicalVariable> varList = assignOp.getVariables();
-                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
-                    LogicalVariable var = varList.get(varIndex);
-                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
-                    if (funcVarIndex == -1) {
-                        continue;
-                    }
-                    List<String> fieldName = getFieldNameFromSubAssignTree(optFuncExpr, descendantOp, varIndex, recType).second;
-                    if (fieldName == null) {
-                        return false;
-                    }
-                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
-                    return true;
-                }
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-                DataSourceScanOperator scanOp = (DataSourceScanOperator) descendantOp;
-                List<LogicalVariable> varList = scanOp.getVariables();
-                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
-                    LogicalVariable var = varList.get(varIndex);
-                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
-                    if (funcVarIndex == -1) {
-                        continue;
-                    }
-                    // The variable value is one of the partitioning fields.
-                    List<String> fieldName = DatasetUtils.getPartitioningKeys(dataset).get(varIndex);
-                    if (fieldName == null) {
-                        return false;
-                    }
-                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
-                    return true;
-                }
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
-                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
-                List<LogicalVariable> varList = unnestMapOp.getVariables();
-                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
-                    LogicalVariable var = varList.get(varIndex);
-                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
-                    if (funcVarIndex == -1) {
-                        continue;
-                    }
-
-                    String indexName = null;
-                    Index index = null;
-                    ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-                    if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                        FunctionIdentifier fid = f.getFunctionIdentifier();
-                        if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-                            throw new IllegalStateException();
-                        }
-                        AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                        jobGenParams.readFromFuncArgs(f.getArguments());
-                        indexName = jobGenParams.indexName;
-                        for (Index idx : datasetIndexes) {
-                            if (idx.getIndexName().compareTo(indexName) == 0) {
-                                index = idx;
-                                break;
-                            }
-                        }
-                    }
-
-                    int numSecondaryKeys = AccessMethodUtils.getNumSecondaryKeys(index, recType);
-                    List<String> fieldName;
-                    if (varIndex >= numSecondaryKeys) {
-                        fieldName = DatasetUtils.getPartitioningKeys(dataset).get(varIndex - numSecondaryKeys);
-                    } else {
-                        fieldName = index.getKeyFieldNames().get(varIndex);
-                    }
-                    if (fieldName == null) {
-                        return false;
-                    }
-                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
-                    return true;
-                }
-            }
-
-            if (descendantOp.getInputs().isEmpty()) {
-                break;
-            }
-            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
-        }
-        return false;
-    }
-
-    private Pair<ARecordType, List<String>> getFieldNameFromSubAssignTree(IOptimizableFuncExpr optFuncExpr,
-            AbstractLogicalOperator op, int varIndex, ARecordType recType) {
-        AbstractLogicalExpression expr = null;
-        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            AssignOperator assignOp = (AssignOperator) op;
-            expr = (AbstractLogicalExpression) assignOp.getExpressions().get(varIndex).getValue();
-        }
-        if (expr == null || expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return null;
-        }
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-        if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME
-                || funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX) {
-
-            // Get the variable used here, figure out which input it came from, and go to that input.
-            ArrayList<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
-            expr.getUsedVariables(usedVars);
-            LogicalVariable usedVar = usedVars.get(0);
-            List<String> returnList = new ArrayList<String>();
-
-            //Find the input that it came from
-            for (int varCheck = 0; varCheck < op.getInputs().size(); varCheck++) {
-                AbstractLogicalOperator nestedOp = (AbstractLogicalOperator) op.getInputs().get(varCheck).getValue();
-                if (nestedOp.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-                    if (varCheck == op.getInputs().size() - 1) {
-                        // no-op: this (last) input is not an ASSIGN, so there is nothing to recurse into here
-                    }
-                } else {
-                    int nestedAssignVar = ((AssignOperator) nestedOp).getVariables().indexOf(usedVar);
-                    if (nestedAssignVar == -1) {
-                        continue;
-                    }
-                    //get the nested info from the lower input
-                    Pair<ARecordType, List<String>> lowerInfo = getFieldNameFromSubAssignTree(optFuncExpr,
-                            (AbstractLogicalOperator) op.getInputs().get(varCheck).getValue(), nestedAssignVar, recType);
-                    if (lowerInfo != null) {
-                        recType = lowerInfo.first;
-                        returnList = lowerInfo.second;
-                    }
-                }
-            }
-
-            if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME) {
-                ILogicalExpression nameArg = funcExpr.getArguments().get(1).getValue();
-                if (nameArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                    return null;
-                }
-                ConstantExpression constExpr = (ConstantExpression) nameArg;
-                returnList.addAll(Arrays.asList(((AString) ((AsterixConstantValue) constExpr.getValue()).getObject())
-                        .getStringValue()));
-                return new Pair<ARecordType, List<String>>(recType, returnList);
-            } else if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX) {
-                ILogicalExpression idxArg = funcExpr.getArguments().get(1).getValue();
-                if (idxArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                    return null;
-                }
-                ConstantExpression constExpr = (ConstantExpression) idxArg;
-                int fieldIndex = ((AInt32) ((AsterixConstantValue) constExpr.getValue()).getObject()).getIntegerValue();
-                returnList.addAll(Arrays.asList(recType.getFieldNames()[fieldIndex]));
-                IAType subType = recType.getFieldTypes()[fieldIndex];
-                if (subType.getTypeTag() == ATypeTag.RECORD) {
-                    recType = (ARecordType) subType;
-                }
-                return new Pair<ARecordType, List<String>>(recType, returnList);
-            }
-
-        }
-
-        ILogicalExpression argExpr = funcExpr.getArguments().get(0).getValue();
-        if (argExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return null;
-        }
-
-        return null;
-    }
-}
\ No newline at end of file

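The getFieldNameFromSubAssignTree helper deleted above reconstructs the field-name path of a nested field access by following FIELD_ACCESS_BY_NAME and FIELD_ACCESS_BY_INDEX calls back through the assign operators that produced their inputs. As a rough illustration of that idea only, here is a minimal sketch over a made-up expression model; the class and method names below are hypothetical and this is not the Algebricks API (ordinal accesses are resolved only against the base record's field names, whereas the real helper also descends into nested record types):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class FieldPathResolverSketch {
        // Simplified expression model (hypothetical, not the Algebricks expression classes).
        static abstract class Expr {}
        static class Var extends Expr {
            final String name;
            Var(String name) { this.name = name; }
        }
        static class FieldAccessByName extends Expr {
            final Expr input; final String field;
            FieldAccessByName(Expr input, String field) { this.input = input; this.field = field; }
        }
        static class FieldAccessByIndex extends Expr {
            final Expr input; final int index;
            FieldAccessByIndex(Expr input, int index) { this.input = input; this.index = index; }
        }

        /** Walks nested field accesses from the outside in and returns the field-name path. */
        static List<String> resolve(Expr e, List<String> recordFieldNames) {
            List<String> path = new ArrayList<String>();
            while (true) {
                if (e instanceof FieldAccessByName) {
                    FieldAccessByName f = (FieldAccessByName) e;
                    path.add(0, f.field);                        // innermost access ends up first
                    e = f.input;
                } else if (e instanceof FieldAccessByIndex) {
                    FieldAccessByIndex f = (FieldAccessByIndex) e;
                    path.add(0, recordFieldNames.get(f.index));  // ordinal access resolved via the record type
                    e = f.input;
                } else {
                    return path;                                 // reached the record variable
                }
            }
        }

        public static void main(String[] args) {
            Expr zip = new FieldAccessByName(new FieldAccessByIndex(new Var("$record"), 2), "zip");
            System.out.println(resolve(zip, Arrays.asList("id", "name", "address")));   // [address, zip]
        }
    }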

[44/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java
deleted file mode 100644
index 3a19854..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushFieldAccessRule.java
+++ /dev/null
@@ -1,391 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class PushFieldAccessRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        AssignOperator access = (AssignOperator) op;
-        ILogicalExpression expr = getFirstExpr(access);
-        String finalAnnot = null;
-        if (AnalysisUtil.isAccessToFieldRecord(expr)) {
-            finalAnnot = AsterixOperatorAnnotations.PUSHED_FIELD_ACCESS;
-        } else if (AnalysisUtil.isRunnableAccessToFieldRecord(expr)) {
-            finalAnnot = AsterixOperatorAnnotations.PUSHED_RUNNABLE_FIELD_ACCESS;
-        } else {
-            return false;
-        }
-        return propagateFieldAccessRec(opRef, context, finalAnnot);
-    }
-
-    @SuppressWarnings("unchecked")
-    private boolean isAccessToIndexedField(AssignOperator assign, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractFunctionCallExpression accessFun = (AbstractFunctionCallExpression) assign.getExpressions().get(0)
-                .getValue();
-        ILogicalExpression e0 = accessFun.getArguments().get(0).getValue();
-        if (e0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-        LogicalVariable var = ((VariableReferenceExpression) e0).getVariableReference();
-        if (context.findPrimaryKey(var) == null) {
-            // not referring to a dataset record
-            return false;
-        }
-        AbstractLogicalOperator op = assign;
-        while (op.getInputs().size() == 1 && op.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN) {
-            op = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        }
-        if (op.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN) {
-            return false;
-        }
-        DataSourceScanOperator scan = (DataSourceScanOperator) op;
-        LogicalVariable recVar = scan.getVariables().get(scan.getVariables().size() - 1);
-        if (recVar != var) {
-            return false;
-        }
-        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-        AqlSourceId asid = ((IDataSource<AqlSourceId>) scan.getDataSource()).getId();
-
-        Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
-        if (dataset == null) {
-            throw new AlgebricksException("Dataset " + asid.getDatasourceName() + " not found.");
-        }
-        if (dataset.getDatasetType() != DatasetType.INTERNAL) {
-            return false;
-        }
-        ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
-        if (e1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            return false;
-        }
-        ConstantExpression ce = (ConstantExpression) e1;
-        IAObject obj = ((AsterixConstantValue) ce.getValue()).getObject();
-        String fldName;
-        if (obj.getType().getTypeTag() == ATypeTag.STRING) {
-            fldName = ((AString) obj).getStringValue();
-        } else {
-            int pos = ((AInt32) obj).getIntegerValue();
-            String tName = dataset.getItemTypeName();
-            IAType t = mp.findType(dataset.getDataverseName(), tName);
-            if (t.getTypeTag() != ATypeTag.RECORD) {
-                return false;
-            }
-            ARecordType rt = (ARecordType) t;
-            if (pos >= rt.getFieldNames().length) {
-                return false;
-            }
-            fldName = rt.getFieldNames()[pos];
-        }
-
-        List<Index> datasetIndexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
-        boolean hasSecondaryIndex = false;
-        for (Index index : datasetIndexes) {
-            if (index.isSecondaryIndex()) {
-                hasSecondaryIndex = true;
-                break;
-            }
-        }
-
-        return hasSecondaryIndex;
-    }
-
-    private boolean tryingToPushThroughSelectionWithSameDataSource(AssignOperator access, AbstractLogicalOperator op2) {
-        if (op2.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-        ILogicalExpression e1 = (ILogicalExpression) access.getAnnotations().get(
-                AsterixOperatorAnnotations.FIELD_ACCESS);
-        if (e1 == null) {
-            return false;
-        }
-        ILogicalExpression e2 = (ILogicalExpression) op2.getAnnotations().get(AsterixOperatorAnnotations.FIELD_ACCESS);
-        if (e2 == null) {
-            return false;
-        }
-        return e1.equals(e2);
-    }
-
-    @SuppressWarnings("unchecked")
-    private boolean propagateFieldAccessRec(Mutable<ILogicalOperator> opRef, IOptimizationContext context,
-            String finalAnnot) throws AlgebricksException {
-        AssignOperator access = (AssignOperator) opRef.getValue();
-        Mutable<ILogicalOperator> opRef2 = access.getInputs().get(0);
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
-        // If it's not an indexed field, it is pushed so that scan can be
-        // rewritten into index search.
-        if (op2.getOperatorTag() == LogicalOperatorTag.PROJECT || context.checkAndAddToAlreadyCompared(access, op2)
-                && !(op2.getOperatorTag() == LogicalOperatorTag.SELECT && isAccessToIndexedField(access, context))) {
-            return false;
-        }
-        if (tryingToPushThroughSelectionWithSameDataSource(access, op2)) {
-            return false;
-        }
-        if (testAndModifyRedundantOp(access, op2)) {
-            propagateFieldAccessRec(opRef2, context, finalAnnot);
-            return true;
-        }
-        List<LogicalVariable> usedInAccess = new LinkedList<LogicalVariable>();
-        VariableUtilities.getUsedVariables(access, usedInAccess);
-        List<LogicalVariable> produced2 = new LinkedList<LogicalVariable>();
-        if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
-            VariableUtilities.getLiveVariables(op2, produced2);
-        } else {
-            VariableUtilities.getProducedVariables(op2, produced2);
-        }
-        boolean pushItDown = false;
-        List<LogicalVariable> inter = new ArrayList<LogicalVariable>(usedInAccess);
-        if (inter.isEmpty()) { // ground value
-            return false;
-        }
-        inter.retainAll(produced2);
-        if (inter.isEmpty()) {
-            pushItDown = true;
-        } else if (op2.getOperatorTag() == LogicalOperatorTag.GROUP) {
-            GroupByOperator g = (GroupByOperator) op2;
-            List<Pair<LogicalVariable, LogicalVariable>> varMappings = new ArrayList<Pair<LogicalVariable, LogicalVariable>>();
-            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : g.getDecorList()) {
-                ILogicalExpression e = p.second.getValue();
-                if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                    LogicalVariable decorVar = GroupByOperator.getDecorVariable(p);
-                    if (inter.contains(decorVar)) {
-                        inter.remove(decorVar);
-                        LogicalVariable v1 = ((VariableReferenceExpression) e).getVariableReference();
-                        varMappings.add(new Pair<LogicalVariable, LogicalVariable>(decorVar, v1));
-                    }
-                }
-            }
-            if (inter.isEmpty()) {
-                boolean changed = false;
-                for (Pair<LogicalVariable, LogicalVariable> m : varMappings) {
-                    LogicalVariable v2 = context.newVar();
-                    LogicalVariable oldVar = access.getVariables().get(0);
-                    g.getDecorList().add(
-                            new Pair<LogicalVariable, Mutable<ILogicalExpression>>(oldVar,
-                                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v2))));
-                    changed = true;
-                    access.getVariables().set(0, v2);
-                    VariableUtilities.substituteVariables(access, m.first, m.second, context);
-                }
-                if (changed) {
-                    context.computeAndSetTypeEnvironmentForOperator(g);
-                }
-                usedInAccess.clear();
-                VariableUtilities.getUsedVariables(access, usedInAccess);
-                pushItDown = true;
-            }
-        }
-        if (pushItDown) {
-            if (op2.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
-                Mutable<ILogicalOperator> childOfSubplan = ((NestedTupleSourceOperator) op2).getDataSourceReference()
-                        .getValue().getInputs().get(0);
-                pushAccessDown(opRef, op2, childOfSubplan, context, finalAnnot);
-                return true;
-            }
-            if (op2.getInputs().size() == 1 && !op2.hasNestedPlans()) {
-                pushAccessDown(opRef, op2, op2.getInputs().get(0), context, finalAnnot);
-                return true;
-            } else {
-                for (Mutable<ILogicalOperator> inp : op2.getInputs()) {
-                    HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
-                    VariableUtilities.getLiveVariables(inp.getValue(), v2);
-                    if (v2.containsAll(usedInAccess)) {
-                        pushAccessDown(opRef, op2, inp, context, finalAnnot);
-                        return true;
-                    }
-                }
-            }
-            if (op2.hasNestedPlans()) {
-                AbstractOperatorWithNestedPlans nestedOp = (AbstractOperatorWithNestedPlans) op2;
-                for (ILogicalPlan plan : nestedOp.getNestedPlans()) {
-                    for (Mutable<ILogicalOperator> root : plan.getRoots()) {
-                        HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
-                        VariableUtilities.getLiveVariables(root.getValue(), v2);
-                        if (v2.containsAll(usedInAccess)) {
-                            pushAccessDown(opRef, op2, root, context, finalAnnot);
-                            return true;
-                        }
-                    }
-                }
-            }
-            throw new AsterixRuntimeException("Field access " + access.getExpressions().get(0).getValue()
-                    + " does not correspond to any input of operator " + op2);
-        } else {
-            // Check if the accessed field is not one of the partitioning key
-            // fields. If yes, we can equate the two variables.
-            if (op2.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-                DataSourceScanOperator scan = (DataSourceScanOperator) op2;
-                int n = scan.getVariables().size();
-                LogicalVariable scanRecordVar = scan.getVariables().get(n - 1);
-                AbstractFunctionCallExpression accessFun = (AbstractFunctionCallExpression) access.getExpressions()
-                        .get(0).getValue();
-                ILogicalExpression e0 = accessFun.getArguments().get(0).getValue();
-                LogicalExpressionTag tag = e0.getExpressionTag();
-                if (tag == LogicalExpressionTag.VARIABLE) {
-                    VariableReferenceExpression varRef = (VariableReferenceExpression) e0;
-                    if (varRef.getVariableReference() == scanRecordVar) {
-                        ILogicalExpression e1 = accessFun.getArguments().get(1).getValue();
-                        if (e1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-                            IDataSource<AqlSourceId> dataSource = (IDataSource<AqlSourceId>) scan.getDataSource();
-                            AqlDataSourceType dsType = ((AqlDataSource) dataSource).getDatasourceType();
-                            if (dsType == AqlDataSourceType.FEED || dsType == AqlDataSourceType.LOADABLE) {
-                                return false;
-                            }
-                            AqlSourceId asid = dataSource.getId();
-                            AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-                            Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
-                            if (dataset == null) {
-                                throw new AlgebricksException("Dataset " + asid.getDatasourceName() + " not found.");
-                            }
-                            if (dataset.getDatasetType() != DatasetType.INTERNAL) {
-                                setAsFinal(access, context, finalAnnot);
-                                return false;
-                            }
-                            ConstantExpression ce = (ConstantExpression) e1;
-                            IAObject obj = ((AsterixConstantValue) ce.getValue()).getObject();
-                            String fldName;
-                            if (obj.getType().getTypeTag() == ATypeTag.STRING) {
-                                fldName = ((AString) obj).getStringValue();
-                            } else {
-                                int pos = ((AInt32) obj).getIntegerValue();
-                                String tName = dataset.getItemTypeName();
-                                IAType t = mp.findType(dataset.getDataverseName(), tName);
-                                if (t.getTypeTag() != ATypeTag.RECORD) {
-                                    return false;
-                                }
-                                ARecordType rt = (ARecordType) t;
-                                if (pos >= rt.getFieldNames().length) {
-                                    setAsFinal(access, context, finalAnnot);
-                                    return false;
-                                }
-                                fldName = rt.getFieldNames()[pos];
-                            }
-                            int p = DatasetUtils.getPositionOfPartitioningKeyField(dataset, fldName);
-                            if (p < 0) { // not one of the partitioning fields
-                                setAsFinal(access, context, finalAnnot);
-                                return false;
-                            }
-                            LogicalVariable keyVar = scan.getVariables().get(p);
-                            access.getExpressions().get(0).setValue(new VariableReferenceExpression(keyVar));
-                            return true;
-                        }
-                    }
-                }
-            }
-            setAsFinal(access, context, finalAnnot);
-            return false;
-        }
-    }
-
-    private void setAsFinal(ILogicalOperator access, IOptimizationContext context, String finalAnnot) {
-        access.getAnnotations().put(finalAnnot, true);
-        context.addToDontApplySet(this, access);
-    }
-
-    private boolean testAndModifyRedundantOp(AssignOperator access, AbstractLogicalOperator op2) {
-        if (op2.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        AssignOperator a2 = (AssignOperator) op2;
-        if (getFirstExpr(access).equals(getFirstExpr(a2))) {
-            access.getExpressions().get(0).setValue(new VariableReferenceExpression(a2.getVariables().get(0)));
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    // indirect recursivity with propagateFieldAccessRec
-    private void pushAccessDown(Mutable<ILogicalOperator> fldAccessOpRef, ILogicalOperator op2,
-            Mutable<ILogicalOperator> inputOfOp2, IOptimizationContext context, String finalAnnot)
-            throws AlgebricksException {
-        ILogicalOperator fieldAccessOp = fldAccessOpRef.getValue();
-        fldAccessOpRef.setValue(op2);
-        List<Mutable<ILogicalOperator>> faInpList = fieldAccessOp.getInputs();
-        faInpList.clear();
-        faInpList.add(new MutableObject<ILogicalOperator>(inputOfOp2.getValue()));
-        inputOfOp2.setValue(fieldAccessOp);
-        // typing
-        context.computeAndSetTypeEnvironmentForOperator(fieldAccessOp);
-        context.computeAndSetTypeEnvironmentForOperator(op2);
-        propagateFieldAccessRec(inputOfOp2, context, finalAnnot);
-    }
-
-    private ILogicalExpression getFirstExpr(AssignOperator assign) {
-        return assign.getExpressions().get(0).getValue();
-    }
-
-}

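At its core, PushFieldAccessRule keeps moving a field-access assign below operators that do not produce any of the variables the access uses, so the access eventually reaches the data-source scan, where it can enable an index-based rewrite or be replaced by a reference to a partitioning-key variable. A rough sketch of just the push-down step, over a toy single-input operator chain (hypothetical classes, not the Algebricks operator model):

    import java.util.Collections;
    import java.util.Set;

    public class PushFieldAccessSketch {

        // Toy operator: a name, the variables it produces, and a single input.
        static class Op {
            final String name;
            final Set<String> produced;
            Op input;

            Op(String name, Set<String> produced, Op input) {
                this.name = name;
                this.produced = produced;
                this.input = input;
            }
        }

        /**
         * Pushes a field-access assign below every operator that produces none of the
         * variables the access uses, and returns the new top of the rewritten chain.
         */
        static Op pushDown(Op fieldAccess, Set<String> usedByAccess) {
            Op newTop = fieldAccess.input;
            Op parent = null;
            Op cursor = newTop;
            while (cursor.input != null && Collections.disjoint(cursor.produced, usedByAccess)) {
                parent = cursor;
                cursor = cursor.input;
            }
            if (parent == null) {
                return fieldAccess;     // the direct input already produces a used variable: nothing to do
            }
            parent.input = fieldAccess; // splice the assign right above the producing operator
            fieldAccess.input = cursor;
            return newTop;
        }

        public static void main(String[] args) {
            Op scan = new Op("scan", Collections.singleton("$record"), null);
            Op select = new Op("select", Collections.<String> emptySet(), scan);
            Op access = new Op("assign $f := field-access($record)", Collections.singleton("$f"), select);
            for (Op o = pushDown(access, Collections.singleton("$record")); o != null; o = o.input) {
                System.out.println(o.name);   // select, then the assign, then scan
            }
        }
    }

The real rule additionally handles group-by decor lists, nested plans, multi-input operators, and, once the scan is reached, the partitioning-key substitution shown in the DATASOURCESCAN branch above.
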
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java
deleted file mode 100644
index 647c3ee..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushGroupByThroughProduct.java
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class PushGroupByThroughProduct implements IAlgebraicRewriteRule {
-
-    private enum PushTestResult {
-        FALSE,
-        TRUE,
-        REPEATED_DECORS
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.GROUP) {
-            return false;
-        }
-        Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.INNERJOIN) {
-            return false;
-        }
-        InnerJoinOperator join = (InnerJoinOperator) op2;
-        if (!OperatorPropertiesUtil.isAlwaysTrueCond(join.getCondition().getValue())) {
-            // not a product
-            return false;
-        }
-        GroupByOperator gby = (GroupByOperator) op1;
-
-        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorToPush = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
-        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorNotToPush = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
-
-        Mutable<ILogicalOperator> opLeftRef = join.getInputs().get(0);
-        ILogicalOperator opLeft = opLeftRef.getValue();
-        switch (canPushThrough(gby, opLeft, decorToPush, decorNotToPush)) {
-            case REPEATED_DECORS: {
-                return false;
-            }
-            case TRUE: {
-                push(opRef, opRef2, 0, decorToPush, decorNotToPush, context);
-                return true;
-            }
-            case FALSE: {
-                decorToPush.clear();
-                Mutable<ILogicalOperator> opRightRef = join.getInputs().get(1);
-                ILogicalOperator opRight = opRightRef.getValue();
-                if (canPushThrough(gby, opRight, decorToPush, decorNotToPush) == PushTestResult.TRUE) {
-                    push(opRef, opRef2, 1, decorToPush, decorNotToPush, context);
-                    return true;
-                } else {
-                    return false;
-                }
-            }
-            default: {
-                throw new IllegalStateException();
-            }
-        }
-    }
-
-    private void push(Mutable<ILogicalOperator> opRefGby, Mutable<ILogicalOperator> opRefJoin, int branch,
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorToPush,
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorNotToPush, IOptimizationContext context)
-            throws AlgebricksException {
-        GroupByOperator gby = (GroupByOperator) opRefGby.getValue();
-        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) opRefJoin.getValue();
-        gby.getDecorList().clear();
-        gby.getDecorList().addAll(decorToPush);
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : decorNotToPush) {
-            LogicalVariable v1 = p.first;
-            VariableReferenceExpression varRef = (VariableReferenceExpression) p.second.getValue();
-            LogicalVariable v2 = varRef.getVariableReference();
-            OperatorManipulationUtil.substituteVarRec(join, v2, v1, true, context);
-        }
-        Mutable<ILogicalOperator> branchRef = join.getInputs().get(branch);
-        ILogicalOperator opBranch = branchRef.getValue();
-        opRefJoin.setValue(opBranch);
-        branchRef.setValue(gby);
-        opRefGby.setValue(join);
-    }
-
-    private PushTestResult canPushThrough(GroupByOperator gby, ILogicalOperator branch,
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> toPush,
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> notToPush) throws AlgebricksException {
-        Collection<LogicalVariable> fromBranch = new HashSet<LogicalVariable>();
-        VariableUtilities.getLiveVariables(branch, fromBranch);
-        Collection<LogicalVariable> usedInGbyExprList = new ArrayList<LogicalVariable>();
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getGroupByList()) {
-            p.second.getValue().getUsedVariables(usedInGbyExprList);
-        }
-
-        if (!fromBranch.containsAll(usedInGbyExprList)) {
-            return PushTestResult.FALSE;
-        }
-        Set<LogicalVariable> free = new HashSet<LogicalVariable>();
-        for (ILogicalPlan p : gby.getNestedPlans()) {
-            for (Mutable<ILogicalOperator> r : p.getRoots()) {
-                OperatorPropertiesUtil.getFreeVariablesInSelfOrDesc((AbstractLogicalOperator) r.getValue(), free);
-            }
-        }
-        if (!fromBranch.containsAll(free)) {
-            return PushTestResult.FALSE;
-        }
-
-        Set<LogicalVariable> decorVarRhs = new HashSet<LogicalVariable>();
-        decorVarRhs.clear();
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gby.getDecorList()) {
-            ILogicalExpression expr = p.second.getValue();
-            if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-                return PushTestResult.FALSE;
-            }
-            VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
-            LogicalVariable v = varRef.getVariableReference();
-            if (decorVarRhs.contains(v)) {
-                return PushTestResult.REPEATED_DECORS;
-            }
-            decorVarRhs.add(v);
-
-            if (fromBranch.contains(v)) {
-                toPush.add(p);
-            } else {
-                notToPush.add(p);
-            }
-        }
-        return PushTestResult.TRUE;
-    }
-}

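The decision in canPushThrough boils down to checking whether one branch of the product supplies every variable the group-by (including its nested plans) needs. A minimal sketch of that branch selection, with hypothetical names and ignoring the decor-list bookkeeping the real rule performs:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class PushGroupByThroughProductSketch {

        enum Target { LEFT, RIGHT, NONE }

        /** Picks the product branch whose live variables cover everything the group-by uses. */
        static Target pickBranch(Set<String> usedByGroupBy, Set<String> liveInLeft, Set<String> liveInRight) {
            if (liveInLeft.containsAll(usedByGroupBy)) {
                return Target.LEFT;
            }
            if (liveInRight.containsAll(usedByGroupBy)) {
                return Target.RIGHT;
            }
            return Target.NONE;   // the group-by needs variables from both branches: cannot push
        }

        public static void main(String[] args) {
            Set<String> used = new HashSet<String>(Arrays.asList("$cid", "$name"));
            Set<String> left = new HashSet<String>(Arrays.asList("$cid", "$name", "$age"));
            Set<String> right = new HashSet<String>(Arrays.asList("$oid"));
            System.out.println(pickBranch(used, left, right));   // LEFT
        }
    }

When a branch is found, the rule swaps the group-by below the product in that branch; decor expressions that refer to the other branch are kept out of the pushed operator, and the REPEATED_DECORS case bails out when the same variable appears twice on the decor right-hand side.
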
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java
deleted file mode 100644
index 7d9d261..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushProperJoinThroughProduct.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class PushProperJoinThroughProduct implements IAlgebraicRewriteRule {
-
-    private List<LogicalVariable> usedInCond1AndMaps = new ArrayList<LogicalVariable>();
-    private List<LogicalVariable> productLeftBranchVars = new ArrayList<LogicalVariable>();
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        LogicalOperatorTag tag1 = op.getOperatorTag();
-        if (tag1 != LogicalOperatorTag.INNERJOIN && tag1 != LogicalOperatorTag.LEFTOUTERJOIN) {
-            return false;
-        }
-        AbstractBinaryJoinOperator join1 = (AbstractBinaryJoinOperator) op;
-        ILogicalExpression cond1 = join1.getCondition().getValue();
-        // don't try to push a product down
-        if (OperatorPropertiesUtil.isAlwaysTrueCond(cond1)) {
-            return false;
-        }
-
-        Mutable<ILogicalOperator> opRef2 = op.getInputs().get(0);
-
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
-        while (op2.isMap()) {
-            opRef2 = op2.getInputs().get(0);
-            op2 = (AbstractLogicalOperator) opRef2.getValue();
-        }
-
-        if (op2.getOperatorTag() != LogicalOperatorTag.INNERJOIN) {
-            return false;
-        }
-
-        InnerJoinOperator product = (InnerJoinOperator) op2;
-        if (!OperatorPropertiesUtil.isAlwaysTrueCond(product.getCondition().getValue())) {
-            return false;
-        }
-
-        usedInCond1AndMaps.clear();
-        cond1.getUsedVariables(usedInCond1AndMaps);
-        Mutable<ILogicalOperator> opIterRef = op.getInputs().get(0);
-        ILogicalOperator opIter = opIterRef.getValue();
-        do {
-            VariableUtilities.getUsedVariables(opIter, usedInCond1AndMaps);
-            opIterRef = opIter.getInputs().get(0);
-            opIter = opIterRef.getValue();
-        } while (opIter.isMap());
-
-        productLeftBranchVars.clear();
-        ILogicalOperator opLeft = op2.getInputs().get(0).getValue();
-        VariableUtilities.getLiveVariables(opLeft, productLeftBranchVars);
-
-        if (!OperatorPropertiesUtil.disjoint(usedInCond1AndMaps, productLeftBranchVars)) {
-            return false;
-        }
-
-        // now push the operators from in between joins, too
-        opIterRef = op.getInputs().get(0);
-        opIter = opIterRef.getValue();
-
-        Mutable<ILogicalOperator> op3Ref = product.getInputs().get(1);
-        ILogicalOperator op3 = op3Ref.getValue();
-
-        opRef2.setValue(op3);
-        op3Ref.setValue(join1);
-        opRef.setValue(product);
-        return true;
-    }
-}

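When the selective join's condition (together with the map operators under it) uses no variables from the product's left branch, the rule rotates the two joins so the selective join runs first and the product is applied last. A toy sketch of that rotation, using a hypothetical Node class and ignoring the intermediate map operators the real rule carries along:

    public class JoinThroughProductSketch {

        // Toy plan node: leaves have no children, binary operators have two.
        static class Node {
            final String label;
            Node left, right;

            Node(String label, Node left, Node right) {
                this.label = label;
                this.left = left;
                this.right = right;
            }

            @Override
            public String toString() {
                return left == null ? label : label + "(" + left + ", " + right + ")";
            }
        }

        /** Rewrites join(product(l, r), other) into product(l, join(r, other)). */
        static Node rotate(Node join) {
            Node product = join.left;
            Node oldRight = product.right;
            product.right = join;      // the product becomes the new root
            join.left = oldRight;      // the selective join now consumes the product's old right branch
            return product;
        }

        public static void main(String[] args) {
            Node product = new Node("product", new Node("L", null, null), new Node("R", null, null));
            Node join = new Node("join[c]", product, new Node("S", null, null));
            System.out.println(rotate(join));   // product(L, join[c](R, S))
        }
    }

The rotation is only valid because the eligibility check has already proved the join condition is disjoint from the left branch's live variables.
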
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
deleted file mode 100644
index d9e06f4..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushSimilarityFunctionsBelowJoin.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushFunctionsBelowJoin;
-
-/**
- * Pushes similarity function-call expressions below a join if possible.
- * Assigns the similarity function-call expressions to new variables, and replaces the original
- * expression with a corresponding variable reference expression.
- * This rule can help reduce the cost of computing expensive similarity functions by pushing them below
- * a join (which may blow up the cardinality).
- * Also, this rule may help to enable other rules such as common subexpression elimination, again to reduce
- * the number of calls to expensive similarity functions.
- * Example:
- * Before plan:
- * assign [$$10] <- [funcA(funcB(simFuncX($$3, $$4)))]
- * join (some condition)
- * join_branch_0 where $$3 and $$4 are not live
- * ...
- * join_branch_1 where $$3 and $$4 are live
- * ...
- * After plan:
- * assign [$$10] <- [funcA(funcB($$11))]
- * join (some condition)
- * join_branch_0 where $$3 and $$4 are not live
- * ...
- * join_branch_1 where $$3 and $$4 are live
- * assign[$$11] <- [simFuncX($$3, $$4)]
- * ...
- */
-public class PushSimilarityFunctionsBelowJoin extends PushFunctionsBelowJoin {
-
-    private static final Set<FunctionIdentifier> simFuncIdents = new HashSet<FunctionIdentifier>();
-    static {
-        simFuncIdents.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD);
-        simFuncIdents.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK);
-        simFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE);
-        simFuncIdents.add(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK);
-    }
-
-    public PushSimilarityFunctionsBelowJoin() {
-        super(simFuncIdents);
-    }
-}

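The before/after plans in the Javadoc are really about evaluation counts: above the join the similarity call runs once per joined pair, below the join it runs once per branch-1 tuple and is then only referenced. A small self-contained illustration of that difference, using made-up data and a stand-in similarity function rather than the AsterixDB evaluators:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PushBelowJoinCostSketch {

        static int evaluations = 0;

        // Stand-in for an expensive similarity function such as similarity-jaccard or edit-distance.
        static double expensiveSimilarity(String a, String b) {
            evaluations++;
            return a.equals(b) ? 1.0 : 0.0;
        }

        public static void main(String[] args) {
            List<String> branch0 = Arrays.asList("a", "b", "c");          // $$3/$$4 not live here
            List<List<String>> branch1 = Arrays.asList(                   // ($$3, $$4) pairs
                    Arrays.asList("x", "x"), Arrays.asList("x", "y"));

            // Before: the call sits above the join and runs once per joined pair.
            evaluations = 0;
            for (String left : branch0) {
                for (List<String> right : branch1) {
                    expensiveSimilarity(right.get(0), right.get(1));
                }
            }
            System.out.println("above the join: " + evaluations + " calls");   // 6

            // After: the call is assigned below the join, once per branch-1 tuple, then only referenced.
            evaluations = 0;
            Map<List<String>, Double> assigned = new HashMap<List<String>, Double>();
            for (List<String> right : branch1) {
                assigned.put(right, expensiveSimilarity(right.get(0), right.get(1)));
            }
            for (String left : branch0) {
                for (List<String> right : branch1) {
                    double sim = assigned.get(right);                          // variable reference only
                }
            }
            System.out.println("below the join: " + evaluations + " calls");   // 2
        }
    }
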
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java
deleted file mode 100644
index 05c68f4..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantListifyRule.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.ListSet;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnpartitionedPropertyComputer;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/*
- *
- *  unnest $x [[ at $p ]] <- $y
- *    aggregate $y <- function-call: listify@1(unresolved), Args:[$z]
- *       Rest 
- *   
- * if $y is not used above these operators, 
- * the plan fragment becomes
- *
- *  [[ runningaggregate $p <- tid]]
- *  assign $x <- $z
- *       Rest
- *  
- *
- */
-
-public class RemoveRedundantListifyRule implements IAlgebraicRewriteRule {
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        // apply it only at the top of the plan
-        ILogicalOperator op = opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        Set<LogicalVariable> varSet = new HashSet<LogicalVariable>();
-        return applyRuleDown(opRef, varSet, context);
-    }
-
-    private boolean applyRuleDown(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varSet,
-            IOptimizationContext context) throws AlgebricksException {
-        boolean changed = applies(opRef, varSet, context);
-        changed |= appliesForReverseCase(opRef, varSet, context);
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        VariableUtilities.getUsedVariables(op, varSet);
-        if (op.hasNestedPlans()) {
-            // Variables used by the parent operators should be live at op.
-            Set<LogicalVariable> localLiveVars = new ListSet<LogicalVariable>();
-            VariableUtilities.getLiveVariables(op, localLiveVars);
-            varSet.retainAll(localLiveVars);
-            AbstractOperatorWithNestedPlans aonp = (AbstractOperatorWithNestedPlans) op;
-            for (ILogicalPlan p : aonp.getNestedPlans()) {
-                for (Mutable<ILogicalOperator> r : p.getRoots()) {
-                    if (applyRuleDown(r, varSet, context)) {
-                        changed = true;
-                    }
-                    context.addToDontApplySet(this, r.getValue());
-                }
-            }
-        }
-        for (Mutable<ILogicalOperator> i : op.getInputs()) {
-            if (applyRuleDown(i, varSet, context)) {
-                changed = true;
-            }
-            context.addToDontApplySet(this, i.getValue());
-        }
-        return changed;
-    }
-
-    private boolean applies(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varUsedAbove,
-            IOptimizationContext context) throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-            return false;
-        }
-        UnnestOperator unnest1 = (UnnestOperator) op1;
-        ILogicalExpression expr = unnest1.getExpressionRef().getValue();
-        LogicalVariable unnestedVar;
-        switch (expr.getExpressionTag()) {
-            case VARIABLE:
-                unnestedVar = ((VariableReferenceExpression) expr).getVariableReference();
-                break;
-            case FUNCTION_CALL:
-                if (((AbstractFunctionCallExpression) expr).getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
-                    return false;
-                }
-                AbstractFunctionCallExpression functionCall = (AbstractFunctionCallExpression) expr;
-                ILogicalExpression functionCallArgExpr = functionCall.getArguments().get(0).getValue();
-                if (functionCallArgExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-                    return false;
-                }
-                unnestedVar = ((VariableReferenceExpression) functionCallArgExpr).getVariableReference();
-                break;
-            default:
-                return false;
-        }
-        if (varUsedAbove.contains(unnestedVar)) {
-            return false;
-        }
-
-        Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
-        AbstractLogicalOperator r = (AbstractLogicalOperator) opRef2.getValue();
-
-        if (r.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AggregateOperator agg = (AggregateOperator) r;
-        if (agg.getVariables().size() > 1) {
-            return false;
-        }
-        LogicalVariable aggVar = agg.getVariables().get(0);
-        ILogicalExpression aggFun = agg.getExpressions().get(0).getValue();
-        if (!aggVar.equals(unnestedVar)
-                || ((AbstractLogicalExpression) aggFun).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) aggFun;
-        if (!AsterixBuiltinFunctions.LISTIFY.equals(f.getFunctionIdentifier())) {
-            return false;
-        }
-        if (f.getArguments().size() != 1) {
-            return false;
-        }
-        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
-        if (((AbstractLogicalExpression) arg0).getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-        LogicalVariable paramVar = ((VariableReferenceExpression) arg0).getVariableReference();
-
-        ArrayList<LogicalVariable> assgnVars = new ArrayList<LogicalVariable>(1);
-        assgnVars.add(unnest1.getVariable());
-        ArrayList<Mutable<ILogicalExpression>> assgnExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
-        assgnExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(paramVar)));
-        AssignOperator assign = new AssignOperator(assgnVars, assgnExprs);
-        assign.getInputs().add(agg.getInputs().get(0));
-        context.computeAndSetTypeEnvironmentForOperator(assign);
-        LogicalVariable posVar = unnest1.getPositionalVariable();
-
-        if (posVar == null) {
-            opRef.setValue(assign);
-        } else {
-            ArrayList<LogicalVariable> raggVars = new ArrayList<LogicalVariable>(1);
-            raggVars.add(posVar);
-            ArrayList<Mutable<ILogicalExpression>> rAggExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
-            StatefulFunctionCallExpression tidFun = new StatefulFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TID), UnpartitionedPropertyComputer.INSTANCE);
-            rAggExprs.add(new MutableObject<ILogicalExpression>(tidFun));
-            RunningAggregateOperator rAgg = new RunningAggregateOperator(raggVars, rAggExprs);
-            rAgg.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-            opRef.setValue(rAgg);
-            context.computeAndSetTypeEnvironmentForOperator(rAgg);
-        }
-
-        return true;
-    }
-
-    private boolean appliesForReverseCase(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> varUsedAbove,
-            IOptimizationContext context) throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AggregateOperator agg = (AggregateOperator) op1;
-        if (agg.getVariables().size() > 1 || agg.getVariables().size() <= 0) {
-            return false;
-        }
-        LogicalVariable aggVar = agg.getVariables().get(0);
-        ILogicalExpression aggFun = agg.getExpressions().get(0).getValue();
-        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) aggFun;
-        if (!AsterixBuiltinFunctions.LISTIFY.equals(f.getFunctionIdentifier())) {
-            return false;
-        }
-        if (f.getArguments().size() != 1) {
-            return false;
-        }
-        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
-        if (((AbstractLogicalExpression) arg0).getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-        LogicalVariable aggInputVar = ((VariableReferenceExpression) arg0).getVariableReference();
-
-        if (agg.getInputs().size() == 0) {
-            return false;
-        }
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) agg.getInputs().get(0).getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-            return false;
-        }
-        UnnestOperator unnest = (UnnestOperator) op2;
-        if (unnest.getPositionalVariable() != null) {
-            return false;
-        }
-        if (!unnest.getVariable().equals(aggInputVar)) {
-            return false;
-        }
-        List<LogicalVariable> unnestSource = new ArrayList<LogicalVariable>();
-        VariableUtilities.getUsedVariables(unnest, unnestSource);
-        if (unnestSource.size() > 1 || unnestSource.size() <= 0) {
-            return false;
-        }
-        ArrayList<LogicalVariable> assgnVars = new ArrayList<LogicalVariable>(1);
-        assgnVars.add(aggVar);
-        ArrayList<Mutable<ILogicalExpression>> assgnExprs = new ArrayList<Mutable<ILogicalExpression>>(1);
-        assgnExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(unnestSource.get(0))));
-        AssignOperator assign = new AssignOperator(assgnVars, assgnExprs);
-        assign.getInputs().add(unnest.getInputs().get(0));
-        context.computeAndSetTypeEnvironmentForOperator(assign);
-        opRef.setValue(assign);
-        return true;
-    }
-}

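The comment block above relies on unnest-over-listify being the identity on the aggregated values, which is why the pair collapses to a plain assign (plus a running tuple-id aggregate when a positional variable is requested). A tiny stand-alone illustration of that identity, on made-up data rather than the runtime operators:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class RedundantListifySketch {

        public static void main(String[] args) {
            List<Integer> z = Arrays.asList(1, 2, 3);           // values flowing out of "Rest"

            // aggregate $y <- listify($z): collect the values into one list
            List<Integer> listified = new ArrayList<Integer>(z);

            // unnest $x <- $y: emit one value per list element (numbering them would give $p)
            List<Integer> unnested = new ArrayList<Integer>();
            for (Integer x : listified) {
                unnested.add(x);
            }

            // the round trip reproduces the input, so assign $x <- $z is equivalent
            System.out.println(unnested.equals(z));             // true
        }
    }
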
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantSelectRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantSelectRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantSelectRule.java
deleted file mode 100644
index 2ed2d2e..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveRedundantSelectRule.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This rule removes redundant select operators, e.g., select operators
- * in which the condition is TRUE.
- * Note that the ConstantFoldingRule will evaluate the condition expression
- * during compile time if it is possible.
- *
- * @author yingyib
- */
-public class RemoveRedundantSelectRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-        SelectOperator select = (SelectOperator) op;
-        ILogicalExpression cond = select.getCondition().getValue();
-        if (alwaysHold(cond)) {
-            opRef.setValue(select.getInputs().get(0).getValue());
-            return true;
-        }
-        return false;
-    }
-
-    /**
-     * Whether the condition expression always returns true.
-     * 
-     * @param cond
-     * @return true if the condition always holds; false otherwise.
-     */
-    private boolean alwaysHold(ILogicalExpression cond) {
-        if (cond.equals(ConstantExpression.TRUE)) {
-            return true;
-        }
-        if (cond.equals(new ConstantExpression(new AsterixConstantValue(ABoolean.TRUE)))) {
-            return true;
-        }
-        return false;
-    }
-}
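
For context, the two equality checks in alwaysHold() cover the two shapes in which a constant TRUE condition can reach this rule. A minimal sketch, reusing only classes the deleted file already imports (the variable names are illustrative):

    // Algebricks' generic boolean constant:
    ILogicalExpression genericTrue = ConstantExpression.TRUE;
    // The same value wrapped as an Asterix constant, e.g. left behind by constant folding:
    ILogicalExpression asterixTrue =
            new ConstantExpression(new AsterixConstantValue(ABoolean.TRUE));
    // A SelectOperator whose condition equals either form is spliced out of the plan.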

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
deleted file mode 100644
index 08daa40..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveSortInFeedIngestionRule.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class RemoveSortInFeedIngestionRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
-            return false;
-        }
-
-        AbstractLogicalOperator insertOp = op;
-        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        boolean isSourceAFeed = false;
-        while (descendantOp != null) {
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-                AqlDataSource dataSource = (AqlDataSource) ((DataSourceScanOperator) descendantOp).getDataSource();
-                if (dataSource.getDatasourceType().equals(AqlDataSourceType.FEED)) {
-                    isSourceAFeed = true;
-                }
-                break;
-            }
-            if (descendantOp.getInputs().isEmpty()) {
-                break;
-            }
-            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
-        }
-
-        if (isSourceAFeed) {
-            AbstractLogicalOperator prevOp = (AbstractLogicalOperator) insertOp.getInputs().get(0).getValue();
-            if (prevOp.getOperatorTag() == LogicalOperatorTag.ORDER) {
-                insertOp.getInputs().set(0, prevOp.getInputs().get(0));
-                return true;
-            }
-        }
-
-        return false;
-    }
-
-}
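
Roughly, the plan pattern removed above, written as an informal sketch rather than code:

    before:  INSERT_DELETE -> ORDER -> ... -> DATASOURCESCAN (FEED source)
    after:   INSERT_DELETE -> ... -> DATASOURCESCAN (FEED source)

The rule walks down from the INSERT_DELETE; if the scan at the bottom is a FEED data source and the operator directly below the insert is an ORDER, the ORDER is spliced out, presumably because imposing a sort on records arriving from a continuous feed adds nothing before insertion.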

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
deleted file mode 100644
index d8a8a02..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Removes join operators for which all of the following conditions are true:
- * 1. The live variables of one input branch of the join are not used in the upstream plan
- * 2. The join is an inner equi join
- * 3. The join condition only uses variables that correspond to primary keys of the same dataset
- * Notice that the last condition implies a 1:1 join, i.e., the join does not change the result cardinality.
- * Joins that satisfy the above conditions may be introduced by other rules
- * which use surrogate optimizations. Such an optimization aims to reduce data copies and communication costs by
- * using the primary keys as surrogates for the desired data items. Typically,
- * such a surrogate-based plan introduces a top-level join to finally resolve
- * the surrogates to the desired data items.
- * In case the upstream plan does not require the original data items at all, such a top-level join is unnecessary.
- * The purpose of this rule is to remove such unnecessary joins.
- */
-public class RemoveUnusedOneToOneEquiJoinRule implements IAlgebraicRewriteRule {
-
-    private final Set<LogicalVariable> parentsUsedVars = new HashSet<LogicalVariable>();
-    private final List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
-    private final List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
-    private final List<LogicalVariable> pkVars = new ArrayList<LogicalVariable>();
-    private final List<DataSourceScanOperator> dataScans = new ArrayList<DataSourceScanOperator>();
-    private boolean hasRun = false;
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        if (hasRun) {
-            return false;
-        }
-        hasRun = true;
-        if (removeUnusedJoin(opRef)) {
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        return false;
-    }
-
-    private boolean removeUnusedJoin(Mutable<ILogicalOperator> opRef) throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        boolean modified = false;
-
-        usedVars.clear();
-        VariableUtilities.getUsedVariables(op, usedVars);
-        // Propagate used variables from parents downwards.
-        parentsUsedVars.addAll(usedVars);
-
-        int numInputs = op.getInputs().size();
-        for (int i = 0; i < numInputs; i++) {
-            Mutable<ILogicalOperator> childOpRef = op.getInputs().get(i);
-            int unusedJoinBranchIndex = removeJoinFromInputBranch(childOpRef);
-            if (unusedJoinBranchIndex >= 0) {
-                int usedBranchIndex = (unusedJoinBranchIndex == 0) ? 1 : 0;
-                // Remove the join at input index i by hooking up op's input i with
-                // the join's branch at usedBranchIndex (the branch that is still used upstream).
-                AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) childOpRef.getValue();
-                op.getInputs().set(i, joinOp.getInputs().get(usedBranchIndex));
-                modified = true;
-            }
-            // Descend into children.
-            if (removeUnusedJoin(childOpRef)) {
-                modified = true;
-            }
-        }
-        return modified;
-    }
-
-    private int removeJoinFromInputBranch(Mutable<ILogicalOperator> opRef) throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.INNERJOIN) {
-            return -1;
-        }
-
-        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
-        // Make sure the join is an equi-join.
-        if (!isEquiJoin(joinOp.getCondition())) {
-            return -1;
-        }
-
-        int unusedJoinBranchIndex = -1;
-        for (int i = 0; i < joinOp.getInputs().size(); i++) {
-            liveVars.clear();
-            VariableUtilities.getLiveVariables(joinOp.getInputs().get(i).getValue(), liveVars);
-            liveVars.retainAll(parentsUsedVars);
-            if (liveVars.isEmpty()) {
-                // None of the live variables from this branch are used by its parents.
-                unusedJoinBranchIndex = i;
-                break;
-            }
-        }
-        if (unusedJoinBranchIndex < 0) {
-            // The variables from both branches are used in the upstream plan. We cannot remove this join.
-            return -1;
-        }
-
-        // Collect the variables used by the join condition.
-        usedVars.clear();
-        VariableUtilities.getUsedVariables(joinOp, usedVars);
-
-        // Check whether all used variables originate from primary keys of exactly the same dataset.
-        // Collect a list of datascans whose primary key variables are used in the join condition.
-        gatherProducingDataScans(opRef, usedVars, dataScans);
-
-        // Check that all datascans scan the same dataset, and that the join condition
-        // only used primary key variables of those datascans.
-        for (int i = 0; i < dataScans.size(); i++) {
-            if (i > 0) {
-                DatasetDataSource prevAqlDataSource = (DatasetDataSource) dataScans.get(i - 1).getDataSource();
-                DatasetDataSource currAqlDataSource = (DatasetDataSource) dataScans.get(i).getDataSource();
-                if (!prevAqlDataSource.getDataset().equals(currAqlDataSource.getDataset())) {
-                    return -1;
-                }
-            }
-            // Remove from the used variables all the primary key vars of this dataset.
-            fillPKVars(dataScans.get(i), pkVars);
-            usedVars.removeAll(pkVars);
-        }
-        if (!usedVars.isEmpty()) {
-            // The join condition also uses some other variables that are not primary
-            // keys from datasource scans of the same dataset.
-            return -1;
-        }
-        return unusedJoinBranchIndex;
-    }
-
-    private void gatherProducingDataScans(Mutable<ILogicalOperator> opRef, List<LogicalVariable> joinUsedVars,
-            List<DataSourceScanOperator> dataScans) {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN) {
-            for (Mutable<ILogicalOperator> inputOp : op.getInputs()) {
-                gatherProducingDataScans(inputOp, joinUsedVars, dataScans);
-            }
-            return;
-        }
-        DataSourceScanOperator dataScan = (DataSourceScanOperator) op;
-        fillPKVars(dataScan, pkVars);
-        // Check if join uses all PK vars.
-        if (joinUsedVars.containsAll(pkVars)) {
-            dataScans.add(dataScan);
-        }
-    }
-
-    private void fillPKVars(DataSourceScanOperator dataScan, List<LogicalVariable> pkVars) {
-        pkVars.clear();
-        DatasetDataSource datasetDataSource = (DatasetDataSource) dataScan.getDataSource();
-        pkVars.clear();
-        if (datasetDataSource.getDataset().getDatasetDetails() instanceof InternalDatasetDetails) {
-            int numPKs = DatasetUtils.getPartitioningKeys(datasetDataSource.getDataset()).size();
-            for (int i = 0; i < numPKs; i++) {
-                pkVars.add(dataScan.getVariables().get(i));
-            }
-        }
-    }
-
-    private boolean isEquiJoin(Mutable<ILogicalExpression> conditionExpr) {
-        AbstractLogicalExpression expr = (AbstractLogicalExpression) conditionExpr.getValue();
-        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-            FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-            if (funcIdent != AlgebricksBuiltinFunctions.AND && funcIdent != AlgebricksBuiltinFunctions.EQ) {
-                return false;
-            }
-        }
-        return true;
-    }
-}
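
An informal sketch of the surrogate-style plan described in the class comment ($pk, $pk2 and the branch names are made up for illustration):

    before:  upstream plan (uses only $pk)
                -> INNERJOIN ($pk = $pk2)
                     branch A: produces $pk
                     branch B: resolves $pk2 back to full records that the
                               upstream plan never reads
    after:   upstream plan (uses only $pk) -> branch A

Both $pk and $pk2 are primary-key variables of the same dataset, so the join is 1:1; because none of branch B's live variables are used upstream, the join and branch B are dropped and branch A feeds the upstream plan directly.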

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
deleted file mode 100644
index 8de761a..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.algebra.operators.CommitOperator;
-import edu.uci.ics.asterix.algebra.operators.physical.CommitPOperator;
-import edu.uci.ics.asterix.common.transactions.JobId;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class ReplaceSinkOpWithCommitOpRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        // TODO Auto-generated method stub
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.SINK) {
-            return false;
-        }
-        SinkOperator sinkOperator = (SinkOperator) op;
-
-        List<Mutable<ILogicalExpression>> primaryKeyExprs = null;
-        int datasetId = 0;
-        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) sinkOperator.getInputs().get(0).getValue();
-        while (descendantOp != null) {
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.INDEX_INSERT_DELETE) {
-                IndexInsertDeleteOperator indexInsertDeleteOperator = (IndexInsertDeleteOperator) descendantOp;
-                if (!indexInsertDeleteOperator.isBulkload()) {
-                    primaryKeyExprs = indexInsertDeleteOperator.getPrimaryKeyExpressions();
-                    datasetId = ((DatasetDataSource) indexInsertDeleteOperator.getDataSourceIndex().getDataSource())
-                            .getDataset().getDatasetId();
-                    break;
-                }
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
-                InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) descendantOp;
-                if (!insertDeleteOperator.isBulkload()) {
-                    primaryKeyExprs = insertDeleteOperator.getPrimaryKeyExpressions();
-                    datasetId = ((DatasetDataSource) insertDeleteOperator.getDataSource()).getDataset().getDatasetId();
-                    break;
-                }
-            }
-            if (descendantOp.getInputs().size() < 1) {
-                break;
-            }
-            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
-        }
-
-        if (primaryKeyExprs == null) {
-            return false;
-        }
-
-        //copy primaryKeyExprs
-        List<LogicalVariable> primaryKeyLogicalVars = new ArrayList<LogicalVariable>();
-        for (Mutable<ILogicalExpression> expr : primaryKeyExprs) {
-            VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr.getValue();
-            primaryKeyLogicalVars.add(new LogicalVariable(varRefExpr.getVariableReference().getId()));
-        }
-
-        //get JobId(TransactorId)
-        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-        JobId jobId = mp.getJobId();
-
-        //create the logical and physical operator
-        CommitOperator commitOperator = new CommitOperator(primaryKeyLogicalVars);
-        CommitPOperator commitPOperator = new CommitPOperator(jobId, datasetId, primaryKeyLogicalVars);
-        commitOperator.setPhysicalOperator(commitPOperator);
-
-        //create ExtensionOperator and put the commitOperator in it.
-        ExtensionOperator extensionOperator = new ExtensionOperator(commitOperator);
-        extensionOperator.setPhysicalOperator(commitPOperator);
-
-        //update plan link
-        extensionOperator.getInputs().add(sinkOperator.getInputs().get(0));
-        context.computeAndSetTypeEnvironmentForOperator(extensionOperator);
-        opRef.setValue(extensionOperator);
-        return true;
-    }
-
-}
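
In outline, the rewrite above replaces the SINK at the top of an insert/delete pipeline with a commit operator (a sketch of the plan shape, not a second copy of the code):

    before:  SINK -> ... -> (INDEX_)INSERT_DELETE -> ...
    after:   ExtensionOperator(CommitOperator(pk vars),
                 physical op = CommitPOperator(jobId, datasetId, pk vars))
             -> ... -> (INDEX_)INSERT_DELETE -> ...

The primary-key expressions and dataset id are taken from the first non-bulk-load INSERT_DELETE or INDEX_INSERT_DELETE found below the sink, and the transaction's JobId comes from the AqlMetadataProvider.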


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
deleted file mode 100644
index 956b447..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.io.File;
-import java.rmi.server.UnicastRemoteObject;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.kohsuke.args4j.CmdLineException;
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.asterix.api.common.AsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.api.AsterixThreadFactory;
-import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
-import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.transactions.IRecoveryManager;
-import edu.uci.ics.asterix.common.transactions.IRecoveryManager.SystemState;
-import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataNode;
-import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
-import edu.uci.ics.asterix.metadata.api.IMetadataNode;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataBootstrap;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
-import edu.uci.ics.hyracks.api.application.INCApplicationContext;
-import edu.uci.ics.hyracks.api.application.INCApplicationEntryPoint;
-import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
-import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
-
-public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
-    private static final Logger LOGGER = Logger.getLogger(NCApplicationEntryPoint.class.getName());
-
-    @Option(name = "-metadata-port", usage = "IP port to bind metadata listener (default: random port)", required = false)
-    public int metadataRmiPort = 0;
-
-    @Option(name = "-initial-run", usage = "A flag indicating if it's the first time the NC is started (default: false)", required = false)
-    public boolean initialRun = false;
-
-    private INCApplicationContext ncApplicationContext = null;
-    private IAsterixAppRuntimeContext runtimeContext;
-    private String nodeId;
-    private boolean isMetadataNode = false;
-    private boolean stopInitiated = false;
-    private SystemState systemState = SystemState.NEW_UNIVERSE;
-    private final long NON_SHARP_CHECKPOINT_TARGET_LSN = -1;
-
-    @Override
-    public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
-        CmdLineParser parser = new CmdLineParser(this);
-
-        try {
-            parser.parseArgument(args);
-        } catch (CmdLineException e) {
-            System.err.println(e.getMessage());
-            System.err.println("Usage:");
-            parser.printUsage(System.err);
-            throw e;
-        }
-
-        ncAppCtx.setThreadFactory(new AsterixThreadFactory(ncAppCtx.getLifeCycleComponentManager()));
-        ncApplicationContext = ncAppCtx;
-        nodeId = ncApplicationContext.getNodeId();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting Asterix node controller: " + nodeId);
-        }
-
-        runtimeContext = new AsterixAppRuntimeContext(ncApplicationContext);
-        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                .getMetadataProperties();
-        if (!metadataProperties.getNodeNames().contains(ncApplicationContext.getNodeId())) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Substitute node joining : " + ncApplicationContext.getNodeId());
-            }
-            updateOnNodeJoin();
-        }
-        runtimeContext.initialize();
-        ncApplicationContext.setApplicationObject(runtimeContext);
-
-        if (initialRun) {
-            LOGGER.info("System is being initialized. (first run)");
-            systemState = SystemState.NEW_UNIVERSE;
-        } else {
-            // #. recover if the system is corrupted by checking system state.
-            IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
-            systemState = recoveryMgr.getSystemState();
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("System is in a state: " + systemState);
-            }
-
-            if (systemState != SystemState.NEW_UNIVERSE) {
-                PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
-                        .getLocalResourceRepository();
-                localResourceRepository.initialize(nodeId, null, false, runtimeContext.getResourceIdFactory());
-            }
-            
-            if (systemState == SystemState.CORRUPTED) {
-                recoveryMgr.startRecovery(true);
-            }
-        }
-
-    }
-
-    @Override
-    public void stop() throws Exception {
-        if (!stopInitiated) {
-            runtimeContext.setShuttingdown(true);
-            stopInitiated = true;
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Stopping Asterix node controller: " + nodeId);
-            }
-
-            IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
-            recoveryMgr.checkpoint(true, NON_SHARP_CHECKPOINT_TARGET_LSN);
-
-            if (isMetadataNode) {
-                MetadataBootstrap.stopUniverse();
-            }
-
-            ncApplicationContext.getLifeCycleComponentManager().stopAll(false);
-            runtimeContext.deinitialize();
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Duplicate attempt to stop ignored: " + nodeId);
-            }
-        }
-    }
-
-    @Override
-    public void notifyStartupComplete() throws Exception {
-        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                .getMetadataProperties();
-
-        if (systemState == SystemState.NEW_UNIVERSE) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("System state: " + SystemState.NEW_UNIVERSE);
-                LOGGER.info("Node ID: " + nodeId);
-                LOGGER.info("Stores: " + metadataProperties.getStores());
-                LOGGER.info("Root Metadata Store: " + metadataProperties.getStores().get(nodeId)[0]);
-            }
-
-            PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
-                    .getLocalResourceRepository();
-            localResourceRepository.initialize(nodeId, metadataProperties.getStores().get(nodeId)[0], true, null);
-        }
-
-        IAsterixStateProxy proxy = null;
-        isMetadataNode = nodeId.equals(metadataProperties.getMetadataNodeName());
-        if (isMetadataNode) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Bootstrapping metadata");
-            }
-            MetadataNode.INSTANCE.initialize(runtimeContext);
-
-            proxy = (IAsterixStateProxy) ncApplicationContext.getDistributedState();
-            if (proxy == null) {
-                throw new IllegalStateException("Metadata node cannot access distributed state");
-            }
-
-            // This is a special case; we just give the metadataNode directly.
-            // This way we can delay the registration of the metadataNode until
-            // it is completely initialized.
-            MetadataManager.INSTANCE = new MetadataManager(proxy, MetadataNode.INSTANCE);
-            MetadataBootstrap.startUniverse(((IAsterixPropertiesProvider) runtimeContext), ncApplicationContext,
-                    systemState == SystemState.NEW_UNIVERSE);
-            MetadataBootstrap.startDDLRecovery();
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Metadata node bound");
-            }
-        }
-
-        ExternalLibraryBootstrap.setUpExternaLibraries(isMetadataNode);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting lifecycle components");
-        }
-
-        Map<String, String> lifecycleMgmtConfiguration = new HashMap<String, String>();
-        String dumpPathKey = LifeCycleComponentManager.Config.DUMP_PATH_KEY;
-        String dumpPath = metadataProperties.getCoredumpPath(nodeId);
-        lifecycleMgmtConfiguration.put(dumpPathKey, dumpPath);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Coredump directory for NC is: " + dumpPath);
-        }
-        ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
-        lccm.configure(lifecycleMgmtConfiguration);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Configured:" + lccm);
-        }
-        ncApplicationContext.setStateDumpHandler(new AsterixStateDumpHandler(ncApplicationContext.getNodeId(), lccm
-                .getDumpPath(), lccm));
-
-        lccm.startAll();
-
-        IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
-        recoveryMgr.checkpoint(true, NON_SHARP_CHECKPOINT_TARGET_LSN);
-
-        if (isMetadataNode) {
-            IMetadataNode stub = null;
-            stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE, metadataRmiPort);
-            proxy.setMetadataNode(stub);
-        }
-
-        // Reclaim storage for temporary datasets.
-        String[] ioDevices = AsterixClusterProperties.INSTANCE.getIODevices(nodeId);
-        String[] nodeStores = metadataProperties.getStores().get(nodeId);
-        int numIoDevices = AsterixClusterProperties.INSTANCE.getNumberOfIODevices(nodeId);
-        for (int j = 0; j < nodeStores.length; j++) {
-            for (int k = 0; k < numIoDevices; k++) {
-                File f = new File(ioDevices[k] + File.separator + nodeStores[j] + File.separator + "temp");
-                f.delete();
-            }
-        }
-
-        // TODO
-        // reclaim storage for orphaned index artifacts in NCs.
-
-    }
-
-    private void updateOnNodeJoin() {
-        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                .getMetadataProperties();
-        if (!metadataProperties.getNodeNames().contains(nodeId)) {
-            metadataProperties.getNodeNames().add(nodeId);
-            Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
-            String asterixInstanceName = cluster.getInstanceName();
-            AsterixTransactionProperties txnProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                    .getTransactionProperties();
-            Node self = null;
-            for (Node node : cluster.getSubstituteNodes().getNode()) {
-                String ncId = asterixInstanceName + "_" + node.getId();
-                if (ncId.equalsIgnoreCase(nodeId)) {
-                    String storeDir = node.getStore() == null ? cluster.getStore() : node.getStore();
-                    metadataProperties.getStores().put(nodeId, storeDir.split(","));
-
-                    String coredumpPath = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
-                    metadataProperties.getCoredumpPaths().put(nodeId, coredumpPath);
-
-                    String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
-                    txnProperties.getLogDirectories().put(nodeId, txnLogDir);
-
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Store set to : " + storeDir);
-                        LOGGER.info("Coredump dir set to : " + coredumpPath);
-                        LOGGER.info("Transaction log dir set to :" + txnLogDir);
-                    }
-                    self = node;
-                    break;
-                }
-            }
-            if (self != null) {
-                cluster.getSubstituteNodes().getNode().remove(self);
-                cluster.getNode().add(self);
-            } else {
-                throw new IllegalStateException("Unknown node joining the cluster");
-            }
-        }
-    }
-}
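
One caveat about the temporary-dataset cleanup in notifyStartupComplete(): java.io.File.delete() removes a directory only when it is empty, so the loop above clears only temp directories that contain no files. A self-contained sketch of a recursive variant (the helper below is illustrative standard-library code, not code from this repository):

    import java.io.File;

    final class TempDirCleaner {
        /** Recursively deletes a file or a directory tree; returns true if everything was removed. */
        static boolean deleteRecursively(File f) {
            File[] children = f.listFiles();   // null for regular files (and on I/O errors)
            boolean ok = true;
            if (children != null) {
                for (File child : children) {
                    ok &= deleteRecursively(child);
                }
            }
            return f.delete() && ok;
        }
    }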

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
deleted file mode 100644
index c27f859..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.result;
-
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.comm.IFrame;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
-
-public class ResultReader {
-    private final IHyracksDataset hyracksDataset;
-
-    private IHyracksDatasetReader reader;
-
-    private IFrameTupleAccessor frameTupleAccessor;
-
-    // Number of parallel result reader buffers
-    public static final int NUM_READERS = 1;
-
-    public static final int FRAME_SIZE = AsterixAppContextInfo.getInstance().getCompilerProperties().getFrameSize();
-
-    public ResultReader(IHyracksClientConnection hcc, IHyracksDataset hdc) throws Exception {
-        hyracksDataset = hdc;
-    }
-
-    public void open(JobId jobId, ResultSetId resultSetId) throws HyracksDataException {
-        reader = hyracksDataset.createReader(jobId, resultSetId);
-        frameTupleAccessor = new ResultFrameTupleAccessor();
-    }
-
-    public Status getStatus() {
-        return reader.getResultStatus();
-    }
-
-    public int read(IFrame frame) throws HyracksDataException {
-        return reader.read(frame);
-    }
-
-    public IFrameTupleAccessor getFrameTupleAccessor() {
-        return frameTupleAccessor;
-    }
-}
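
A sketch of how ResultReader is meant to be driven, mirroring the way ResultUtils.displayResults (below) uses it; the connection, dataset handle, job id and result-set id are assumed to come from the caller, and the imports are the same as in the two files above:

    void consumeResult(IHyracksClientConnection hcc, IHyracksDataset hdc,
            JobId jobId, ResultSetId resultSetId) throws Exception {
        ResultReader resultReader = new ResultReader(hcc, hdc);
        resultReader.open(jobId, resultSetId);

        IFrame frame = new VSizeFrame(new FrameManager(ResultReader.FRAME_SIZE));
        IFrameTupleAccessor fta = resultReader.getFrameTupleAccessor();
        while (resultReader.read(frame) > 0) {
            fta.reset(frame.getBuffer());
            // ... consume fta.getTupleCount() tuples from this frame ...
            frame.getBuffer().clear();
        }
    }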

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
deleted file mode 100644
index 3a4fd5f..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.result;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.nio.charset.Charset;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.http.ParseException;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.api.http.servlet.APIServlet;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.api.comm.IFrame;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.comm.VSizeFrame;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.control.nc.resources.memory.FrameManager;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
-
-public class ResultUtils {
-    private static final Charset UTF_8 = Charset.forName("UTF-8");
-
-    static Map<Character, String> HTML_ENTITIES = new HashMap<Character, String>();
-
-    static {
-        HTML_ENTITIES.put('"', "&quot;");
-        HTML_ENTITIES.put('&', "&amp;");
-        HTML_ENTITIES.put('<', "&lt;");
-        HTML_ENTITIES.put('>', "&gt;");
-    }
-
-    public static String escapeHTML(String s) {
-        for (Character c : HTML_ENTITIES.keySet()) {
-            if (s.indexOf(c) >= 0) {
-                s = s.replace(c.toString(), HTML_ENTITIES.get(c));
-            }
-        }
-        return s;
-    }
-
-    public static void displayCSVHeader(ARecordType recordType, SessionConfig conf) {
-        // If HTML-ifying, we have to output this here before the header -
-        // pretty ugly
-        if (conf.is(SessionConfig.FORMAT_HTML)) {
-            conf.out().println("<h4>Results:</h4>");
-            conf.out().println("<pre>");
-        }
-
-        String[] fieldNames = recordType.getFieldNames();
-        boolean notfirst = false;
-        for (String name : fieldNames) {
-            if (notfirst) {
-                conf.out().print(',');
-            }
-            notfirst = true;
-            conf.out().print('"');
-            conf.out().print(name.replace("\"", "\"\""));
-            conf.out().print('"');
-        }
-        conf.out().print("\r\n");
-    }
-
-    public static FrameManager resultDisplayFrameMgr = new FrameManager(ResultReader.FRAME_SIZE);
-
-    public static void displayResults(ResultReader resultReader, SessionConfig conf)
-            throws HyracksDataException {
-        IFrameTupleAccessor fta = resultReader.getFrameTupleAccessor();
-
-        IFrame frame = new VSizeFrame(resultDisplayFrameMgr);
-        int bytesRead = resultReader.read(frame);
-        ByteBufferInputStream bbis = new ByteBufferInputStream();
-
-        // Whether we need to separate top-level ADM instances with commas
-        boolean need_commas = true;
-        // Whether this is the first instance being output
-        boolean notfirst = false;
-
-        // If we're outputting CSV with a header, the HTML header was already
-        // output by displayCSVHeader(), so skip it here
-        if (conf.is(SessionConfig.FORMAT_HTML) &&
-            ! (conf.fmt() == OutputFormat.CSV && conf.is(SessionConfig.FORMAT_CSV_HEADER))) {
-            conf.out().println("<h4>Results:</h4>");
-            conf.out().println("<pre>");
-        }
-
-        switch (conf.fmt()) {
-            case CSV:
-                need_commas = false;
-                break;
-            case JSON:
-            case ADM:
-                // Conveniently, JSON and ADM have the same syntax for an
-                // "ordered list", and our representation of the result of a
-                // statement is an ordered list of instances.
-                conf.out().print("[ ");
-                break;
-        }
-
-        if (bytesRead > 0) {
-            do {
-                try {
-                    fta.reset(frame.getBuffer());
-                    int last = fta.getTupleCount();
-                    String result;
-                    for (int tIndex = 0; tIndex < last; tIndex++) {
-                        int start = fta.getTupleStartOffset(tIndex);
-                        int length = fta.getTupleEndOffset(tIndex) - start;
-                        bbis.setByteBuffer(frame.getBuffer(), start);
-                        byte[] recordBytes = new byte[length];
-                        int numread = bbis.read(recordBytes, 0, length);
-                        if (conf.fmt() == OutputFormat.CSV) {
-                            if ( (numread > 0) && (recordBytes[numread-1] == '\n') ) {
-                                numread--;
-                            }
-                        }
-                        result = new String(recordBytes, 0, numread, UTF_8);
-                        if (need_commas && notfirst) {
-                            conf.out().print(", ");
-                        }
-                        notfirst = true;
-                        conf.out().print(result);
-                        if (conf.fmt() == OutputFormat.CSV) {
-                            conf.out().print("\r\n");
-                        }
-                    }
-                    frame.getBuffer().clear();
-                } finally {
-                    try {
-                        bbis.close();
-                    } catch (IOException e) {
-                        throw new HyracksDataException(e);
-                    }
-                }
-            } while (resultReader.read(frame) > 0);
-        }
-
-        conf.out().flush();
-
-        switch (conf.fmt()) {
-            case JSON:
-            case ADM:
-                conf.out().println(" ]");
-                break;
-            case CSV:
-                // Nothing to do
-                break;
-        }
-
-        if (conf.is(SessionConfig.FORMAT_HTML)) {
-            conf.out().println("</pre>");
-        }
-    }
-
-    public static JSONObject getErrorResponse(int errorCode, String errorMessage, String errorSummary,
-            String errorStackTrace) {
-        JSONObject errorResp = new JSONObject();
-        JSONArray errorArray = new JSONArray();
-        errorArray.put(errorCode);
-        errorArray.put(errorMessage);
-        try {
-            errorResp.put("error-code", errorArray);
-            if (!errorSummary.equals(""))
-                errorResp.put("summary", errorSummary);
-            if (!errorStackTrace.equals(""))
-                errorResp.put("stacktrace", errorStackTrace);
-        } catch (JSONException e) {
-            // TODO(madhusudancs): Figure out what to do when JSONException occurs while building the results.
-        }
-        return errorResp;
-    }
-
-    public static void webUIErrorHandler(PrintWriter out, Exception e) {
-        String errorTemplate = readTemplateFile("/webui/errortemplate.html", "%s\n%s\n%s");
-
-        String errorOutput = String.format(errorTemplate, escapeHTML(extractErrorMessage(e)),
-                escapeHTML(extractErrorSummary(e)), escapeHTML(extractFullStackTrace(e)));
-        out.println(errorOutput);
-    }
-
-    public static void webUIParseExceptionHandler(PrintWriter out, Throwable e, String query) {
-        String errorTemplate = readTemplateFile("/webui/errortemplate_message.html", "<pre class=\"error\">%s\n</pre>");
-
-        String errorOutput = String.format(errorTemplate, buildParseExceptionMessage(e, query));
-        out.println(errorOutput);
-    }
-
-    public static void apiErrorHandler(PrintWriter out, Exception e) {
-        int errorCode = 99;
-        if (e instanceof ParseException) {
-            errorCode = 2;
-        } else if (e instanceof AlgebricksException) {
-            errorCode = 3;
-        } else if (e instanceof HyracksDataException) {
-            errorCode = 4;
-        }
-
-        JSONObject errorResp = ResultUtils.getErrorResponse(errorCode, extractErrorMessage(e), extractErrorSummary(e),
-                extractFullStackTrace(e));
-        out.write(errorResp.toString());
-    }
-
-    public static String buildParseExceptionMessage(Throwable e, String query) {
-        StringBuilder errorMessage = new StringBuilder();
-        String message = e.getMessage();
-        message = message.replace("<", "&lt");
-        message = message.replace(">", "&gt");
-        errorMessage.append("SyntaxError: " + message + "\n");
-        int pos = message.indexOf("line");
-        if (pos > 0) {
-            Pattern p = Pattern.compile("\\d+");
-            Matcher m = p.matcher(message);
-            if (m.find(pos)) {
-                int lineNo = Integer.parseInt(message.substring(m.start(), m.end()));
-                String[] lines = query.split("\n");
-                if (lineNo > lines.length) {
-                    errorMessage.append("===> &ltBLANK LINE&gt \n");
-                } else {
-                    String line = lines[lineNo - 1];
-                    errorMessage.append("==> " + line);
-                }
-            }
-        }
-        return errorMessage.toString();
-    }
-
-    private static Throwable getRootCause(Throwable cause) {
-        Throwable nextCause = cause.getCause();
-        while (nextCause != null) {
-            cause = nextCause;
-            nextCause = cause.getCause();
-        }
-        return cause;
-    }
-
-    /**
-     * Extract the message in the root cause of the stack trace:
-     *
-     * @param e
-     * @return error message string.
-     */
-    private static String extractErrorMessage(Throwable e) {
-        Throwable cause = getRootCause(e);
-        String fullyQualifiedExceptionClassName = cause.getClass().getName();
-        String[] hierarchySplits = fullyQualifiedExceptionClassName.split("\\.");
-        //try returning the class without package qualification
-        String exceptionClassName = hierarchySplits[hierarchySplits.length - 1];
-        String localizedMessage = cause.getLocalizedMessage();
-        if(localizedMessage == null){
-            localizedMessage = "Internal error. Please check instance logs for further details.";
-        }
-        return localizedMessage + " [" + exceptionClassName + "]";
-    }
-
-    /**
-     * Extract the meaningful part of a stack trace:
-     * a. the causes in the stack trace hierarchy
-     * b. the top stack frame of each cause
-     *
-     * @param e
-     * @return the concatenated message containing a and b.
-     */
-    private static String extractErrorSummary(Throwable e) {
-        StringBuilder errorMessageBuilder = new StringBuilder();
-        Throwable cause = e;
-        errorMessageBuilder.append(cause.getLocalizedMessage());
-        while (cause != null) {
-            StackTraceElement[] stackTraceElements = cause.getStackTrace();
-            errorMessageBuilder.append(stackTraceElements.length > 0 ? "\n caused by: " + stackTraceElements[0] : "");
-            cause = cause.getCause();
-        }
-        return errorMessageBuilder.toString();
-    }
-
-    /**
-     * Extract the full stack trace:
-     *
-     * @param e
-     * @return the string containing the full stack trace of the error.
-     */
-    private static String extractFullStackTrace(Throwable e) {
-        StringWriter stringWriter = new StringWriter();
-        PrintWriter printWriter = new PrintWriter(stringWriter);
-        e.printStackTrace(printWriter);
-        return stringWriter.toString();
-    }
-
-    /**
-     * Read the template file which is stored as a resource and return its content. If the file does not exist or is
-     * not readable return the default template string.
-     *
-     * @param path
-     *            The path to the resource template file
-     * @param defaultTemplate
-     *            The default template string if the template file does not exist or is not readable
-     * @return The template string to be used to render the output.
-     */
-    private static String readTemplateFile(String path, String defaultTemplate) {
-        String errorTemplate = defaultTemplate;
-        try {
-            String resourcePath = "/webui/errortemplate_message.html";
-            InputStream is = APIServlet.class.getResourceAsStream(resourcePath);
-            InputStreamReader isr = new InputStreamReader(is);
-            StringBuilder sb = new StringBuilder();
-            BufferedReader br = new BufferedReader(isr);
-            String line = br.readLine();
-
-            while (line != null) {
-                sb.append(line);
-                line = br.readLine();
-            }
-            errorTemplate = sb.toString();
-        } catch (IOException ioe) {
-            // If there is an IOException reading the error template html file, default value of error template is used.
-        }
-        return errorTemplate;
-    }
-}
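
A small usage sketch for buildParseExceptionMessage above; the exception text and query are made up, and the output shown is approximate:

    Throwable cause = new Exception("Syntax error at line 2, column 5: unexpected token");
    String msg = ResultUtils.buildParseExceptionMessage(cause, "select 1;\nselct 2;");
    // msg is roughly:
    //   SyntaxError: Syntax error at line 2, column 5: unexpected token
    //   ==> selct 2;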

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java b/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
deleted file mode 100644
index a788c37..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.std.misc;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class ConstantTupleSourceOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private int[] fieldSlots;
-    private byte[] tupleData;
-    private int tupleSize;
-
-    public ConstantTupleSourceOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc, int[] fieldSlots,
-            byte[] tupleData, int tupleSize) {
-        super(spec, 0, 1);
-        this.tupleData = tupleData;
-        this.fieldSlots = fieldSlots;
-        this.tupleSize = tupleSize;
-        recordDescriptors[0] = recDesc;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        return new ConstantTupleSourceOperatorNodePushable(ctx, fieldSlots, tupleData, tupleSize);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java b/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
deleted file mode 100644
index ba75fb0..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.dataflow.std.misc;
-
-import edu.uci.ics.hyracks.api.comm.VSizeFrame;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-public class ConstantTupleSourceOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-    private IHyracksTaskContext ctx;
-
-    private int[] fieldSlots;
-    private byte[] tupleData;
-    private int tupleSize;
-
-    public ConstantTupleSourceOperatorNodePushable(IHyracksTaskContext ctx, int[] fieldSlots, byte[] tupleData,
-            int tupleSize) {
-        super();
-        this.fieldSlots = fieldSlots;
-        this.tupleData = tupleData;
-        this.tupleSize = tupleSize;
-        this.ctx = ctx;
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
-        if (fieldSlots != null && tupleData != null && tupleSize > 0)
-            appender.append(fieldSlots, tupleData, 0, tupleSize);
-        writer.open();
-        try {
-            appender.flush(writer, true);
-        }
-        finally {
-            writer.close();
-        }
-    }
-}
\ No newline at end of file
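
The two classes removed above form the usual Hyracks descriptor/pushable pair: the descriptor ships the serialized constant tuple inside the job specification, and the pushable writes it out as a single frame at runtime. Below is a hedged sketch of how a job builder typically produces the (fieldSlots, tupleData, tupleSize) constructor arguments; only the descriptor's constructor signature comes from the diff above, while the ArrayTupleBuilder usage and the single integer field are assumptions:

    import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
    import edu.uci.ics.hyracks.api.job.JobSpecification;
    import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
    import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;

    // Sketch only: build a one-field constant tuple and hand its bytes to the descriptor.
    public static ConstantTupleSourceOperatorDescriptor constantIntSource(JobSpecification spec,
            RecordDescriptor recDesc) throws HyracksDataException {
        ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
        tb.addField(IntegerSerializerDeserializer.INSTANCE, 42); // hypothetical single int field
        return new ConstantTupleSourceOperatorDescriptor(spec, recDesc,
                tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
    }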

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
new file mode 100644
index 0000000..8ccc9d5
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -0,0 +1,411 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.PrintWriter;
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.json.JSONException;
+
+import edu.uci.ics.asterix.api.common.Job.SubmissionMode;
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+import edu.uci.ics.asterix.aql.expression.FunctionDecl;
+import edu.uci.ics.asterix.aql.expression.Query;
+import edu.uci.ics.asterix.aql.expression.visitor.AQLPrintVisitor;
+import edu.uci.ics.asterix.aql.rewrites.AqlRewriter;
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
+import edu.uci.ics.asterix.dataflow.data.common.AqlMergeAggregationExpressionFactory;
+import edu.uci.ics.asterix.dataflow.data.common.AqlNullableTypeComputer;
+import edu.uci.ics.asterix.dataflow.data.common.AqlPartialAggregationTypeComputer;
+import edu.uci.ics.asterix.formats.base.IDataFormat;
+import edu.uci.ics.asterix.jobgen.AqlLogicalExpressionJobGen;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.optimizer.base.RuleCollections;
+import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
+import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
+import edu.uci.ics.asterix.translator.AqlExpressionToPlanTranslator;
+import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompiler;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompilerFactory;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialFixpointRuleController;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialOnceRuleController;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPlotter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AlgebricksOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IOptimizationContextFactory;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+/**
+ * Provides helper methods for compilation of a query into a JobSpec and submission
+ * to Hyracks through the Hyracks client interface.
+ */
+public class APIFramework {
+
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
+        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultLogicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+        SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
+        SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
+        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildInitialTranslationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildTypeInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildAutogenerateIDRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildNormalizationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildLoadFieldsRuleCollection()));
+        // fj
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildFuzzyJoinRuleCollection()));
+        //
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildNormalizationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildLoadFieldsRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildDataExchangeRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildConsolidationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildAccessMethodRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildPlanCleanupRuleCollection()));
+
+        // TODO: add the TXn (transaction) rule collection here.
+        return defaultLogicalRewrites;
+    }
+
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultPhysicalRewrites() {
+        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultPhysicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
+        SequentialOnceRuleController seqOnceTopLevel = new SequentialOnceRuleController(false);
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildPhysicalRewritesAllLevelsRuleCollection()));
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceTopLevel,
+                RuleCollections.buildPhysicalRewritesTopLevelRuleCollection()));
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.prepareForJobGenRuleCollection()));
+        return defaultPhysicalRewrites;
+    }
+
+    private static class AqlOptimizationContextFactory implements IOptimizationContextFactory {
+
+        public static final AqlOptimizationContextFactory INSTANCE = new AqlOptimizationContextFactory();
+
+        private AqlOptimizationContextFactory() {
+        }
+
+        @Override
+        public IOptimizationContext createOptimizationContext(int varCounter,
+                IExpressionEvalSizeComputer expressionEvalSizeComputer,
+                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
+                IExpressionTypeComputer expressionTypeComputer, INullableTypeComputer nullableTypeComputer,
+                PhysicalOptimizationConfig physicalOptimizationConfig) {
+            return new AlgebricksOptimizationContext(varCounter, expressionEvalSizeComputer,
+                    mergeAggregationExpressionFactory, expressionTypeComputer, nullableTypeComputer,
+                    physicalOptimizationConfig);
+        }
+
+    }
+
+    public static Pair<Query, Integer> reWriteQuery(List<FunctionDecl> declaredFunctions,
+            AqlMetadataProvider metadataProvider, Query q, SessionConfig conf) throws AsterixException {
+
+        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_EXPR_TREE)) {
+            conf.out().println();
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Expression tree:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Expression tree:");
+            }
+
+            if (q != null) {
+                q.accept(new AQLPrintVisitor(conf.out()), 0);
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+        AqlRewriter rw = new AqlRewriter(declaredFunctions, q, metadataProvider);
+        rw.rewrite();
+        Query rwQ = rw.getExpr();
+        return new Pair<Query, Integer>(rwQ, rw.getVarCounter());
+    }
+
+    public static JobSpecification compileQuery(List<FunctionDecl> declaredFunctions,
+            AqlMetadataProvider queryMetadataProvider, Query rwQ, int varCounter, String outputDatasetName,
+            SessionConfig conf, ICompiledDmlStatement statement) throws AsterixException, AlgebricksException,
+            JSONException, RemoteException, ACIDException {
+
+        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_REWRITTEN_EXPR_TREE)) {
+            conf.out().println();
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Rewritten expression tree:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Rewritten expression:");
+            }
+
+            if (rwQ != null) {
+                rwQ.accept(new AQLPrintVisitor(conf.out()), 0);
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+
+        edu.uci.ics.asterix.common.transactions.JobId asterixJobId = JobIdFactory.generateJobId();
+        queryMetadataProvider.setJobId(asterixJobId);
+        AqlExpressionToPlanTranslator t = new AqlExpressionToPlanTranslator(queryMetadataProvider, varCounter,
+                outputDatasetName, statement);
+
+        ILogicalPlan plan;
+        // statement = null when it's a query
+        if (statement == null || statement.getKind() != Kind.LOAD) {
+            plan = t.translate(rwQ);
+        } else {
+            plan = t.translateLoad();
+        }
+
+        LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
+        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_LOGICAL_PLAN)) {
+            conf.out().println();
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Logical plan:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Logical plan:");
+            }
+
+            if (rwQ != null || statement.getKind() == Kind.LOAD) {
+                StringBuilder buffer = new StringBuilder();
+                PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
+                conf.out().print(buffer);
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+
+        //print the plot for the logical plan
+        AsterixExternalProperties xProps = AsterixAppContextInfo.getInstance().getExternalProperties();
+        Boolean plot = xProps.getIsPlottingEnabled();
+        if (plot) {
+            PlanPlotter.printLogicalPlan(plan);
+        }
+
+        AsterixCompilerProperties compilerProperties = AsterixAppContextInfo.getInstance().getCompilerProperties();
+        int frameSize = compilerProperties.getFrameSize();
+        int sortFrameLimit = (int) (compilerProperties.getSortMemorySize() / frameSize);
+        int groupFrameLimit = (int) (compilerProperties.getGroupMemorySize() / frameSize);
+        int joinFrameLimit = (int) (compilerProperties.getJoinMemorySize() / frameSize);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalSort(sortFrameLimit);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalGroupBy(groupFrameLimit);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesHybridHash(joinFrameLimit);
+
+        HeuristicCompilerFactoryBuilder builder = new HeuristicCompilerFactoryBuilder(
+                AqlOptimizationContextFactory.INSTANCE);
+        builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
+        builder.setLogicalRewrites(buildDefaultLogicalRewrites());
+        builder.setPhysicalRewrites(buildDefaultPhysicalRewrites());
+        IDataFormat format = queryMetadataProvider.getFormat();
+        ICompilerFactory compilerFactory = builder.create();
+        builder.setExpressionEvalSizeComputer(format.getExpressionEvalSizeComputer());
+        builder.setIMergeAggregationExpressionFactory(new AqlMergeAggregationExpressionFactory());
+        builder.setPartialAggregationTypeComputer(new AqlPartialAggregationTypeComputer());
+        builder.setExpressionTypeComputer(AqlExpressionTypeComputer.INSTANCE);
+        builder.setNullableTypeComputer(AqlNullableTypeComputer.INSTANCE);
+
+        ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
+        if (conf.isOptimize()) {
+            compiler.optimize();
+            //plot optimized logical plan
+            if (plot)
+                PlanPlotter.printOptimizedLogicalPlan(plan);
+            if (conf.is(SessionConfig.OOB_OPTIMIZED_LOGICAL_PLAN)) {
+                if (conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS)) {
+                    // For Optimizer tests.
+                    StringBuilder buffer = new StringBuilder();
+                    PlanPrettyPrinter.printPhysicalOps(plan, buffer, 0);
+                    conf.out().print(buffer);
+                } else {
+                    if (conf.is(SessionConfig.FORMAT_HTML)) {
+                        conf.out().println("<h4>Optimized logical plan:</h4>");
+                        conf.out().println("<pre>");
+                    } else {
+                        conf.out().println("----------Optimized logical plan:");
+                    }
+
+                    if (rwQ != null || statement.getKind() == Kind.LOAD) {
+                        StringBuilder buffer = new StringBuilder();
+                        PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
+                        conf.out().print(buffer);
+                    }
+
+                    if (conf.is(SessionConfig.FORMAT_HTML)) {
+                        conf.out().println("</pre>");
+                    }
+                }
+            }
+        }
+
+        if (!conf.isGenerateJobSpec()) {
+            return null;
+        }
+
+        AlgebricksPartitionConstraint clusterLocs = queryMetadataProvider.getClusterLocations();
+        builder.setBinaryBooleanInspectorFactory(format.getBinaryBooleanInspectorFactory());
+        builder.setBinaryIntegerInspectorFactory(format.getBinaryIntegerInspectorFactory());
+        builder.setClusterLocations(clusterLocs);
+        builder.setComparatorFactoryProvider(format.getBinaryComparatorFactoryProvider());
+        builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
+                AqlLogicalExpressionJobGen.INSTANCE));
+        builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
+        builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
+        builder.setNullWriterFactory(format.getNullWriterFactory());
+        builder.setPredicateEvaluatorFactoryProvider(format.getPredicateEvaluatorFactoryProvider());
+
+        switch (conf.fmt()) {
+            case JSON:
+                builder.setPrinterProvider(format.getJSONPrinterFactoryProvider());
+                break;
+            case CSV:
+                builder.setPrinterProvider(format.getCSVPrinterFactoryProvider());
+                break;
+            case ADM:
+                builder.setPrinterProvider(format.getPrinterFactoryProvider());
+                break;
+            default:
+                throw new RuntimeException("Unexpected OutputFormat!");
+        }
+
+        builder.setSerializerDeserializerProvider(format.getSerdeProvider());
+        builder.setTypeTraitProvider(format.getTypeTraitProvider());
+        builder.setNormalizedKeyComputerFactoryProvider(format.getNormalizedKeyComputerFactoryProvider());
+
+        JobEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(asterixJobId,
+                queryMetadataProvider.isWriteTransaction());
+        JobSpecification spec = compiler.createJob(AsterixAppContextInfo.getInstance(), jobEventListenerFactory);
+
+        if (conf.is(SessionConfig.OOB_HYRACKS_JOB)) {
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Hyracks job:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Hyracks job:");
+            }
+
+            if (rwQ != null) {
+                conf.out().println(spec.toJSON().toString(1));
+                conf.out().println(spec.getUserConstraints());
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+        return spec;
+    }
+
+    public static void executeJobArray(IHyracksClientConnection hcc, JobSpecification[] specs, PrintWriter out)
+            throws Exception {
+        for (int i = 0; i < specs.length; i++) {
+            specs[i].setMaxReattempts(0);
+            JobId jobId = hcc.startJob(specs[i]);
+            long startTime = System.currentTimeMillis();
+            hcc.waitForCompletion(jobId);
+            long endTime = System.currentTimeMillis();
+            double duration = (endTime - startTime) / 1000.00;
+            out.println("<pre>Duration: " + duration + " sec</pre>");
+        }
+
+    }
+
+    public static void executeJobArray(IHyracksClientConnection hcc, Job[] jobs, PrintWriter out) throws Exception {
+        for (int i = 0; i < jobs.length; i++) {
+            jobs[i].getJobSpec().setMaxReattempts(0);
+            long startTime = System.currentTimeMillis();
+            try {
+                JobId jobId = hcc.startJob(jobs[i].getJobSpec());
+                if (jobs[i].getSubmissionMode() == SubmissionMode.ASYNCHRONOUS) {
+                    continue;
+                }
+                hcc.waitForCompletion(jobId);
+            } catch (Exception e) {
+                e.printStackTrace();
+                continue;
+            }
+            long endTime = System.currentTimeMillis();
+            double duration = (endTime - startTime) / 1000.00;
+            out.println("<pre>Duration: " + duration + " sec</pre>");
+        }
+
+    }
+
+    private static IDataFormat getDataFormat(MetadataTransactionContext mdTxnCtx, String dataverseName)
+            throws AsterixException {
+        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
+        IDataFormat format;
+        try {
+            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+        return format;
+    }
+
+}
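
The class comment above summarizes the flow: rewrite the AQL expression tree, compile it into an optimized plan and a Hyracks JobSpecification, then submit the job through the client connection. A hedged sketch of that call sequence, using only the static methods added in this file; obtaining the parsed Query, the declared functions, the AqlMetadataProvider, the SessionConfig, the IHyracksClientConnection and the PrintWriter is assumed and elided here:

    // Sketch of the rewrite -> compile -> execute sequence (setup of the inputs is assumed).
    Pair<Query, Integer> rewritten =
            APIFramework.reWriteQuery(declaredFunctions, metadataProvider, query, sessionConfig);
    JobSpecification spec = APIFramework.compileQuery(declaredFunctions, metadataProvider,
            rewritten.first, rewritten.second,
            null /* outputDatasetName: plain query */, sessionConfig,
            null /* ICompiledDmlStatement: null for a query */);
    if (spec != null) { // compileQuery returns null when job-spec generation is disabled
        APIFramework.executeJobArray(hcc, new JobSpecification[] { spec }, out);
    }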

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
new file mode 100644
index 0000000..6d7f2a4
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.AsterixFeedProperties;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixFileMapManager;
+import edu.uci.ics.asterix.common.context.DatasetLifecycleManager;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.api.IFeedManager;
+import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
+import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
+import edu.uci.ics.asterix.feeds.FeedManager;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepositoryFactory;
+import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponent;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AsynchronousScheduler;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.PrefixMergePolicyFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageCleanerPolicy;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
+
+public class AsterixAppRuntimeContext implements IAsterixAppRuntimeContext, IAsterixPropertiesProvider {
+
+    private static final AsterixPropertiesAccessor ASTERIX_PROPERTIES_ACCESSOR;
+
+    static {
+        try {
+            ASTERIX_PROPERTIES_ACCESSOR = new AsterixPropertiesAccessor();
+        } catch (AsterixException e) {
+            throw new ExceptionInInitializerError(e);
+        }
+    }
+
+    private static final int METADATA_IO_DEVICE_ID = 0;
+
+    private ILSMMergePolicyFactory metadataMergePolicyFactory;
+    private final INCApplicationContext ncApplicationContext;
+
+    private AsterixCompilerProperties compilerProperties;
+    private AsterixExternalProperties externalProperties;
+    private AsterixMetadataProperties metadataProperties;
+    private AsterixStorageProperties storageProperties;
+    private AsterixTransactionProperties txnProperties;
+    private AsterixFeedProperties feedProperties;
+
+
+    private AsterixThreadExecutor threadExecutor;
+    private DatasetLifecycleManager indexLifecycleManager;
+    private IFileMapManager fileMapManager;
+    private IBufferCache bufferCache;
+    private ITransactionSubsystem txnSubsystem;
+
+    private ILSMIOOperationScheduler lsmIOScheduler;
+    private ILocalResourceRepository localResourceRepository;
+    private ResourceIdFactory resourceIdFactory;
+    private IIOManager ioManager;
+    private boolean isShuttingdown;
+
+    private IFeedManager feedManager;
+
+    public AsterixAppRuntimeContext(INCApplicationContext ncApplicationContext) throws AsterixException {
+        this.ncApplicationContext = ncApplicationContext;
+        compilerProperties = new AsterixCompilerProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        externalProperties = new AsterixExternalProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        metadataProperties = new AsterixMetadataProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        storageProperties = new AsterixStorageProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        txnProperties = new AsterixTransactionProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        feedProperties = new AsterixFeedProperties(ASTERIX_PROPERTIES_ACCESSOR);
+    }
+
+    public void initialize() throws IOException, ACIDException, AsterixException {
+        Logger.getLogger("edu.uci.ics").setLevel(externalProperties.getLogLevel());
+
+        threadExecutor = new AsterixThreadExecutor(ncApplicationContext.getThreadFactory());
+        fileMapManager = new AsterixFileMapManager();
+        ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+        IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
+        ioManager = ncApplicationContext.getRootContext().getIOManager();
+        IPageReplacementStrategy prs = new ClockPageReplacementStrategy(allocator,
+                storageProperties.getBufferCachePageSize(), storageProperties.getBufferCacheNumPages());
+        bufferCache = new BufferCache(ioManager, prs, pcp, fileMapManager,
+                storageProperties.getBufferCacheMaxOpenFiles(), ncApplicationContext.getThreadFactory());
+
+        AsynchronousScheduler.INSTANCE.init(ncApplicationContext.getThreadFactory());
+        lsmIOScheduler = AsynchronousScheduler.INSTANCE;
+
+        metadataMergePolicyFactory = new PrefixMergePolicyFactory();
+
+        ILocalResourceRepositoryFactory persistentLocalResourceRepositoryFactory = new PersistentLocalResourceRepositoryFactory(
+                ioManager);
+        localResourceRepository = (PersistentLocalResourceRepository) persistentLocalResourceRepositoryFactory
+                .createRepository();
+        resourceIdFactory = (new ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
+
+        IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProdiverForRecovery(
+                this);
+        txnSubsystem = new TransactionSubsystem(ncApplicationContext.getNodeId(), asterixAppRuntimeContextProvider,
+                txnProperties);
+        
+        indexLifecycleManager = new DatasetLifecycleManager(storageProperties, localResourceRepository,
+                MetadataPrimaryIndexes.FIRST_AVAILABLE_USER_DATASET_ID,(LogManager)txnSubsystem.getLogManager());
+        
+        isShuttingdown = false;
+
+        feedManager = new FeedManager(ncApplicationContext.getNodeId(), feedProperties,
+                compilerProperties.getFrameSize());
+
+        // The order of registration is important: the buffer cache must be registered before the recovery and transaction managers.
+        ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
+        lccm.register((ILifeCycleComponent) bufferCache);
+        lccm.register((ILifeCycleComponent) txnSubsystem.getTransactionManager());
+        lccm.register((ILifeCycleComponent) txnSubsystem.getLogManager());
+        lccm.register((ILifeCycleComponent) indexLifecycleManager);
+        lccm.register((ILifeCycleComponent) txnSubsystem.getLockManager());
+        lccm.register((ILifeCycleComponent) txnSubsystem.getRecoveryManager());
+    }
+
+    public boolean isShuttingdown() {
+        return isShuttingdown;
+    }
+
+    public void setShuttingdown(boolean isShuttingdown) {
+        this.isShuttingdown = isShuttingdown;
+    }
+
+    public void deinitialize() throws HyracksDataException {
+    }
+
+    public IBufferCache getBufferCache() {
+        return bufferCache;
+    }
+
+    public IFileMapProvider getFileMapManager() {
+        return fileMapManager;
+    }
+
+    public ITransactionSubsystem getTransactionSubsystem() {
+        return txnSubsystem;
+    }
+
+    public IIndexLifecycleManager getIndexLifecycleManager() {
+        return indexLifecycleManager;
+    }
+
+    public double getBloomFilterFalsePositiveRate() {
+        return storageProperties.getBloomFilterFalsePositiveRate();
+    }
+
+    public ILSMIOOperationScheduler getLSMIOScheduler() {
+        return lsmIOScheduler;
+    }
+
+    public ILocalResourceRepository getLocalResourceRepository() {
+        return localResourceRepository;
+    }
+
+    public ResourceIdFactory getResourceIdFactory() {
+        return resourceIdFactory;
+    }
+
+    public IIOManager getIOManager() {
+        return ioManager;
+    }
+
+    public int getMetaDataIODeviceId() {
+        return METADATA_IO_DEVICE_ID;
+    }
+
+    @Override
+    public AsterixStorageProperties getStorageProperties() {
+        return storageProperties;
+    }
+
+    @Override
+    public AsterixTransactionProperties getTransactionProperties() {
+        return txnProperties;
+    }
+
+    @Override
+    public AsterixCompilerProperties getCompilerProperties() {
+        return compilerProperties;
+    }
+
+    @Override
+    public AsterixMetadataProperties getMetadataProperties() {
+        return metadataProperties;
+    }
+
+    @Override
+    public AsterixExternalProperties getExternalProperties() {
+        return externalProperties;
+    }
+    
+    @Override
+    public AsterixFeedProperties getFeedProperties() {
+        return feedProperties;
+    }
+
+    @Override
+    public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
+        return indexLifecycleManager.getVirtualBufferCaches(datasetID);
+    }
+
+    @Override
+    public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
+        return indexLifecycleManager.getOperationTracker(datasetID);
+    }
+
+    @Override
+    public AsterixThreadExecutor getThreadExecutor() {
+        return threadExecutor;
+    }
+
+    public ILSMMergePolicyFactory getMetadataMergePolicyFactory() {
+        return metadataMergePolicyFactory;
+    }
+
+    @Override
+    public IFeedManager getFeedManager() {
+        return feedManager;
+    }
+}
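
AsterixAppRuntimeContext is the per-node-controller container that wires the buffer cache, transaction subsystem, dataset lifecycle manager and feed manager together and registers them with the NC's lifecycle component manager. A hedged sketch of the bootstrap sequence an NC entry point would perform, using only the constructor and accessors shown above (the surrounding entry-point class and the ncApplicationContext variable are assumed):

    // Sketch only: construct and initialize the runtime context on a node controller.
    AsterixAppRuntimeContext runtimeContext = new AsterixAppRuntimeContext(ncApplicationContext);
    runtimeContext.initialize(); // creates buffer cache, txn subsystem, lifecycle manager, feed manager
    IBufferCache bufferCache = runtimeContext.getBufferCache();
    ITransactionSubsystem txnSubsystem = runtimeContext.getTransactionSubsystem();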

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
new file mode 100644
index 0000000..938a8b9
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
+import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
+
+public class AsterixAppRuntimeContextProdiverForRecovery implements IAsterixAppRuntimeContextProvider {
+
+    private final AsterixAppRuntimeContext asterixAppRuntimeContext;
+
+    public AsterixAppRuntimeContextProdiverForRecovery(AsterixAppRuntimeContext asterixAppRuntimeContext) {
+        this.asterixAppRuntimeContext = asterixAppRuntimeContext;
+    }
+
+    @Override
+    public IBufferCache getBufferCache() {
+        return asterixAppRuntimeContext.getBufferCache();
+    }
+
+    @Override
+    public IFileMapProvider getFileMapManager() {
+        return asterixAppRuntimeContext.getFileMapManager();
+    }
+
+    @Override
+    public ITransactionSubsystem getTransactionSubsystem() {
+        return asterixAppRuntimeContext.getTransactionSubsystem();
+    }
+
+    @Override
+    public IIndexLifecycleManager getIndexLifecycleManager() {
+        return asterixAppRuntimeContext.getIndexLifecycleManager();
+    }
+
+    @Override
+    public double getBloomFilterFalsePositiveRate() {
+        return asterixAppRuntimeContext.getBloomFilterFalsePositiveRate();
+    }
+
+    @Override
+    public ILSMIOOperationScheduler getLSMIOScheduler() {
+        return asterixAppRuntimeContext.getLSMIOScheduler();
+    }
+
+    @Override
+    public ILocalResourceRepository getLocalResourceRepository() {
+        return asterixAppRuntimeContext.getLocalResourceRepository();
+    }
+
+    @Override
+    public ResourceIdFactory getResourceIdFactory() {
+        return asterixAppRuntimeContext.getResourceIdFactory();
+    }
+
+    @Override
+    public IIOManager getIOManager() {
+        return asterixAppRuntimeContext.getIOManager();
+    }
+
+    @Override
+    public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
+        return asterixAppRuntimeContext.getVirtualBufferCaches(datasetID);
+    }
+
+    @Override
+    public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
+        return asterixAppRuntimeContext.getLSMBTreeOperationTracker(datasetID);
+    }
+
+    @Override
+    public IAsterixAppRuntimeContext getAppContext() {
+        return asterixAppRuntimeContext;
+    }
+
+    @Override
+    public AsterixThreadExecutor getThreadExecutor() {
+        return asterixAppRuntimeContext.getThreadExecutor();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java
new file mode 100644
index 0000000..9c3223e
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.kohsuke.args4j.Argument;
+import org.kohsuke.args4j.Option;
+
+public class AsterixClientConfig {
+    @Option(name = "-optimize", usage = "Turns compiler optimizations on (if set to true) or off (if set to false). It is true by default.")
+    public String optimize = "true";
+
+    @Option(name = "-only-physical", usage = "Prints only the physical annotations, not the entire operators. It is false by default.")
+    public String onlyPhysical = "false";
+
+    @Option(name = "-execute", usage = "Executes the job produced by the compiler. It is false by default.")
+    public String execute = "false";
+
+    @Option(name = "-hyracks-job", usage = "Generates and prints the Hyracks job. It is false by default.")
+    public String hyracksJob = "false";
+
+    @Option(name = "-hyracks-port", usage = "The port used to connect to the Hyracks server.")
+    public int hyracksPort = AsterixHyracksIntegrationUtil.DEFAULT_HYRACKS_CC_CLIENT_PORT;
+
+    @Argument
+    private List<String> arguments = new ArrayList<String>();
+
+    public List<String> getArguments() {
+        return arguments;
+    }
+}
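
AsterixClientConfig is a plain args4j option bean: each @Option maps a command-line flag onto a field, and the @Argument list collects the remaining positional arguments. A hedged sketch of parsing it; CmdLineParser is the standard args4j entry point and is not part of this diff:

    import java.util.List;

    import org.kohsuke.args4j.CmdLineException;
    import org.kohsuke.args4j.CmdLineParser;

    // Sketch only: bind command-line arguments to the option bean above.
    AsterixClientConfig clientConfig = new AsterixClientConfig();
    CmdLineParser parser = new CmdLineParser(clientConfig);
    try {
        parser.parseArgument(args); // fills the @Option fields and the @Argument list
    } catch (CmdLineException e) {
        parser.printUsage(System.err); // print the auto-generated usage text
        System.exit(1);
    }
    boolean optimize = Boolean.parseBoolean(clientConfig.optimize);
    List<String> inputFiles = clientConfig.getArguments();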

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
new file mode 100644
index 0000000..91aa897
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.File;
+import java.util.EnumSet;
+
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.hyracks.bootstrap.CCApplicationEntryPoint;
+import edu.uci.ics.asterix.hyracks.bootstrap.NCApplicationEntryPoint;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+public class AsterixHyracksIntegrationUtil {
+
+    public static final int NODES = 2;
+    public static final int PARTITONS = 2;
+
+    public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
+
+    public static final int DEFAULT_HYRACKS_CC_CLUSTER_PORT = 1099;
+
+    private static ClusterControllerService cc;
+    private static NodeControllerService[] ncs = new NodeControllerService[NODES];
+    private static IHyracksClientConnection hcc;
+
+    public static void init() throws Exception {
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clusterNetIpAddress = "127.0.0.1";
+        ccConfig.clientNetIpAddress = "127.0.0.1";
+        ccConfig.clientNetPort = DEFAULT_HYRACKS_CC_CLIENT_PORT;
+        ccConfig.clusterNetPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
+        ccConfig.defaultMaxJobAttempts = 0;
+        ccConfig.resultTTL = 30000;
+        ccConfig.resultSweepThreshold = 1000;
+        ccConfig.appCCMainClass = CCApplicationEntryPoint.class.getName();
+        // ccConfig.useJOL = true;
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        int n = 0;
+        for (String ncName : getNcNames()) {
+            NCConfig ncConfig1 = new NCConfig();
+            ncConfig1.ccHost = "localhost";
+            ncConfig1.ccPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
+            ncConfig1.clusterNetIPAddress = "127.0.0.1";
+            ncConfig1.dataIPAddress = "127.0.0.1";
+            ncConfig1.resultIPAddress = "127.0.0.1";
+            ncConfig1.nodeId = ncName;
+            ncConfig1.resultTTL = 30000;
+            ncConfig1.resultSweepThreshold = 1000;
+            for (int p = 0; p < PARTITONS; ++p) {
+                if (p == 0) {
+                    ncConfig1.ioDevices = System.getProperty("java.io.tmpdir") + File.separator + ncConfig1.nodeId
+                            + "/iodevice" + p;
+                } else {
+                    ncConfig1.ioDevices += "," + System.getProperty("java.io.tmpdir") + File.separator
+                            + ncConfig1.nodeId + "/iodevice" + p;
+                }
+            }
+            ncConfig1.appNCMainClass = NCApplicationEntryPoint.class.getName();
+            ncs[n] = new NodeControllerService(ncConfig1);
+            ncs[n].start();
+            ++n;
+        }
+
+        hcc = new HyracksConnection(cc.getConfig().clientNetIpAddress, cc.getConfig().clientNetPort);
+    }
+
+    public static String[] getNcNames() {
+        String[] names = new String[NODES];
+        for (int n = 0; n < NODES; ++n) {
+            names[n] = "nc" + (n + 1);
+        }
+        return names;
+    }
+
+    public static String[] getDataDirs() {
+        String[] names = new String[NODES];
+        for (int n = 0; n < NODES; ++n) {
+            names[n] = "nc" + (n + 1) + "data";
+        }
+        return names;
+    }
+
+    public static IHyracksClientConnection getHyracksClientConnection() {
+        return hcc;
+    }
+
+    public static void deinit() throws Exception {
+        for (int n = 0; n < ncs.length; ++n) {
+            if (ncs[n] != null)
+                ncs[n].stop();
+
+        }
+        if (cc != null)
+            cc.stop();
+    }
+
+    public static void runJob(JobSpecification spec) throws Exception {
+        GlobalConfig.ASTERIX_LOGGER.info(spec.toJSON().toString());
+        JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        GlobalConfig.ASTERIX_LOGGER.info(jobId.toString());
+        hcc.waitForCompletion(jobId);
+    }
+
+    /**
+     * Main method to run a simple two-node cluster in-process.
+     * Suggested VM arguments: <code>-enableassertions -Xmx2048m -Dfile.encoding=UTF-8</code>
+     *
+     * @param args
+     *            unused
+     */
+    public static void main(String[] args) {
+        Runtime.getRuntime().addShutdownHook(new Thread() {
+            public void run() {
+                try {
+                    deinit();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        });
+        try {
+            System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, "asterix-build-configuration.xml");
+
+            init();
+            while (true) {
+                Thread.sleep(10000);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+}
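
Besides running main() directly, the utility can be driven programmatically, for example from tests: init() starts one CC and two NCs in-process, runJob() submits a JobSpecification with profiling enabled and waits for it, and deinit() tears everything down. A hedged usage sketch, using only methods defined above; building a real JobSpecification is elided behind a hypothetical helper:

    // Sketch only: drive the in-process cluster from a test harness.
    System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, "asterix-build-configuration.xml");
    AsterixHyracksIntegrationUtil.init(); // starts 1 CC + 2 NCs in-process
    try {
        JobSpecification spec = buildSomeJobSpec(); // hypothetical helper, not in this commit
        AsterixHyracksIntegrationUtil.runJob(spec); // logs the spec, submits with PROFILE_RUNTIME, waits
    } finally {
        AsterixHyracksIntegrationUtil.deinit(); // stops the NCs and then the CC
    }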


[45/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java
deleted file mode 100644
index 54e470c..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/LoadRecordFieldsRule.java
+++ /dev/null
@@ -1,368 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.base.AsterixOperatorAnnotations;
-import edu.uci.ics.asterix.common.exceptions.AsterixRuntimeException;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class LoadRecordFieldsRule implements IAlgebraicRewriteRule {
-
-    private ExtractFieldLoadExpressionVisitor exprVisitor = new ExtractFieldLoadExpressionVisitor();
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op1)) {
-            return false;
-        }
-
-        if (op1.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            AssignOperator a1 = (AssignOperator) op1;
-            ILogicalExpression expr = getFirstExpr(a1);
-            if (AnalysisUtil.isAccessToFieldRecord(expr)) {
-                boolean res = findAndEliminateRedundantFieldAccess(a1);
-                context.addToDontApplySet(this, op1);
-                return res;
-            }
-        }
-        exprVisitor.setTopOp(op1);
-        exprVisitor.setContext(context);
-        boolean res = op1.acceptExpressionTransform(exprVisitor);
-        if (!res) {
-            context.addToDontApplySet(this, op1);
-        }
-        if (res && op1.getOperatorTag() == LogicalOperatorTag.SELECT) {
-            // checking if we can annotate a Selection as using just one field
-            // access
-            SelectOperator sigma = (SelectOperator) op1;
-            LinkedList<LogicalVariable> vars = new LinkedList<LogicalVariable>();
-            VariableUtilities.getUsedVariables(sigma, vars);
-            if (vars.size() == 1) {
-                // we can annotate Selection
-                AssignOperator assign1 = (AssignOperator) op1.getInputs().get(0).getValue();
-                AbstractLogicalExpression expr1 = (AbstractLogicalExpression) getFirstExpr(assign1);
-                if (expr1.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr1;
-                    // f should be a call to a field/data access kind of
-                    // function
-                    sigma.getAnnotations().put(AsterixOperatorAnnotations.FIELD_ACCESS, f.getArguments().get(0));
-                }
-            }
-        }
-
-        // TODO: avoid having to recompute type env. here
-        if (res) {
-            OperatorPropertiesUtil.typeOpRec(opRef, context);
-        }
-        return res;
-    }
-
-    private static boolean pushFieldLoads(Mutable<ILogicalExpression> exprRef, AbstractLogicalOperator topOp,
-            IOptimizationContext context) throws AlgebricksException {
-        ILogicalExpression expr = exprRef.getValue();
-        if (expr == null) {
-            return false;
-        }
-        switch (expr.getExpressionTag()) {
-            case FUNCTION_CALL: {
-                AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
-                FunctionIdentifier fi = f.getFunctionIdentifier();
-                if (AlgebricksBuiltinFunctions.isComparisonFunction(fi)) {
-                    boolean b1 = pushFieldLoads(f.getArguments().get(0), topOp, context);
-                    boolean b2 = pushFieldLoads(f.getArguments().get(1), topOp, context);
-                    return b1 || b2;
-                }
-                if (fi.equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
-                    if (AnalysisUtil.numberOfVarsInExpr(f) == 0) {
-                        return false;
-                    }
-                    // create an assign
-                    LogicalVariable v = context.newVar();
-                    AssignOperator a2 = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
-                    pushFieldAssign(a2, topOp, context);
-                    context.computeAndSetTypeEnvironmentForOperator(a2);
-                    ILogicalExpression arg = f.getArguments().get(0).getValue();
-                    if (arg.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                        VariableReferenceExpression ref = (VariableReferenceExpression) arg;
-                        LogicalVariable var = ref.getVariableReference();
-                        List<LogicalVariable> keys = context.findPrimaryKey(var);
-                        if (keys != null) {
-                            List<LogicalVariable> tail = new ArrayList<LogicalVariable>();
-                            tail.add(v);
-                            FunctionalDependency pk = new FunctionalDependency(keys, tail);
-                            context.addPrimaryKey(pk);
-                        }
-                    }
-                    exprRef.setValue(new VariableReferenceExpression(v));
-                    return true;
-                } else {
-                    boolean pushed = false;
-                    for (Mutable<ILogicalExpression> argRef : f.getArguments()) {
-                        if (pushFieldLoads(argRef, topOp, context)) {
-                            pushed = true;
-                        }
-                    }
-                    return pushed;
-                }
-            }
-            case CONSTANT:
-            case VARIABLE: {
-                return false;
-            }
-            default: {
-                assert false;
-                throw new IllegalArgumentException();
-            }
-        }
-    }
-
-    private static void pushFieldAssign(AssignOperator a2, AbstractLogicalOperator topOp, IOptimizationContext context)
-            throws AlgebricksException {
-        if (topOp.getInputs().size() == 1 && !topOp.hasNestedPlans()) {
-            Mutable<ILogicalOperator> topChild = topOp.getInputs().get(0);
-            // plugAccessAboveOp(a2, topChild, context);
-            List<Mutable<ILogicalOperator>> a2InptList = a2.getInputs();
-            a2InptList.clear();
-            a2InptList.add(topChild);
-            // and link it as child in the op. tree
-            topOp.getInputs().set(0, new MutableObject<ILogicalOperator>(a2));
-            findAndEliminateRedundantFieldAccess(a2);
-        } else { // e.g., a join
-            LinkedList<LogicalVariable> usedInAccess = new LinkedList<LogicalVariable>();
-            VariableUtilities.getUsedVariables(a2, usedInAccess);
-
-            LinkedList<LogicalVariable> produced2 = new LinkedList<LogicalVariable>();
-            VariableUtilities.getProducedVariables(topOp, produced2);
-
-            if (OperatorPropertiesUtil.disjoint(produced2, usedInAccess)) {
-                for (Mutable<ILogicalOperator> inp : topOp.getInputs()) {
-                    HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
-                    VariableUtilities.getLiveVariables(inp.getValue(), v2);
-                    if (!OperatorPropertiesUtil.disjoint(usedInAccess, v2)) {
-                        pushAccessAboveOpRef(a2, inp, context);
-                        return;
-                    }
-                }
-                if (topOp.hasNestedPlans()) {
-                    AbstractOperatorWithNestedPlans nestedOp = (AbstractOperatorWithNestedPlans) topOp;
-                    for (ILogicalPlan plan : nestedOp.getNestedPlans()) {
-                        for (Mutable<ILogicalOperator> root : plan.getRoots()) {
-                            HashSet<LogicalVariable> v2 = new HashSet<LogicalVariable>();
-                            VariableUtilities.getLiveVariables(root.getValue(), v2);
-                            if (!OperatorPropertiesUtil.disjoint(usedInAccess, v2)) {
-                                pushAccessAboveOpRef(a2, root, context);
-                                return;
-                            }
-                        }
-                    }
-                }
-                throw new AsterixRuntimeException("Field access " + getFirstExpr(a2)
-                        + " does not correspond to any input of operator " + topOp);
-            }
-        }
-    }
-
-    /**
-     * Pushes one field-access assignment above toPushThroughChildRef
-     * 
-     * @param toPush
-     * @param toPushThroughChildRef
-     */
-    private static void pushAccessAboveOpRef(AssignOperator toPush, Mutable<ILogicalOperator> toPushThroughChildRef,
-            IOptimizationContext context) throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> tpInpList = toPush.getInputs();
-        tpInpList.clear();
-        tpInpList.add(new MutableObject<ILogicalOperator>(toPushThroughChildRef.getValue()));
-        toPushThroughChildRef.setValue(toPush);
-        findAndEliminateRedundantFieldAccess(toPush);
-    }
-
-    /**
-     * Rewrite
-     * assign $x := field-access($y, "field")
-     * assign $y := record-constructor { "field": Expr, ... }
-     * into
-     * assign $x := Expr
-     * assign $y := record-constructor { "field": Expr, ... }
-     * 
-     * @param toPush
-     */
-    private static boolean findAndEliminateRedundantFieldAccess(AssignOperator assign) throws AlgebricksException {
-        ILogicalExpression expr = getFirstExpr(assign);
-        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
-        ILogicalExpression arg0 = f.getArguments().get(0).getValue();
-        if (arg0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-        VariableReferenceExpression vre = (VariableReferenceExpression) arg0;
-        LogicalVariable recordVar = vre.getVariableReference();
-        ILogicalExpression arg1 = f.getArguments().get(1).getValue();
-        if (arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            return false;
-        }
-        ConstantExpression ce = (ConstantExpression) arg1;
-        if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
-            String fldName = ((AString) ((AsterixConstantValue) ce.getValue()).getObject()).getStringValue();
-            ILogicalExpression fldExpr = findFieldExpression(assign, recordVar, fldName);
-
-            if (fldExpr != null) {
-                // check the liveness of the new expression
-                List<LogicalVariable> usedVariables = new ArrayList<LogicalVariable>();
-                fldExpr.getUsedVariables(usedVariables);
-                List<LogicalVariable> liveInputVars = new ArrayList<LogicalVariable>();
-                VariableUtilities.getLiveVariables(assign, liveInputVars);
-                usedVariables.removeAll(liveInputVars);
-                if (usedVariables.size() == 0) {
-                    assign.getExpressions().get(0).setValue(fldExpr);
-                    return true;
-                } else {
-                    return false;
-                }
-            } else {
-                return false;
-            }
-        } else if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
-            // int fldIdx = ((IntegerLiteral) ce.getValue()).getValue();
-            // TODO
-            return false;
-        } else if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED)) {
-            return false;
-        } else {
-            throw new IllegalStateException();
-        }
-    }
-
-    private static ILogicalExpression findFieldExpression(AbstractLogicalOperator op, LogicalVariable recordVar,
-            String fldName) {
-        for (Mutable<ILogicalOperator> child : op.getInputs()) {
-            AbstractLogicalOperator opChild = (AbstractLogicalOperator) child.getValue();
-            if (opChild.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                AssignOperator op2 = (AssignOperator) opChild;
-                int i = 0;
-                for (LogicalVariable var : op2.getVariables()) {
-                    if (var == recordVar) {
-                        AbstractLogicalExpression constr = (AbstractLogicalExpression) op2.getExpressions().get(i)
-                                .getValue();
-                        if (constr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                            return null;
-                        }
-                        AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) constr;
-                        if (!fce.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)
-                                && !fce.getFunctionIdentifier().equals(
-                                        AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
-                            return null;
-                        }
-                        Iterator<Mutable<ILogicalExpression>> fldIter = fce.getArguments().iterator();
-                        while (fldIter.hasNext()) {
-                            ILogicalExpression fldExpr = fldIter.next().getValue();
-                            if (fldExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-                                ConstantExpression ce = (ConstantExpression) fldExpr;
-                                String f2 = ((AString) ((AsterixConstantValue) ce.getValue()).getObject())
-                                        .getStringValue();
-                                if (fldName.equals(f2)) {
-                                    return fldIter.next().getValue();
-                                }
-                            }
-                            fldIter.next();
-                        }
-                        return null;
-                    }
-                    i++;
-                }
-            } else if (opChild.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
-                NestedTupleSourceOperator nts = (NestedTupleSourceOperator) opChild;
-                AbstractLogicalOperator opBelowNestedPlan = (AbstractLogicalOperator) nts.getDataSourceReference()
-                        .getValue().getInputs().get(0).getValue();
-                ILogicalExpression expr1 = findFieldExpression(opBelowNestedPlan, recordVar, fldName);
-                if (expr1 != null) {
-                    return expr1;
-                }
-            }
-            ILogicalExpression expr2 = findFieldExpression(opChild, recordVar, fldName);
-            if (expr2 != null) {
-                return expr2;
-            }
-        }
-        return null;
-    }
-
-    private final class ExtractFieldLoadExpressionVisitor implements ILogicalExpressionReferenceTransform {
-
-        private AbstractLogicalOperator topOp;
-        private IOptimizationContext context;
-
-        public void setTopOp(AbstractLogicalOperator topOp) {
-            this.topOp = topOp;
-        }
-
-        public void setContext(IOptimizationContext context) {
-            this.context = context;
-        }
-
-        @Override
-        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
-            return pushFieldLoads(exprRef, topOp, context);
-        }
-
-    }
-
-    private static ILogicalExpression getFirstExpr(AssignOperator assign) {
-        return assign.getExpressions().get(0).getValue();
-    }
-
-}
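
The heart of the LoadRecordFieldsRule removed above is the rewrite documented at findAndEliminateRedundantFieldAccess: when assign $x := field-access($y, "field") sits directly over assign $y := record-constructor { "field": Expr, ... }, the field access is replaced by Expr itself. The following is a minimal standalone sketch of that substitution, assuming Java 16+ and a hypothetical toy expression model (Var, Const, RecordCtor, FieldAccess) in place of the Algebricks expression classes; it also leaves out the rule's liveness check on the variables used by the substituted expression.

import java.util.Map;

// Hypothetical toy expression model for this sketch only (not the Algebricks classes).
interface Expr {}
record Var(String name) implements Expr {}
record Const(String value) implements Expr {}
record RecordCtor(Map<String, Expr> fields) implements Expr {}
record FieldAccess(Expr input, String field) implements Expr {}

public final class RedundantFieldAccessSketch {
    // If 'e' is field-access(<var>, <name>) and <var> is bound to a record constructor
    // that lists <name>, return that field's expression; otherwise return 'e' unchanged.
    static Expr eliminate(Expr e, Map<String, Expr> bindings) {
        if (e instanceof FieldAccess fa && fa.input() instanceof Var v) {
            Expr bound = bindings.get(v.name());
            if (bound instanceof RecordCtor rc && rc.fields().containsKey(fa.field())) {
                return rc.fields().get(fa.field());
            }
        }
        return e;
    }

    public static void main(String[] args) {
        // assign $y := { "field": Expr }, then assign $x := field-access($y, "field")
        Map<String, Expr> bindings = Map.of("$y", new RecordCtor(Map.of("field", new Const("Expr"))));
        Expr x = new FieldAccess(new Var("$y"), "field");
        System.out.println(eliminate(x, bindings)); // Const[value=Expr], i.e. $x := Expr
    }
}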

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java
deleted file mode 100644
index 3ae35bc..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/NestGroupByRule.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class NestGroupByRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
-            return false;
-        }
-        SubplanOperator subplan = (SubplanOperator) op1;
-        if (subplan.getNestedPlans().size() != 1) {
-            return false;
-        }
-        ILogicalPlan p = subplan.getNestedPlans().get(0);
-        if (p.getRoots().size() != 1) {
-            return false;
-        }
-
-        Set<LogicalVariable> free = new HashSet<LogicalVariable>();
-        OperatorPropertiesUtil.getFreeVariablesInSubplans(subplan, free);
-        if (free.size() != 1) {
-            return false;
-        }
-        LogicalVariable fVar = null;
-        for (LogicalVariable v : free) {
-            fVar = v;
-            break;
-        }
-
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.GROUP) {
-            return false;
-        }
-        GroupByOperator gby = (GroupByOperator) op2;
-        if (gby.getNestedPlans().size() != 1) {
-            return false;
-        }
-        ILogicalPlan p2 = gby.getNestedPlans().get(0);
-        if (p2.getRoots().size() != 1) {
-            return false;
-        }
-        Mutable<ILogicalOperator> r2 = p2.getRoots().get(0);
-        AbstractLogicalOperator opr2 = (AbstractLogicalOperator) r2.getValue();
-        if (opr2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AggregateOperator aggOuter = (AggregateOperator) opr2;
-        int posInAggList = aggOuter.getVariables().indexOf(fVar);
-        if (posInAggList < 0) {
-            return false;
-        }
-        AbstractLogicalOperator outerAggSon = (AbstractLogicalOperator) aggOuter.getInputs().get(0).getValue();
-        if (outerAggSon.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
-            return false;
-        }
-        ILogicalExpression eAgg = aggOuter.getExpressions().get(posInAggList).getValue();
-        if (eAgg.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression listifyCall = (AbstractFunctionCallExpression) eAgg;
-        if (listifyCall.getFunctionIdentifier() != AsterixBuiltinFunctions.LISTIFY) {
-            return false;
-        }
-        ILogicalExpression argListify = listifyCall.getArguments().get(0).getValue();
-        if (argListify.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-
-        Mutable<ILogicalOperator> r = p.getRoots().get(0);
-        AbstractLogicalOperator opInS = (AbstractLogicalOperator) r.getValue();
-        if (opInS.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AggregateOperator aggInner = (AggregateOperator) opInS;
-        do {
-            opInS = (AbstractLogicalOperator) opInS.getInputs().get(0).getValue();
-        } while (opInS.getOperatorTag() == LogicalOperatorTag.ASSIGN);
-        if (opInS.getOperatorTag() != LogicalOperatorTag.GROUP) {
-            return false;
-        }
-        AbstractLogicalOperator unnestParent = opInS;
-        AbstractLogicalOperator opUnder = (AbstractLogicalOperator) opInS.getInputs().get(0).getValue();
-        // skip Assigns
-        while (opUnder.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            unnestParent = opUnder;
-            opUnder = (AbstractLogicalOperator) opUnder.getInputs().get(0).getValue();
-        }
-        if (opUnder.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-            return false;
-        }
-        UnnestOperator unnest = (UnnestOperator) opUnder;
-        AbstractLogicalOperator unnestSon = (AbstractLogicalOperator) unnest.getInputs().get(0).getValue();
-        if (unnestSon.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
-            return false;
-        }
-        NestedTupleSourceOperator innerNts = (NestedTupleSourceOperator) unnestSon;
-
-        ILogicalExpression eUnnest = unnest.getExpressionRef().getValue();
-        if (eUnnest.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression uf = (AbstractFunctionCallExpression) eUnnest;
-        if (uf.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
-            return false;
-        }
-        ILogicalExpression scanArg = uf.getArguments().get(0).getValue();
-        if (scanArg.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-            return false;
-        }
-        if (((VariableReferenceExpression) scanArg).getVariableReference() != fVar) {
-            return false;
-        }
-        LogicalVariable uVar = unnest.getVariable();
-        GroupByOperator innerGby = (GroupByOperator) opInS;
-        Set<LogicalVariable> freeInInnerGby = new HashSet<LogicalVariable>();
-        OperatorPropertiesUtil.getFreeVariablesInSubplans(innerGby, freeInInnerGby);
-        for (LogicalVariable v : freeInInnerGby) {
-            if (v != uVar) {
-                return false;
-            }
-        }
-
-        unnestParent.getInputs().get(0).setValue(innerNts);
-        LogicalVariable listifiedVar = ((VariableReferenceExpression) argListify).getVariableReference();
-        substInSubplan(aggInner, uVar, listifiedVar, context);
-        gby.getNestedPlans().add(p);
-        innerNts.getDataSourceReference().setValue(gby);
-        opRef.setValue(gby);
-        OperatorPropertiesUtil.typePlan(p, context);
-        OperatorPropertiesUtil.typePlan(p2, context);
-        context.computeAndSetTypeEnvironmentForOperator(gby);
-        return true;
-
-    }
-
-    private void substInSubplan(AggregateOperator aggInner, LogicalVariable v1, LogicalVariable v2,
-            IOptimizationContext context) throws AlgebricksException {
-        ILogicalOperator op = aggInner;
-        while (op.getInputs().size() == 1) {
-            VariableUtilities.substituteVariables(op, v1, v2, context);
-            op = op.getInputs().get(0).getValue();
-        }
-    }
-}
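
NestGroupByRule, deleted above, detects a subplan that re-groups a listified group and folds the subplan's inner group-by into the outer GroupByOperator as a nested plan that consumes the group's tuples directly, instead of re-scanning the list built by listify. The snippet below is only a data-level analogy of that effect using Java streams; the Row record and its keys are made up for illustration and are not the operator algebra the rule actually rewrites.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public final class NestGroupBySketch {
    record Row(String outerKey, String innerKey) {}

    public static void main(String[] args) {
        List<Row> rows = List.of(new Row("a", "x"), new Row("a", "x"), new Row("a", "y"), new Row("b", "x"));

        // Un-nested shape: group once, listify each group, then re-group every list in a "subplan".
        Map<String, Map<String, Long>> viaListify = rows.stream()
                .collect(Collectors.groupingBy(Row::outerKey)) // listify per outer key
                .entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey,
                        e -> e.getValue().stream() // re-scan the materialized list
                                .collect(Collectors.groupingBy(Row::innerKey, Collectors.counting()))));

        // Nested shape after the rule: a single group-by with the inner group-by as a nested plan.
        Map<String, Map<String, Long>> nested = rows.stream()
                .collect(Collectors.groupingBy(Row::outerKey,
                        Collectors.groupingBy(Row::innerKey, Collectors.counting())));

        System.out.println(viaListify.equals(nested)); // true: same result, no intermediate lists
    }
}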

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
deleted file mode 100644
index 482a82b..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PullPositionalVariableFromUnnestRule.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RunningAggregatePOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnpartitionedPropertyComputer;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class PullPositionalVariableFromUnnestRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-            return false;
-        }
-        UnnestOperator unnest = (UnnestOperator) op;
-        LogicalVariable p = unnest.getPositionalVariable();
-        if (p == null) {
-            return false;
-        }
-        ArrayList<LogicalVariable> rOpVars = new ArrayList<LogicalVariable>();
-        rOpVars.add(p);
-        ArrayList<Mutable<ILogicalExpression>> rOpExprList = new ArrayList<Mutable<ILogicalExpression>>();
-        StatefulFunctionCallExpression fce = new StatefulFunctionCallExpression(
-                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.TID), UnpartitionedPropertyComputer.INSTANCE);
-        rOpExprList.add(new MutableObject<ILogicalExpression>(fce));
-        RunningAggregateOperator rOp = new RunningAggregateOperator(rOpVars, rOpExprList);
-        rOp.setExecutionMode(unnest.getExecutionMode());
-        RunningAggregatePOperator rPop = new RunningAggregatePOperator();
-        rOp.setPhysicalOperator(rPop);
-        rOp.getInputs().add(new MutableObject<ILogicalOperator>(unnest));
-        opRef.setValue(rOp);
-        unnest.setPositionalVariable(null);
-        context.computeAndSetTypeEnvironmentForOperator(rOp);
-        context.computeAndSetTypeEnvironmentForOperator(unnest);
-        return true;
-    }
-}
\ No newline at end of file
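
PullPositionalVariableFromUnnestRule, removed above, strips the positional variable off the UnnestOperator and places a RunningAggregateOperator above it that evaluates the TID stateful function, so positions come from a running counter over the unnested stream rather than from the unnest itself. The fragment below is only a data-level analogy of that numbering, assuming a plain Java list stands in for the unnested tuples; it is not the Algebricks operator code.

import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

public final class PositionalVariableSketch {
    public static void main(String[] args) {
        List<String> unnested = List.of("a", "b", "c"); // tuples streaming out of the UNNEST

        // A running counter above the unnest stands in for the positional variable,
        // numbering tuples in arrival order (the role the TID running aggregate plays
        // in the rewritten plan; the numbering base here is just an assumption).
        AtomicLong tid = new AtomicLong();
        unnested.forEach(item -> System.out.println(tid.incrementAndGet() + " " + item));
    }
}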

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
deleted file mode 100644
index a29ebb7..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Pushes aggregate functions into a stand alone aggregate operator (no group by).
- */
-public class PushAggFuncIntoStandaloneAggregateRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        // Pattern to match: assign <-- aggregate <-- !(group-by)
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        AssignOperator assignOp = (AssignOperator) op;
-
-        Mutable<ILogicalOperator> opRef2 = op.getInputs().get(0);
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) opRef2.getValue();
-        if (op2.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
-            AggregateOperator aggOp = (AggregateOperator) op2;
-            // Make sure the agg expr is a listify.
-            return pushAggregateFunction(aggOp, assignOp, context);
-        } else if (op2.getOperatorTag() == LogicalOperatorTag.INNERJOIN
-                || op2.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
-            AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op2;
-            // Tries to push aggregates through the join.
-            if (containsAggregate(assignOp.getExpressions()) && pushableThroughJoin(join)) {
-                pushAggregateFunctionThroughJoin(join, assignOp, context);
-                return true;
-            }
-        }
-        return false;
-    }
-
-    /**
-     * Recursively check whether the list of expressions contains an aggregate function.
-     * 
-     * @param exprRefs
-     * @return true if the list contains an aggregate function and false otherwise.
-     */
-    private boolean containsAggregate(List<Mutable<ILogicalExpression>> exprRefs) {
-        for (Mutable<ILogicalExpression> exprRef : exprRefs) {
-            ILogicalExpression expr = exprRef.getValue();
-            if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                continue;
-            }
-            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-            FunctionIdentifier funcIdent = AsterixBuiltinFunctions.getAggregateFunction(funcExpr
-                    .getFunctionIdentifier());
-            if (funcIdent == null) {
-                // Recursively look in func args.
-                if (containsAggregate(funcExpr.getArguments())) {
-                    return true;
-                }
-            } else {
-                // This is an aggregation function.
-                return true;
-            }
-        }
-        return false;
-    }
-
-    /**
-     * Check whether the join is aggregate-pushable, that is,
-     * 1) the join condition is true;
-     * 2) each join branch produces only one tuple.
-     * 
-     * @param join
-     * @return true if pushable
-     */
-    private boolean pushableThroughJoin(AbstractBinaryJoinOperator join) {
-        ILogicalExpression condition = join.getCondition().getValue();
-        if (condition.equals(ConstantExpression.TRUE)) {
-            // Checks if the aggregation functions are pushable through the join
-            boolean pushable = true;
-            for (Mutable<ILogicalOperator> branchRef : join.getInputs()) {
-                AbstractLogicalOperator branch = (AbstractLogicalOperator) branchRef.getValue();
-                if (branch.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
-                    pushable &= true;
-                } else if (branch.getOperatorTag() == LogicalOperatorTag.INNERJOIN
-                        || branch.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
-                    AbstractBinaryJoinOperator childJoin = (AbstractBinaryJoinOperator) branch;
-                    pushable &= pushableThroughJoin(childJoin);
-                } else {
-                    pushable &= false;
-                }
-            }
-            return pushable;
-        }
-        return false;
-    }
-
-    /**
-     * Does the actual push of aggregates for qualified joins.
-     * 
-     * @param join
-     * @param assignOp
-     *            that contains aggregate function calls.
-     * @param context
-     * @throws AlgebricksException
-     */
-    private void pushAggregateFunctionThroughJoin(AbstractBinaryJoinOperator join, AssignOperator assignOp,
-            IOptimizationContext context) throws AlgebricksException {
-        for (Mutable<ILogicalOperator> branchRef : join.getInputs()) {
-            AbstractLogicalOperator branch = (AbstractLogicalOperator) branchRef.getValue();
-            if (branch.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
-                AggregateOperator aggOp = (AggregateOperator) branch;
-                pushAggregateFunction(aggOp, assignOp, context);
-            } else if (branch.getOperatorTag() == LogicalOperatorTag.INNERJOIN
-                    || branch.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
-                AbstractBinaryJoinOperator childJoin = (AbstractBinaryJoinOperator) branch;
-                pushAggregateFunctionThroughJoin(childJoin, assignOp, context);
-            }
-        }
-    }
-
-    private boolean pushAggregateFunction(AggregateOperator aggOp, AssignOperator assignOp, IOptimizationContext context)
-            throws AlgebricksException {
-        Mutable<ILogicalOperator> opRef3 = aggOp.getInputs().get(0);
-        AbstractLogicalOperator op3 = (AbstractLogicalOperator) opRef3.getValue();
-        // If there's a group by below the agg, then we want to have the agg pushed into the group by.
-        if (op3.getOperatorTag() == LogicalOperatorTag.GROUP) {
-            return false;
-        }
-        if (aggOp.getVariables().size() != 1) {
-            return false;
-        }
-        ILogicalExpression aggExpr = aggOp.getExpressions().get(0).getValue();
-        if (aggExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression origAggFuncExpr = (AbstractFunctionCallExpression) aggExpr;
-        if (origAggFuncExpr.getFunctionIdentifier() != AsterixBuiltinFunctions.LISTIFY) {
-            return false;
-        }
-
-        LogicalVariable aggVar = aggOp.getVariables().get(0);
-        List<LogicalVariable> used = new LinkedList<LogicalVariable>();
-        VariableUtilities.getUsedVariables(assignOp, used);
-        if (!used.contains(aggVar)) {
-            return false;
-        }
-
-        List<Mutable<ILogicalExpression>> srcAssignExprRefs = new LinkedList<Mutable<ILogicalExpression>>();
-        if (findAggFuncExprRef(assignOp.getExpressions(), aggVar, srcAssignExprRefs) == false) {
-            return false;
-        }
-        if (srcAssignExprRefs.isEmpty()) {
-            return false;
-        }
-
-        AbstractFunctionCallExpression aggOpExpr = (AbstractFunctionCallExpression) aggOp.getExpressions().get(0)
-                .getValue();
-        aggOp.getExpressions().clear();
-        aggOp.getVariables().clear();
-
-        for (Mutable<ILogicalExpression> srcAssignExprRef : srcAssignExprRefs) {
-            AbstractFunctionCallExpression assignFuncExpr = (AbstractFunctionCallExpression) srcAssignExprRef
-                    .getValue();
-            FunctionIdentifier aggFuncIdent = AsterixBuiltinFunctions.getAggregateFunction(assignFuncExpr
-                    .getFunctionIdentifier());
-
-            // Push the agg func into the agg op.                
-
-            List<Mutable<ILogicalExpression>> aggArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            aggArgs.add(aggOpExpr.getArguments().get(0));
-            AggregateFunctionCallExpression aggFuncExpr = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
-                    aggFuncIdent, aggArgs);
-            LogicalVariable newVar = context.newVar();
-            aggOp.getVariables().add(newVar);
-            aggOp.getExpressions().add(new MutableObject<ILogicalExpression>(aggFuncExpr));
-
-            // The assign now just "renames" the variable to make sure the upstream plan still works.
-            srcAssignExprRef.setValue(new VariableReferenceExpression(newVar));
-        }
-
-        context.computeAndSetTypeEnvironmentForOperator(aggOp);
-        context.computeAndSetTypeEnvironmentForOperator(assignOp);
-        return true;
-    }
-
-    private boolean findAggFuncExprRef(List<Mutable<ILogicalExpression>> exprRefs, LogicalVariable aggVar,
-            List<Mutable<ILogicalExpression>> srcAssignExprRefs) {
-        for (Mutable<ILogicalExpression> exprRef : exprRefs) {
-            ILogicalExpression expr = exprRef.getValue();
-
-            if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                if (((VariableReferenceExpression) expr).getVariableReference().equals(aggVar)) {
-                    return false;
-                }
-            }
-
-            if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                continue;
-            }
-            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-            FunctionIdentifier funcIdent = AsterixBuiltinFunctions.getAggregateFunction(funcExpr
-                    .getFunctionIdentifier());
-            if (funcIdent == null) {
-                // Recursively look in func args.
-                if (findAggFuncExprRef(funcExpr.getArguments(), aggVar, srcAssignExprRefs) == false) {
-                    return false;
-                }
-
-            } else {
-                // Check if this is the expr that uses aggVar.
-                Collection<LogicalVariable> usedVars = new HashSet<LogicalVariable>();
-                funcExpr.getUsedVariables(usedVars);
-                if (usedVars.contains(aggVar)) {
-                    srcAssignExprRefs.add(exprRef);
-                }
-            }
-        }
-        return true;
-    }
-}
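
PushAggFuncIntoStandaloneAggregateRule, deleted above, rewrites a pattern such as assign $c := count($l) sitting over aggregate $l := listify($x) into an aggregate that evaluates the corresponding aggregate function on $x directly, and turns the assign into a plain variable reference so the upstream plan is unchanged. The comparison below is only a stream-based analogy of why that pays off (the list is never built); count and the variable names are illustrative, not taken from the rule.

import java.util.List;
import java.util.stream.Collectors;

public final class PushAggIntoAggregateSketch {
    public static void main(String[] args) {
        List<Integer> input = List.of(3, 1, 4, 1, 5);

        // Before the rewrite: the aggregate listifies the input and the assign counts the list.
        long viaListify = input.stream().collect(Collectors.toList()).size();

        // After the rewrite: the aggregate computes the count directly; no list is materialized.
        long direct = input.stream().count();

        System.out.println(viaListify == direct); // true: same answer without building the list
    }
}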

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java
deleted file mode 100644
index 90b2f25..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/PushAggregateIntoGroupbyRule.java
+++ /dev/null
@@ -1,462 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class PushAggregateIntoGroupbyRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        Map<LogicalVariable, Integer> gbyAggVars = new HashMap<LogicalVariable, Integer>();
-        Map<LogicalVariable, Integer> gbyAggVarToPlanIndex = new HashMap<LogicalVariable, Integer>();
-        Map<LogicalVariable, GroupByOperator> gbyWithAgg = new HashMap<LogicalVariable, GroupByOperator>();
-        Map<ILogicalExpression, ILogicalExpression> aggExprToVarExpr = new HashMap<ILogicalExpression, ILogicalExpression>();
-        // first collect vars. referring to listified sequences
-        boolean changed = collectVarsBottomUp(opRef, context, gbyAggVars, gbyWithAgg, gbyAggVarToPlanIndex,
-                aggExprToVarExpr);
-        if (changed) {
-            removeRedundantListifies(opRef, context, gbyAggVars, gbyWithAgg, gbyAggVarToPlanIndex);
-        }
-        return changed;
-    }
-
-    private void removeRedundantListifies(Mutable<ILogicalOperator> opRef, IOptimizationContext context,
-            Map<LogicalVariable, Integer> gbyAggVars, Map<LogicalVariable, GroupByOperator> gbyWithAgg,
-            Map<LogicalVariable, Integer> gbyAggVarToPlanIndex) throws AlgebricksException {
-        for (LogicalVariable aggVar : gbyAggVars.keySet()) {
-            int occurs = gbyAggVars.get(aggVar);
-            if (occurs == 0) {
-                GroupByOperator gbyOp = gbyWithAgg.get(aggVar);
-                AggregateOperator aggOp = (AggregateOperator) gbyOp.getNestedPlans()
-                        .get(gbyAggVarToPlanIndex.get(aggVar)).getRoots().get(0).getValue();
-                int pos = aggOp.getVariables().indexOf(aggVar);
-                if (pos >= 0) {
-                    aggOp.getVariables().remove(pos);
-                    aggOp.getExpressions().remove(pos);
-                    List<LogicalVariable> producedVarsAtAgg = new ArrayList<LogicalVariable>();
-                    VariableUtilities.getProducedVariablesInDescendantsAndSelf(aggOp, producedVarsAtAgg);
-                    if (producedVarsAtAgg.isEmpty()) {
-                        gbyOp.getNestedPlans().remove(gbyAggVarToPlanIndex.get(aggVar));
-                    }
-                }
-            }
-        }
-    }
-
-    private boolean collectVarsBottomUp(Mutable<ILogicalOperator> opRef, IOptimizationContext context,
-            Map<LogicalVariable, Integer> gbyListifyVarsCount, Map<LogicalVariable, GroupByOperator> gbyWithAgg,
-            Map<LogicalVariable, Integer> gbyAggVarToPlanIndex,
-            Map<ILogicalExpression, ILogicalExpression> aggregateExprToVarExpr) throws AlgebricksException {
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        context.addToDontApplySet(this, op1);
-        boolean change = false;
-        for (Mutable<ILogicalOperator> child : op1.getInputs()) {
-            if (collectVarsBottomUp(child, context, gbyListifyVarsCount, gbyWithAgg, gbyAggVarToPlanIndex,
-                    aggregateExprToVarExpr)) {
-                change = true;
-            }
-        }
-        // Need to use a list instead of a hash-set, because a var. may appear
-        // several times in the same op.
-        List<LogicalVariable> used = new LinkedList<LogicalVariable>();
-        VariableUtilities.getUsedVariables(op1, used);
-        switch (op1.getOperatorTag()) {
-            case ASSIGN:
-            case SELECT: {
-                boolean found = false;
-                // Do some prefiltering: check if the Assign uses any gby vars.
-                for (LogicalVariable v : used) {
-                    if (gbyListifyVarsCount.get(v) != null) {
-                        found = true;
-                        break;
-                    }
-                }
-                if (found) {
-                    if (op1.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                        AssignOperator assign = (AssignOperator) op1;
-                        for (Mutable<ILogicalExpression> exprRef : assign.getExpressions()) {
-                            Pair<Boolean, ILogicalExpression> p = extractAggFunctionsFromExpression(exprRef,
-                                    gbyWithAgg, aggregateExprToVarExpr, context);
-                            if (p.first) {
-                                change = true;
-                                exprRef.setValue(p.second);
-                            }
-                        }
-                    }
-                    if (op1.getOperatorTag() == LogicalOperatorTag.SELECT) {
-                        SelectOperator select = (SelectOperator) op1;
-                        Mutable<ILogicalExpression> exprRef = select.getCondition();
-                        Pair<Boolean, ILogicalExpression> p = extractAggFunctionsFromExpression(exprRef, gbyWithAgg,
-                                aggregateExprToVarExpr, context);
-                        if (p.first) {
-                            change = true;
-                            exprRef.setValue(p.second);
-                        }
-                    }
-                    used.clear();
-                    VariableUtilities.getUsedVariables(op1, used);
-                    // increment the count for the ones which are still used
-                    for (LogicalVariable v : used) {
-                        Integer m = gbyListifyVarsCount.get(v);
-                        if (m != null) {
-                            gbyListifyVarsCount.put(v, m + 1);
-                        }
-                    }
-                }
-                break;
-            }
-            case SUBPLAN: {
-                for (LogicalVariable v : used) {
-                    Integer m = gbyListifyVarsCount.get(v);
-                    if (m != null) {
-                        GroupByOperator gbyOp = gbyWithAgg.get(v);
-                        if (pushSubplanAsAggIntoGby(opRef, gbyOp, v, gbyListifyVarsCount, gbyWithAgg,
-                                gbyAggVarToPlanIndex, context)) {
-                            change = true;
-                        } else {
-                            gbyListifyVarsCount.put(v, m + 1);
-                        }
-                        break;
-                    }
-                }
-                break;
-            }
-            case GROUP: {
-                List<LogicalVariable> vars = collectOneVarPerAggFromGroupOp((GroupByOperator) op1);
-                if (vars != null) {
-                    for (int i = 0; i < vars.size(); i++) {
-                        LogicalVariable v = vars.get(i);
-                        if (v != null) {
-                            gbyListifyVarsCount.put(v, 0);
-                            gbyAggVarToPlanIndex.put(v, i);
-                            gbyWithAgg.put(v, (GroupByOperator) op1);
-                        }
-                    }
-                }
-                break;
-            }
-            default: {
-                for (LogicalVariable v : used) {
-                    Integer m = gbyListifyVarsCount.get(v);
-                    if (m != null) {
-                        gbyListifyVarsCount.put(v, m + 1);
-                    }
-                }
-            }
-        }
-        return change;
-    }
-
-    private List<LogicalVariable> collectOneVarPerAggFromGroupOp(GroupByOperator group) {
-        List<ILogicalPlan> nPlans = group.getNestedPlans();
-        if (nPlans == null || nPlans.size() < 1) {
-            return null;
-        }
-
-        List<LogicalVariable> aggVars = new ArrayList<LogicalVariable>();
-        // test that the group-by computes a "listify" aggregate
-        for (int i = 0; i < nPlans.size(); i++) {
-            AbstractLogicalOperator topOp = (AbstractLogicalOperator) nPlans.get(i).getRoots().get(0).getValue();
-            if (topOp.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-                continue;
-            }
-            AggregateOperator agg = (AggregateOperator) topOp;
-            if (agg.getVariables().size() != 1) {
-                continue;
-            }
-            ILogicalExpression expr = agg.getExpressions().get(0).getValue();
-            if (((AbstractLogicalExpression) expr).getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                continue;
-            }
-            AbstractFunctionCallExpression fceAgg = (AbstractFunctionCallExpression) expr;
-            if (fceAgg.getFunctionIdentifier() != AsterixBuiltinFunctions.LISTIFY) {
-                continue;
-            }
-            aggVars.add(agg.getVariables().get(0));
-        }
-        return aggVars;
-    }
-
-    /**
-     * @param expr
-     * @param aggVars
-     * @param gbyWithAgg
-     * @param context
-     * @return a pair whose first member is a boolean which is true iff
-     *         something was changed in the expression tree rooted at expr. The
-     *         second member is the result of transforming expr.
-     * @throws AlgebricksException
-     */
-    private Pair<Boolean, ILogicalExpression> extractAggFunctionsFromExpression(Mutable<ILogicalExpression> exprRef,
-            Map<LogicalVariable, GroupByOperator> gbyWithAgg,
-            Map<ILogicalExpression, ILogicalExpression> aggregateExprToVarExpr, IOptimizationContext context)
-            throws AlgebricksException {
-        ILogicalExpression expr = exprRef.getValue();
-        switch (expr.getExpressionTag()) {
-            case FUNCTION_CALL: {
-                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
-                FunctionIdentifier fi = AsterixBuiltinFunctions.getAggregateFunction(fce.getFunctionIdentifier());
-                if (fi != null) {
-                    ILogicalExpression a1 = fce.getArguments().get(0).getValue();
-                    if (a1.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                        LogicalVariable argVar = ((VariableReferenceExpression) a1).getVariableReference();
-                        GroupByOperator gbyOp = gbyWithAgg.get(argVar);
-
-                        if (gbyOp != null) {
-                            if (!aggregateExprToVarExpr.containsKey(expr)) {
-                                LogicalVariable newVar = context.newVar();
-                                AggregateFunctionCallExpression aggFun = AsterixBuiltinFunctions
-                                        .makeAggregateFunctionExpression(fi, fce.getArguments());
-                                rewriteGroupByAggregate(argVar, gbyOp, aggFun, newVar, context);
-                                ILogicalExpression newVarExpr = new VariableReferenceExpression(newVar);
-                                aggregateExprToVarExpr.put(expr, newVarExpr);
-                                return new Pair<Boolean, ILogicalExpression>(Boolean.TRUE, newVarExpr);
-                            } else {
-                                ILogicalExpression varExpr = aggregateExprToVarExpr.get(expr);
-                                return new Pair<Boolean, ILogicalExpression>(Boolean.TRUE, varExpr);
-                            }
-                        }
-                    }
-                }
-
-                boolean change = false;
-                for (Mutable<ILogicalExpression> a : fce.getArguments()) {
-                    Pair<Boolean, ILogicalExpression> aggArg = extractAggFunctionsFromExpression(a, gbyWithAgg,
-                            aggregateExprToVarExpr, context);
-                    if (aggArg.first.booleanValue()) {
-                        a.setValue(aggArg.second);
-                        change = true;
-                    }
-                }
-                return new Pair<Boolean, ILogicalExpression>(change, fce);
-            }
-            case VARIABLE:
-            case CONSTANT: {
-                return new Pair<Boolean, ILogicalExpression>(Boolean.FALSE, expr);
-            }
-            default: {
-                throw new IllegalArgumentException();
-            }
-        }
-    }
-
-    private void rewriteGroupByAggregate(LogicalVariable oldAggVar, GroupByOperator gbyOp,
-            AggregateFunctionCallExpression aggFun, LogicalVariable newAggVar, IOptimizationContext context)
-            throws AlgebricksException {
-        for (int j = 0; j < gbyOp.getNestedPlans().size(); j++) {
-            AggregateOperator aggOp = (AggregateOperator) gbyOp.getNestedPlans().get(j).getRoots().get(0).getValue();
-            int n = aggOp.getVariables().size();
-            for (int i = 0; i < n; i++) {
-                LogicalVariable v = aggOp.getVariables().get(i);
-                if (v.equals(oldAggVar)) {
-                    AbstractFunctionCallExpression oldAggExpr = (AbstractFunctionCallExpression) aggOp.getExpressions()
-                            .get(i).getValue();
-                    AggregateFunctionCallExpression newAggFun = AsterixBuiltinFunctions
-                            .makeAggregateFunctionExpression(aggFun.getFunctionIdentifier(),
-                                    new ArrayList<Mutable<ILogicalExpression>>());
-                    for (Mutable<ILogicalExpression> arg : oldAggExpr.getArguments()) {
-                        ILogicalExpression cloned = ((AbstractLogicalExpression) arg.getValue()).cloneExpression();
-                        newAggFun.getArguments().add(new MutableObject<ILogicalExpression>(cloned));
-                    }
-                    aggOp.getVariables().add(newAggVar);
-                    aggOp.getExpressions().add(new MutableObject<ILogicalExpression>(newAggFun));
-                    context.computeAndSetTypeEnvironmentForOperator(aggOp);
-                    break;
-                }
-            }
-        }
-    }
-
-    private boolean pushSubplanAsAggIntoGby(Mutable<ILogicalOperator> subplanOpRef, GroupByOperator gbyOp,
-            LogicalVariable varFromGroupAgg, Map<LogicalVariable, Integer> gbyAggVars,
-            Map<LogicalVariable, GroupByOperator> gbyWithAgg, Map<LogicalVariable, Integer> gbyAggVarToPlanIndex,
-            IOptimizationContext context) throws AlgebricksException {
-        SubplanOperator subplan = (SubplanOperator) subplanOpRef.getValue();
-        // only free var can be varFromGroupAgg
-        HashSet<LogicalVariable> freeVars = new HashSet<LogicalVariable>();
-        OperatorPropertiesUtil.getFreeVariablesInSubplans(subplan, freeVars);
-        for (LogicalVariable vFree : freeVars) {
-            if (!vFree.equals(varFromGroupAgg)) {
-                return false;
-            }
-        }
-
-        List<ILogicalPlan> plans = subplan.getNestedPlans();
-        if (plans.size() > 1) {
-            return false;
-        }
-        ILogicalPlan p = plans.get(0);
-        if (p.getRoots().size() > 1) {
-            return false;
-        }
-        Mutable<ILogicalOperator> opRef = p.getRoots().get(0);
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            return false;
-        }
-        AggregateOperator aggInSubplanOp = (AggregateOperator) op;
-        LogicalVariable unnestVar = null;
-        boolean pushableNestedSubplan = false;
-        while (op.getInputs().size() == 1) {
-            opRef = op.getInputs().get(0);
-            op = (AbstractLogicalOperator) opRef.getValue();
-            switch (op.getOperatorTag()) {
-                case ASSIGN: {
-                    break;
-                }
-                case UNNEST: {
-                    UnnestOperator unnest = (UnnestOperator) op;
-                    if (unnest.getPositionalVariable() != null) {
-                        // TODO currently subplan with both accumulating and running aggregate is not supported.
-                        return false;
-                    }
-                    ILogicalExpression expr = unnest.getExpressionRef().getValue();
-                    if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                        return false;
-                    }
-                    AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) expr;
-                    if (fun.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION) {
-                        return false;
-                    }
-                    ILogicalExpression arg0 = fun.getArguments().get(0).getValue();
-                    if (arg0.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
-                        return false;
-                    }
-                    VariableReferenceExpression varExpr = (VariableReferenceExpression) arg0;
-                    if (!varExpr.getVariableReference().equals(varFromGroupAgg)) {
-                        return false;
-                    }
-                    opRef = op.getInputs().get(0);
-                    op = (AbstractLogicalOperator) opRef.getValue();
-                    if (op.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
-                        return false;
-                    }
-                    pushableNestedSubplan = true;
-                    unnestVar = unnest.getVariable();
-                    break;
-                }
-                default: {
-                    return false;
-                }
-            }
-        }
-        if (pushableNestedSubplan) {
-            for (int i = 0; i < gbyOp.getNestedPlans().size(); i++) {
-                Mutable<ILogicalOperator> gbyAggRef = gbyOp.getNestedPlans().get(i).getRoots().get(0);
-                AggregateOperator gbyAgg = (AggregateOperator) gbyAggRef.getValue();
-                Mutable<ILogicalOperator> gbyAggChildRef = gbyAgg.getInputs().get(0);
-                OperatorManipulationUtil.substituteVarRec(aggInSubplanOp, unnestVar,
-                        findListifiedVariable(gbyAgg, varFromGroupAgg), true, context);
-                gbyAgg.getVariables().addAll(aggInSubplanOp.getVariables());
-                gbyAgg.getExpressions().addAll(aggInSubplanOp.getExpressions());
-                for (LogicalVariable v : aggInSubplanOp.getVariables()) {
-                    gbyWithAgg.put(v, gbyOp);
-                    gbyAggVars.put(v, 0);
-                    gbyAggVarToPlanIndex.put(v, i);
-                }
-
-                Mutable<ILogicalOperator> opRef1InSubplan = aggInSubplanOp.getInputs().get(0);
-                if (opRef1InSubplan.getValue().getInputs().size() > 0) {
-                    Mutable<ILogicalOperator> opRef2InSubplan = opRef1InSubplan.getValue().getInputs().get(0);
-                    AbstractLogicalOperator op2InSubplan = (AbstractLogicalOperator) opRef2InSubplan.getValue();
-                    if (op2InSubplan.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
-                        List<Mutable<ILogicalOperator>> gbyInpList = gbyAgg.getInputs();
-                        gbyInpList.clear();
-                        gbyInpList.add(opRef1InSubplan);
-                        while (true) {
-                            opRef2InSubplan = opRef1InSubplan.getValue().getInputs().get(0);
-                            op2InSubplan = (AbstractLogicalOperator) opRef2InSubplan.getValue();
-                            if (op2InSubplan.getOperatorTag() == LogicalOperatorTag.UNNEST) {
-                                List<Mutable<ILogicalOperator>> opInpList = opRef1InSubplan.getValue().getInputs();
-                                opInpList.clear();
-                                opInpList.add(gbyAggChildRef);
-                                break;
-                            }
-                            opRef1InSubplan = opRef2InSubplan;
-                            if (opRef1InSubplan.getValue().getInputs().size() == 0) {
-                                throw new IllegalStateException("PushAggregateIntoGroupbyRule: could not find UNNEST.");
-                            }
-                        }
-                    }
-                }
-                subplanOpRef.setValue(subplan.getInputs().get(0).getValue());
-                OperatorPropertiesUtil.typeOpRec(gbyAggRef, context);
-            }
-            return true;
-        } else {
-            return false;
-        }
-    }
-
-    private LogicalVariable findListifiedVariable(AggregateOperator gbyAgg, LogicalVariable varFromGroupAgg) {
-        int n = gbyAgg.getVariables().size();
-
-        for (int i = 0; i < n; i++) {
-            if (gbyAgg.getVariables().get(i).equals(varFromGroupAgg)) {
-                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) gbyAgg.getExpressions().get(i)
-                        .getValue();
-                if (fce.getFunctionIdentifier().equals(AsterixBuiltinFunctions.LISTIFY)) {
-                    ILogicalExpression argExpr = fce.getArguments().get(0).getValue();
-                    if (((AbstractLogicalExpression) argExpr).getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                        return ((VariableReferenceExpression) argExpr).getVariableReference();
-                    }
-                }
-            }
-        }
-        return null;
-    }
-
-}
\ No newline at end of file


[32/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java
new file mode 100644
index 0000000..418e114
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule inlines unnest functions that are held by variables.
+ * It fixes issue 201.
+ */
+public class InlineUnnestFunctionRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op1))
+            return false;
+        context.addToDontApplySet(this, op1);
+        if (op1.getOperatorTag() != LogicalOperatorTag.UNNEST)
+            return false;
+        UnnestOperator unnestOperator = (UnnestOperator) op1;
+        AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOperator.getExpressionRef()
+                .getValue();
+        //we only inline for the scan-collection function
+        if (expr.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION)
+            return false;
+
+        // inline all variables from an unnesting function call
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+        for (int i = 0; i < args.size(); i++) {
+            ILogicalExpression argExpr = args.get(i).getValue();
+            if (argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                VariableReferenceExpression varExpr = (VariableReferenceExpression) argExpr;
+                inlineVariable(varExpr.getVariableReference(), unnestOperator);
+            }
+        }
+        return true;
+    }
+
+    /**
+     * This method inlines one variable.
+     * 
+     * @param usedVar
+     *            A variable that is used by the scan-collection function in the unnest operator
+     * @param unnestOp
+     *            The unnest operator.
+     * @throws AlgebricksException
+     */
+    private void inlineVariable(LogicalVariable usedVar, UnnestOperator unnestOp) throws AlgebricksException {
+        AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOp.getExpressionRef().getValue();
+        List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList = new ArrayList<Pair<AbstractFunctionCallExpression, Integer>>();
+        getParentFunctionExpression(usedVar, expr, parentAndIndexList);
+        ILogicalExpression usedVarOrginExpr = findUsedVarOrigin(usedVar, unnestOp, (AbstractLogicalOperator) unnestOp
+                .getInputs().get(0).getValue());
+        if (usedVarOrginExpr != null) {
+            for (Pair<AbstractFunctionCallExpression, Integer> parentAndIndex : parentAndIndexList) {
+                //we only rewrite the top scan-collection function
+                if (parentAndIndex.first.getFunctionIdentifier() == AsterixBuiltinFunctions.SCAN_COLLECTION
+                        && parentAndIndex.first == expr) {
+                    unnestOp.getExpressionRef().setValue(usedVarOrginExpr);
+                }
+            }
+        }
+    }
+
+    private void getParentFunctionExpression(LogicalVariable usedVar, ILogicalExpression expr,
+            List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList) {
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+        for (int i = 0; i < args.size(); i++) {
+            ILogicalExpression argExpr = args.get(i).getValue();
+            if (argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                VariableReferenceExpression varExpr = (VariableReferenceExpression) argExpr;
+                if (varExpr.getVariableReference().equals(usedVar))
+                    parentAndIndexList.add(new Pair<AbstractFunctionCallExpression, Integer>(funcExpr, i));
+            }
+            if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                getParentFunctionExpression(usedVar, argExpr, parentAndIndexList);
+            }
+        }
+    }
+
+    private ILogicalExpression findUsedVarOrigin(LogicalVariable usedVar, AbstractLogicalOperator parentOp,
+            AbstractLogicalOperator currentOp) throws AlgebricksException {
+        ILogicalExpression ret = null;
+        if (currentOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+            VariableUtilities.getProducedVariables(currentOp, producedVars);
+            if (producedVars.contains(usedVar)) {
+                AssignOperator assignOp = (AssignOperator) currentOp;
+                int index = assignOp.getVariables().indexOf(usedVar);
+                ILogicalExpression returnedExpr = assignOp.getExpressions().get(index).getValue();
+                if (returnedExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) returnedExpr;
+                    if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(funcExpr.getFunctionIdentifier())) {
+                        // we only inline for unnest functions
+                        removeUnecessaryAssign(parentOp, currentOp, assignOp, index);
+                        ret = returnedExpr;
+                    }
+                } else if (returnedExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                    // recursively inline
+                    VariableReferenceExpression varExpr = (VariableReferenceExpression) returnedExpr;
+                    LogicalVariable var = varExpr.getVariableReference();
+                    ILogicalExpression finalExpr = findUsedVarOrigin(var, currentOp,
+                            (AbstractLogicalOperator) currentOp.getInputs().get(0).getValue());
+                    if (finalExpr != null) {
+                        removeUnecessaryAssign(parentOp, currentOp, assignOp, index);
+                        ret = finalExpr;
+                    }
+                }
+            }
+        } else {
+            for (Mutable<ILogicalOperator> child : currentOp.getInputs()) {
+                ILogicalExpression expr = findUsedVarOrigin(usedVar, currentOp,
+                        (AbstractLogicalOperator) child.getValue());
+                if (expr != null) {
+                    ret = expr;
+                }
+            }
+        }
+        return ret;
+    }
+
+    private void removeUnecessaryAssign(AbstractLogicalOperator parentOp, AbstractLogicalOperator currentOp,
+            AssignOperator assignOp, int index) {
+        assignOp.getVariables().remove(index);
+        assignOp.getExpressions().remove(index);
+        if (assignOp.getVariables().size() == 0) {
+            int opIndex = parentOp.getInputs().indexOf(new MutableObject<ILogicalOperator>(currentOp));
+            parentOp.getInputs().get(opIndex).setValue(assignOp.getInputs().get(0).getValue());
+        }
+    }
+}
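
All of the rewrite rules added in this change follow the same Algebricks rule shape. For reference, here is a minimal sketch of that shape using only the interfaces that appear in the diffs above; the class name and the UNNEST check are illustrative and not part of the change:

    package edu.uci.ics.asterix.optimizer.rules;

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

    // Illustrative skeleton only: fires on UNNEST operators and never changes the plan.
    public class ExampleNoOpRule implements IAlgebraicRewriteRule {

        @Override
        public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
                throws AlgebricksException {
            // Like the rules in this change, do all the work in rewritePost.
            return false;
        }

        @Override
        public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
                throws AlgebricksException {
            AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
            // Guard against re-applying the rule to the same operator.
            if (context.checkIfInDontApplySet(this, op)) {
                return false;
            }
            context.addToDontApplySet(this, op);
            // Only fire on the operator kind the rule targets; returning false means "plan unchanged".
            if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
                return false;
            }
            // A real rule would inspect and rewrite the operator here, returning true on change.
            return false;
        }
    }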

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
new file mode 100644
index 0000000..b64c0e7
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceAutogenerateIDRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        // match: [insert to internal dataset with autogenerated id] - assign - project
+        // produce: insert - assign - assign* - project
+        // **
+        // OR [insert to internal dataset with autogenerated id] - assign - [datasource scan]
+        // produce insert - assign - assign* - datasource scan
+
+        AbstractLogicalOperator currentOp = (AbstractLogicalOperator) opRef.getValue();
+        if (currentOp.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+            return false;
+        }
+
+        InsertDeleteOperator insertOp = (InsertDeleteOperator) currentOp;
+        if (insertOp.getOperation() != Kind.INSERT) {
+            return false;
+        }
+
+        DatasetDataSource dds = (DatasetDataSource) insertOp.getDataSource();
+        boolean autogenerated = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).isAutogenerated();
+        if (!autogenerated) {
+            return false;
+        }
+
+        if (((AqlDataSource) insertOp.getDataSource()).getDatasourceType() != AqlDataSourceType.INTERNAL_DATASET) {
+            return false;
+        }
+
+        AbstractLogicalOperator parentOp = (AbstractLogicalOperator) currentOp.getInputs().get(0).getValue();
+        if (parentOp.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
+            return false;
+        }
+        AssignOperator assignOp = (AssignOperator) parentOp;
+        LogicalVariable inputRecord;
+
+        // Known bug: this will not work for internal datasets with filters, since the pattern becomes [project-assign-assign-insert]; this should be fixed.
+        AbstractLogicalOperator grandparentOp = (AbstractLogicalOperator) parentOp.getInputs().get(0).getValue();
+        if (grandparentOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+            ProjectOperator projectOp = (ProjectOperator) grandparentOp;
+            inputRecord = projectOp.getVariables().get(0);
+        } else if (grandparentOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+            DataSourceScanOperator dssOp = (DataSourceScanOperator) grandparentOp;
+            inputRecord = dssOp.getVariables().get(0);
+        } else {
+            return false;
+        }
+
+        List<String> pkFieldName = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).getPrimaryKey().get(
+                0);
+        ILogicalExpression rec0 = new VariableReferenceExpression(inputRecord);
+        ILogicalExpression rec1 = createPrimaryKeyRecordExpression(pkFieldName);
+        ILogicalExpression mergedRec = createRecordMergeFunction(rec0, rec1);
+        ILogicalExpression nonNullMergedRec = createNotNullFunction(mergedRec);
+
+        LogicalVariable v = context.newVar();
+        AssignOperator newAssign = new AssignOperator(v, new MutableObject<ILogicalExpression>(nonNullMergedRec));
+        newAssign.getInputs().add(new MutableObject<ILogicalOperator>(grandparentOp));
+        assignOp.getInputs().set(0, new MutableObject<ILogicalOperator>(newAssign));
+        VariableUtilities.substituteVariables(assignOp, inputRecord, v, context);
+        VariableUtilities.substituteVariables(insertOp, inputRecord, v, context);
+        context.computeAndSetTypeEnvironmentForOperator(newAssign);
+        context.computeAndSetTypeEnvironmentForOperator(assignOp);
+        context.computeAndSetTypeEnvironmentForOperator(insertOp);
+        return true;
+    }
+
+    private ILogicalExpression createNotNullFunction(ILogicalExpression mergedRec) {
+        List<Mutable<ILogicalExpression>> args = new ArrayList<>();
+        args.add(new MutableObject<ILogicalExpression>(mergedRec));
+        AbstractFunctionCallExpression notNullFn = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL), args);
+        return notNullFn;
+    }
+
+    private AbstractFunctionCallExpression createPrimaryKeyRecordExpression(List<String> pkFieldName) {
+        //Create lowest level of nested uuid
+        AbstractFunctionCallExpression uuidFn = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_UUID));
+        List<Mutable<ILogicalExpression>> openRecordConsArgs = new ArrayList<>();
+        Mutable<ILogicalExpression> pkFieldNameExpression = new MutableObject<ILogicalExpression>(
+                new ConstantExpression(new AsterixConstantValue(new AString(pkFieldName.get(pkFieldName.size() - 1)))));
+        openRecordConsArgs.add(pkFieldNameExpression);
+        Mutable<ILogicalExpression> pkFieldValueExpression = new MutableObject<ILogicalExpression>(uuidFn);
+        openRecordConsArgs.add(pkFieldValueExpression);
+        AbstractFunctionCallExpression openRecFn = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), openRecordConsArgs);
+
+        //Create higher levels
+        for (int i = pkFieldName.size() - 2; i > -1; i--) {
+            AString fieldName = new AString(pkFieldName.get(i));
+            openRecordConsArgs = new ArrayList<>();
+            openRecordConsArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+                    new AsterixConstantValue(fieldName))));
+            openRecordConsArgs.add(new MutableObject<ILogicalExpression>(openRecFn));
+            openRecFn = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), openRecordConsArgs);
+        }
+
+        return openRecFn;
+    }
+
+    private AbstractFunctionCallExpression createRecordMergeFunction(ILogicalExpression rec0, ILogicalExpression rec1) {
+        List<Mutable<ILogicalExpression>> recordMergeFnArgs = new ArrayList<>();
+        recordMergeFnArgs.add(new MutableObject<>(rec0));
+        recordMergeFnArgs.add(new MutableObject<>(rec1));
+        AbstractFunctionCallExpression recordMergeFn = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.RECORD_MERGE), recordMergeFnArgs);
+        return recordMergeFn;
+    }
+}
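
The key step in IntroduceAutogenerateIDRule above is building an open-record-constructor whose single field holds a freshly generated UUID; that record is then record-merged with the incoming payload and wrapped in not-null. A hedged sketch of that construction for a top-level "id" field, using only expression classes that appear in this diff (the helper class and method names are hypothetical):

    package edu.uci.ics.asterix.optimizer.rules;

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.aql.util.FunctionUtils;
    import edu.uci.ics.asterix.om.base.AString;
    import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
    import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;

    // Hypothetical helper, not part of this change: builds the record {"id": create-uuid()}
    // the same way createPrimaryKeyRecordExpression does for a non-nested primary key,
    // i.e. by passing alternating field-name / field-value arguments to OPEN_RECORD_CONSTRUCTOR.
    public final class AutogeneratedIdSketch {

        private AutogeneratedIdSketch() {
        }

        public static AbstractFunctionCallExpression createUuidRecord(String pkFieldName) {
            AbstractFunctionCallExpression uuidFn = new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_UUID));
            List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
            // field name
            args.add(new MutableObject<ILogicalExpression>(
                    new ConstantExpression(new AsterixConstantValue(new AString(pkFieldName)))));
            // field value
            args.add(new MutableObject<ILogicalExpression>(uuidFn));
            return new ScalarFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), args);
        }
    }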

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
new file mode 100644
index 0000000..545a336
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.functions.AsterixExternalScalarFunctionInfo;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule provides the same type-casting handling as the IntroduceDynamicTypeCastRule does.
+ * The only difference is that this rule is intended for external functions (User-Defined Functions).
+ * Refer to IntroduceDynamicTypeCastRule for details.
+ */
+public class IntroduceDynamicTypeCastForExternalFunctionRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        /**
+         * pattern match: distribute_result - project - assign (external function call) - assign (open_record_constructor)
+         * resulting plan: distribute_result - project - assign (external function call) - assign (cast-record) - assign(open_record_constructor)
+         */
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.DISTRIBUTE_RESULT)
+            return false;
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
+        if (op2.getOperatorTag() != LogicalOperatorTag.PROJECT)
+            return false;
+        AbstractLogicalOperator op3 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
+        if (op3.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+            return false;
+        AbstractLogicalOperator op4 = (AbstractLogicalOperator) op3.getInputs().get(0).getValue();
+        if (op4.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+            return false;
+
+        // Op1 : assign (external function call), Op2 : assign (open_record_constructor)
+        AssignOperator assignOp1 = (AssignOperator) op3;
+        AssignOperator assignOp2 = (AssignOperator) op4;
+
+        // Checks whether open-record-constructor is called to create a record in the first assign operator - assignOp2
+        FunctionIdentifier fid = null;
+        ILogicalExpression assignExpr = assignOp2.getExpressions().get(0).getValue();
+        if (assignExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) assignOp2.getExpressions().get(0)
+                    .getValue();
+            fid = funcExpr.getFunctionIdentifier();
+
+            if (fid != AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR) {
+                return false;
+            }
+        } else {
+            return false;
+        }
+
+        // Checks whether an external function is called in the second assign operator - assignOp1
+        assignExpr = assignOp1.getExpressions().get(0).getValue();
+        ScalarFunctionCallExpression funcExpr = null;
+        if (assignExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            funcExpr = (ScalarFunctionCallExpression) assignOp1.getExpressions().get(0).getValue();
+            fid = funcExpr.getFunctionIdentifier();
+
+            // Checks whether this is an internal function call. Then, we return false.
+            if (AsterixBuiltinFunctions.getBuiltinFunctionIdentifier(fid) != null) {
+                return false;
+            }
+
+        } else {
+            return false;
+        }
+
+        AsterixExternalScalarFunctionInfo finfo = (AsterixExternalScalarFunctionInfo) funcExpr.getFunctionInfo();
+        ARecordType requiredRecordType = (ARecordType) finfo.getArgumenTypes().get(0);
+
+        List<LogicalVariable> recordVar = new ArrayList<LogicalVariable>();
+        recordVar.addAll(assignOp2.getVariables());
+
+        IVariableTypeEnvironment env = assignOp2.computeOutputTypeEnvironment(context);
+        IAType inputRecordType = (IAType) env.getVarType(recordVar.get(0));
+
+        /** the input record type can be a union type -- for the case when it comes from a subplan or left-outer join */
+        boolean checkNull = false;
+        while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
+            /** while-loop for the case in which there is a nested, multi-level union */
+            inputRecordType = ((AUnionType) inputRecordType).getNullableType();
+            checkNull = true;
+        }
+
+        /** see whether the input record type needs to be cast */
+        boolean cast = !IntroduceDynamicTypeCastRule.compatible(requiredRecordType, inputRecordType);
+
+        if (checkNull) {
+            recordVar.set(0, IntroduceDynamicTypeCastRule.addWrapperFunction(requiredRecordType, recordVar.get(0),
+                    assignOp1, context, AsterixBuiltinFunctions.NOT_NULL));
+        }
+        if (cast) {
+            IntroduceDynamicTypeCastRule.addWrapperFunction(requiredRecordType, recordVar.get(0), assignOp1, context,
+                    AsterixBuiltinFunctions.CAST_RECORD);
+        }
+        return cast || checkNull;
+    }
+
+}
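
IntroduceDynamicTypeCastForExternalFunctionRule above begins by walking a fixed operator pattern (DISTRIBUTE_RESULT - PROJECT - ASSIGN - ASSIGN) down the first input of each operator, bailing out as soon as a tag does not match. The same idiom recurs in several rules in this change; a hedged sketch of it as a reusable check (class and method names are hypothetical):

    package edu.uci.ics.asterix.optimizer.rules;

    import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
    import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;

    // Hypothetical helper, not part of this change.
    public final class OperatorPatternSketch {

        private OperatorPatternSketch() {
        }

        // Returns true iff the operators reached by repeatedly taking the first input,
        // starting at 'start', carry exactly the given tags in order.
        public static boolean matchesDownwardChain(AbstractLogicalOperator start, LogicalOperatorTag... tags) {
            AbstractLogicalOperator op = start;
            for (int i = 0; i < tags.length; i++) {
                if (op.getOperatorTag() != tags[i]) {
                    return false;
                }
                if (i < tags.length - 1) {
                    if (op.getInputs().isEmpty()) {
                        return false;
                    }
                    op = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
                }
            }
            return true;
        }
    }

With such a helper, the four tag checks at the top of rewritePost would collapse to a single matchesDownwardChain(op1, LogicalOperatorTag.DISTRIBUTE_RESULT, LogicalOperatorTag.PROJECT, LogicalOperatorTag.ASSIGN, LogicalOperatorTag.ASSIGN) call; the chain would still be re-walked afterwards to bind the individual assign operators.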

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
new file mode 100644
index 0000000..64c34ed
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * Dynamically cast a variable from its type to a specified required type, in a
+ * recursive way. It enables: 1. bag-based fields in a record, 2. bidirectional
+ * casting of an open field and a matched closed field, and 3. insertion of null
+ * fields when necessary.
+ * Here is an example: A record { "hobby": {{"music", "coding"}}, "id": "001",
+ * "name": "Person Three"} which conforms to closed type ( id: string, name:
+ * string, hobby: {{string}}? ) can be cast to an open type (id: string ), or
+ * vice versa.
+ * However, if the input record is a variable, then we don't know its exact
+ * field layout at compile time. For example, records conforming to the same
+ * type can have different field orderings and different open parts. That's why
+ * we need dynamic type casting.
+ * Note that as we can see in the example, the ordering of fields of a record is
+ * not required. Since the open/closed part of a record has completely different
+ * underlying memory/storage layout, a cast-record function will change the
+ * layout as specified at runtime.
+ * Implementation wise, this rule checks the target dataset type and the input
+ * record type, and if the types are different, then it plugs in an assign with
+ * a cast-record function, and projects away the original (uncast) field.
+ */
+public class IntroduceDynamicTypeCastRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        // Depending on the operator type, we need to extract the following pieces of information.
+        AbstractLogicalOperator op;
+        ARecordType requiredRecordType;
+        LogicalVariable recordVar;
+
+        // We identify INSERT and DISTRIBUTE_RESULT operators.
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        switch (op1.getOperatorTag()) {
+            case SINK: {
+                /**
+                 * pattern match: sink insert assign
+                 * resulting plan: sink-insert-project-assign
+                 */
+
+                AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
+                if (op2.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
+                    InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
+                    if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
+                        return false;
+
+                    // Remember this is the operator we need to modify
+                    op = insertDeleteOp;
+
+                    // Derive the required ARecordType based on the schema of the AqlDataSource
+                    InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) op2;
+                    AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
+                    IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
+                    requiredRecordType = (ARecordType) schemaTypes[schemaTypes.length - 1];
+
+                    // Derive the Variable which we will potentially wrap with cast/null functions
+                    ILogicalExpression expr = insertDeleteOperator.getPayloadExpression().getValue();
+                    List<LogicalVariable> payloadVars = new ArrayList<LogicalVariable>();
+                    expr.getUsedVariables(payloadVars);
+                    recordVar = payloadVars.get(0);
+                } else {
+                    return false;
+                }
+
+                break;
+            }
+            case DISTRIBUTE_RESULT: {
+                // First, see if there was an output-record-type specified
+                requiredRecordType = (ARecordType) op1.getAnnotations().get("output-record-type");
+                if (requiredRecordType == null) {
+                    return false;
+                }
+
+                // Remember this is the operator we need to modify
+                op = op1;
+
+                // The Variable we want is the (hopefully singular, hopefully record-typed) live variable
+                // of the singular input operator of the DISTRIBUTE_RESULT
+                if (op.getInputs().size() > 1) {
+                    // Hopefully not possible?
+                    throw new AlgebricksException(
+                            "output-record-type defined for expression with multiple input operators");
+                }
+                AbstractLogicalOperator input = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+                List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
+                VariableUtilities.getLiveVariables(input, liveVars);
+                if (liveVars.size() > 1) {
+                    throw new AlgebricksException(
+                            "Expression with multiple fields cannot be cast to output-record-type!");
+                }
+                recordVar = liveVars.get(0);
+
+                break;
+            }
+            default: {
+                return false;
+            }
+        }
+
+        // Derive the statically-computed type of the record
+        IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
+        IAType inputRecordType = (IAType) env.getVarType(recordVar);
+
+        /** the input record type can be a union type -- for the case when it comes from a subplan or left-outer join */
+        boolean checkNull = false;
+        while (NonTaggedFormatUtil.isOptional(inputRecordType)) {
+            /** while-loop for the case in which there is a nested, multi-level union */
+            inputRecordType = ((AUnionType) inputRecordType).getNullableType();
+            checkNull = true;
+        }
+
+        /** see whether the input record type needs to be cast */
+        boolean cast = !compatible(requiredRecordType, inputRecordType);
+
+        if (checkNull) {
+            recordVar = addWrapperFunction(requiredRecordType, recordVar, op, context, AsterixBuiltinFunctions.NOT_NULL);
+        }
+        if (cast) {
+            addWrapperFunction(requiredRecordType, recordVar, op, context, AsterixBuiltinFunctions.CAST_RECORD);
+        }
+        return cast || checkNull;
+    }
+
+    /**
+     * Inject a function to wrap a variable when necessary
+     *
+     * @param requiredRecordType
+     *            the required record type
+     * @param recordVar
+     *            the record variable
+     * @param parent
+     *            the current parent operator to be rewritten
+     * @param context
+     *            the optimization context
+     * @param fd
+     *            the function to be injected
+     * @return the new variable that replaces recordVar if a wrapper function is injected; null otherwise.
+     * @throws AlgebricksException
+     */
+    public static LogicalVariable addWrapperFunction(ARecordType requiredRecordType, LogicalVariable recordVar,
+            ILogicalOperator parent, IOptimizationContext context, FunctionIdentifier fd) throws AlgebricksException {
+        List<Mutable<ILogicalOperator>> opRefs = parent.getInputs();
+        for (int index = 0; index < opRefs.size(); index++) {
+            Mutable<ILogicalOperator> opRef = opRefs.get(index);
+            ILogicalOperator op = opRef.getValue();
+
+            /** get produced vars */
+            List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+            VariableUtilities.getProducedVariables(op, producedVars);
+            IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
+            for (int i = 0; i < producedVars.size(); i++) {
+                LogicalVariable var = producedVars.get(i);
+                if (var.equals(recordVar)) {
+                    /** insert an assign operator to call the function on-top-of the variable */
+                    IAType actualType = (IAType) env.getVarType(var);
+                    AbstractFunctionCallExpression cast = new ScalarFunctionCallExpression(
+                            FunctionUtils.getFunctionInfo(fd));
+                    cast.getArguments()
+                            .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
+                    /** enforce the required record type */
+                    TypeComputerUtilities.setRequiredAndInputTypes(cast, requiredRecordType, actualType);
+                    LogicalVariable newAssignVar = context.newVar();
+                    AssignOperator newAssignOperator = new AssignOperator(newAssignVar,
+                            new MutableObject<ILogicalExpression>(cast));
+                    newAssignOperator.getInputs().add(new MutableObject<ILogicalOperator>(op));
+                    opRef.setValue(newAssignOperator);
+                    context.computeAndSetTypeEnvironmentForOperator(newAssignOperator);
+                    newAssignOperator.computeOutputTypeEnvironment(context);
+                    VariableUtilities.substituteVariables(parent, recordVar, newAssignVar, context);
+                    return newAssignVar;
+                }
+            }
+            /** recursive descend to the operator who produced the recordVar */
+            LogicalVariable replacedVar = addWrapperFunction(requiredRecordType, recordVar, op, context, fd);
+            if (replacedVar != null) {
+                /** substitute the recordVar by the replacedVar for operators who uses recordVar */
+                VariableUtilities.substituteVariables(parent, recordVar, replacedVar, context);
+                return replacedVar;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Check whether the required record type and the input type are compatible
+     *
+     * @param reqType
+     * @param inputType
+     * @return true if compatible; false otherwise
+     * @throws AlgebricksException
+     */
+    public static boolean compatible(ARecordType reqType, IAType inputType) throws AlgebricksException {
+        if (inputType.getTypeTag() == ATypeTag.ANY) {
+            return false;
+        }
+        if (inputType.getTypeTag() != ATypeTag.RECORD) {
+            throw new AlgebricksException("The input type " + inputType + " is not a valid record type!");
+        }
+        ARecordType inputRecType = (ARecordType) inputType;
+        if (reqType.isOpen() != inputRecType.isOpen()) {
+            return false;
+        }
+
+        IAType[] reqTypes = reqType.getFieldTypes();
+        String[] reqFieldNames = reqType.getFieldNames();
+        IAType[] inputTypes = inputRecType.getFieldTypes();
+        String[] inputFieldNames = ((ARecordType) inputType).getFieldNames();
+
+        if (reqTypes.length != inputTypes.length) {
+            return false;
+        }
+        for (int i = 0; i < reqTypes.length; i++) {
+            if (!reqFieldNames[i].equals(inputFieldNames[i])) {
+                return false;
+            }
+            IAType reqTypeInside = reqTypes[i];
+            if (NonTaggedFormatUtil.isOptional(reqTypes[i])) {
+                reqTypeInside = ((AUnionType) reqTypes[i]).getNullableType();
+            }
+            IAType inputTypeInside = inputTypes[i];
+            if (NonTaggedFormatUtil.isOptional(inputTypes[i])) {
+                if (!NonTaggedFormatUtil.isOptional(reqTypes[i])) {
+                    /** if the required type is not optional, the two types are incompatible */
+                    return false;
+                }
+                inputTypeInside = ((AUnionType) inputTypes[i]).getNullableType();
+            }
+            if (inputTypeInside.getTypeTag() != ATypeTag.NULL && !reqTypeInside.equals(inputTypeInside)) {
+                return false;
+            }
+        }
+        return true;
+    }
+}
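
Both cast rules in this change repeat the same loop for peeling optional (union-with-null) wrappers off the input record type before comparing it with the required type. A hedged sketch of that loop factored into a small helper, using only calls that appear in the diff (the class and method names are hypothetical):

    package edu.uci.ics.asterix.optimizer.rules;

    import edu.uci.ics.asterix.om.types.AUnionType;
    import edu.uci.ics.asterix.om.types.IAType;
    import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;

    // Hypothetical helper, not part of this change.
    public final class OptionalTypeSketch {

        private OptionalTypeSketch() {
        }

        // Strips possibly nested optional wrappers and returns the non-optional core type,
        // mirroring the while-loops in IntroduceDynamicTypeCastRule and
        // IntroduceDynamicTypeCastForExternalFunctionRule.
        public static IAType unwrapOptional(IAType type) {
            IAType t = type;
            while (NonTaggedFormatUtil.isOptional(t)) {
                t = ((AUnionType) t).getNullableType();
            }
            return t;
        }
    }

Whether a NOT_NULL wrapper is still needed can be decided, as in the rules above, by checking NonTaggedFormatUtil.isOptional on the original type before unwrapping.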

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
new file mode 100644
index 0000000..543a132
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.optimizer.rules.typecast.StaticTypeCastUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule enforces types for function expressions that contain list constructor function calls.
+ * The list constructor is special because a nested list is inferred as type List<ANY>; the bottom-up
+ * type inference (InferTypeRule in Algebricks) cannot infer the precise nested type, so this rule enforces it.
+ * To preserve the generality of Algebricks, the type enforcement is done here, in an ASTERIX-specific rule.
+ */
+public class IntroduceEnforcedListTypeRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        if (context.checkIfInDontApplySet(this, opRef.getValue()))
+            return false;
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        context.addToDontApplySet(this, opRef.getValue());
+
+        /**
+         * rewrite list constructor types for list constructor functions
+         */
+        List<Mutable<ILogicalExpression>> expressions;
+        switch (op.getOperatorTag()) {
+            case ASSIGN:
+                AbstractAssignOperator assignOp = (AbstractAssignOperator) op;
+                expressions = assignOp.getExpressions();
+                break;
+            case UNNEST:
+                AbstractUnnestOperator unnestOp = (AbstractUnnestOperator) op;
+                expressions = Collections.singletonList(unnestOp.getExpressionRef());
+                break;
+            default:
+                return false;
+        }
+        IVariableTypeEnvironment env = op.computeOutputTypeEnvironment(context);
+        return rewriteExpressions(expressions, env);
+    }
+
+    private boolean rewriteExpressions(List<Mutable<ILogicalExpression>> expressions, IVariableTypeEnvironment env)
+            throws AlgebricksException {
+        boolean changed = false;
+        for (Mutable<ILogicalExpression> exprRef : expressions) {
+            ILogicalExpression expr = exprRef.getValue();
+            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) expr;
+                IAType exprType = (IAType) env.getType(argFuncExpr);
+                if (StaticTypeCastUtil.rewriteListExpr(argFuncExpr, exprType, exprType, env)) {
+                    TypeComputerUtilities.resetRequiredAndInputTypes(argFuncExpr);
+                    changed = true;
+                }
+            }
+        }
+        return changed;
+    }
+
+}
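
A minimal, self-contained sketch of the idea described in the rule's javadoc above: a nested list constructor is initially typed as a list of ANY, and the enforcement pushes the declared (required) item type down into every nested constructor, loosely mirroring what the rule delegates to StaticTypeCastUtil.rewriteListExpr. The toy types and classes below (ToyType, ListConstructorExpr, EnforcedListTypeSketch) are hypothetical illustrations, not the AsterixDB API.

import java.util.ArrayList;
import java.util.List;

// Minimal stand-ins for a type system: ANY, one scalar type, and an ordered-list type.
class ToyType {
    static final ToyType ANY = new ToyType("ANY", null);
    static final ToyType INT32 = new ToyType("INT32", null);
    final String name;
    final ToyType itemType;                         // non-null only for list types
    ToyType(String name, ToyType itemType) { this.name = name; this.itemType = itemType; }
    static ToyType listOf(ToyType item) { return new ToyType("LIST", item); }
    boolean isList() { return itemType != null; }
    public String toString() { return isList() ? "LIST<" + itemType + ">" : name; }
}

// A list-constructor expression whose inferred type starts out as LIST<ANY>.
class ListConstructorExpr {
    final List<Object> items = new ArrayList<>();   // nested ListConstructorExpr or scalar values
    ToyType inferredType = ToyType.listOf(ToyType.ANY);
    ToyType requiredType;                           // set by the enforcement below
}

public class EnforcedListTypeSketch {
    // Push the required list type onto the constructor and recursively onto nested constructors.
    static boolean enforce(ListConstructorExpr expr, ToyType required) {
        boolean changed = expr.requiredType != required;
        expr.requiredType = required;
        for (Object item : expr.items) {
            if (item instanceof ListConstructorExpr && required.itemType.isList()) {
                changed |= enforce((ListConstructorExpr) item, required.itemType);
            }
        }
        return changed;
    }

    public static void main(String[] args) {
        // [[1, 2], [3]] is inferred bottom-up as LIST<ANY> but declared as LIST<LIST<INT32>>.
        ListConstructorExpr inner1 = new ListConstructorExpr();
        inner1.items.add(1);
        inner1.items.add(2);
        ListConstructorExpr inner2 = new ListConstructorExpr();
        inner2.items.add(3);
        ListConstructorExpr outer = new ListConstructorExpr();
        outer.items.add(inner1);
        outer.items.add(inner2);

        ToyType declared = ToyType.listOf(ToyType.listOf(ToyType.INT32));
        boolean changed = enforce(outer, declared);
        System.out.println("changed=" + changed + ", outer=" + outer.requiredType
                + ", inner=" + inner1.requiredType);
    }
}

In the real rule the required type comes from the variable type environment computed for the ASSIGN or UNNEST operator, and the enforcement is only attempted for function-call expressions.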

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
new file mode 100644
index 0000000..782f7ed
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.algebra.operators.CommitOperator;
+import edu.uci.ics.asterix.algebra.operators.physical.BTreeSearchPOperator;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataImplConfig;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.DataSourceScanPOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceInstantLockSearchCallbackRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    private void extractDataSourcesInfo(AbstractLogicalOperator op,
+            Map<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>> dataSourcesMap) {
+
+        for (int i = 0; i < op.getInputs().size(); ++i) {
+            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
+
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
+                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
+                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                    FunctionIdentifier fid = f.getFunctionIdentifier();
+                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                        throw new IllegalStateException();
+                    }
+                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                    jobGenParams.readFromFuncArgs(f.getArguments());
+                    boolean isPrimaryIndex = jobGenParams.isPrimaryIndex();
+                    String indexName = jobGenParams.getIndexName();
+                    if (isPrimaryIndex) {
+                        if (dataSourcesMap.containsKey(indexName)) {
+                            ++(dataSourcesMap.get(indexName).first);
+                        } else {
+                            dataSourcesMap.put(indexName, new Triple<Integer, LogicalOperatorTag, IPhysicalOperator>(1,
+                                    LogicalOperatorTag.UNNEST_MAP, unnestMapOp.getPhysicalOperator()));
+                        }
+                    }
+                }
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
+                String datasourceName = ((AqlDataSource) dataSourceScanOp.getDataSource()).getDatasourceName();
+                if (dataSourcesMap.containsKey(datasourceName)) {
+                    ++(dataSourcesMap.get(datasourceName).first);
+                } else {
+                    dataSourcesMap.put(datasourceName, new Triple<Integer, LogicalOperatorTag, IPhysicalOperator>(1,
+                            LogicalOperatorTag.DATASOURCESCAN, dataSourceScanOp.getPhysicalOperator()));
+                }
+            }
+            extractDataSourcesInfo(descendantOp, dataSourcesMap);
+        }
+
+    }
+
+    private boolean checkIfRuleIsApplicable(AbstractLogicalOperator op) {
+        if (op.getPhysicalOperator() == null) {
+            return false;
+        }
+        if (op.getOperatorTag() == LogicalOperatorTag.EXTENSION_OPERATOR) {
+            ExtensionOperator extensionOp = (ExtensionOperator) op;
+            if (extensionOp.getDelegate() instanceof CommitOperator) {
+                return true;
+            }
+        }
+        if (op.getOperatorTag() == LogicalOperatorTag.DISTRIBUTE_RESULT
+                || op.getOperatorTag() == LogicalOperatorTag.WRITE_RESULT) {
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+
+        if (!checkIfRuleIsApplicable(op)) {
+            return false;
+        }
+        Map<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>> dataSourcesMap = new HashMap<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>>();
+        extractDataSourcesInfo(op, dataSourcesMap);
+
+        boolean introducedInstantLock = false;
+
+        Iterator<Map.Entry<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>>> it = dataSourcesMap
+                .entrySet().iterator();
+        while (it.hasNext()) {
+            Entry<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>> entry = it.next();
+            Triple<Integer, LogicalOperatorTag, IPhysicalOperator> triple = entry.getValue();
+            if (triple.first == 1) {
+                AqlMetadataImplConfig aqlMetadataImplConfig = new AqlMetadataImplConfig(true);
+                if (triple.second == LogicalOperatorTag.UNNEST_MAP) {
+                    BTreeSearchPOperator pOperator = (BTreeSearchPOperator) triple.third;
+                    pOperator.setImplConfig(aqlMetadataImplConfig);
+                    introducedInstantLock = true;
+                } else {
+                    DataSourceScanPOperator pOperator = (DataSourceScanPOperator) triple.third;
+                    pOperator.setImplConfig(aqlMetadataImplConfig);
+                    introducedInstantLock = true;
+                }
+            }
+
+        }
+        return introducedInstantLock;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
new file mode 100644
index 0000000..acd3b13
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.MaterializePOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceMaterializationForInsertWithSelfScanRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
+            return false;
+        }
+
+        InsertDeleteOperator insertOp = (InsertDeleteOperator) op;
+        boolean sameDataset = checkIfInsertAndScanDatasetsSame(op, ((DatasetDataSource) insertOp.getDataSource())
+                .getDataset().getDatasetName());
+
+        if (sameDataset) {
+            MaterializeOperator materializeOperator = new MaterializeOperator();
+            MaterializePOperator materializePOperator = new MaterializePOperator(true);
+            materializeOperator.setPhysicalOperator(materializePOperator);
+
+            materializeOperator.getInputs().add(
+                    new MutableObject<ILogicalOperator>(insertOp.getInputs().get(0).getValue()));
+            context.computeAndSetTypeEnvironmentForOperator(materializeOperator);
+
+            insertOp.getInputs().clear();
+            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(materializeOperator));
+            context.computeAndSetTypeEnvironmentForOperator(insertOp);
+            return true;
+        } else {
+            return false;
+        }
+
+    }
+
+    private boolean checkIfInsertAndScanDatasetsSame(AbstractLogicalOperator op, String insertDatasetName) {
+        boolean sameDataset = false;
+        for (int i = 0; i < op.getInputs().size(); ++i) {
+            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
+
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
+                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
+                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                    FunctionIdentifier fid = f.getFunctionIdentifier();
+                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                        throw new IllegalStateException();
+                    }
+                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                    jobGenParams.readFromFuncArgs(f.getArguments());
+                    boolean isPrimaryIndex = jobGenParams.isPrimaryIndex();
+                    String indexName = jobGenParams.getIndexName();
+                    if (isPrimaryIndex && indexName.compareTo(insertDatasetName) == 0) {
+                        return true;
+                    }
+                }
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
+                AqlDataSource ds = (AqlDataSource) dataSourceScanOp.getDataSource();
+                if (ds.getDatasourceType() != AqlDataSourceType.FEED
+                        && ds.getDatasourceType() != AqlDataSourceType.LOADABLE) {
+                    if (((DatasetDataSource) ds).getDataset().getDatasetName().compareTo(insertDatasetName) == 0) {
+                        return true;
+                    }
+                }
+            }
+            sameDataset = checkIfInsertAndScanDatasetsSame(descendantOp, insertDatasetName);
+            if (sameDataset) {
+                break;
+            }
+        }
+        return sameDataset;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
new file mode 100644
index 0000000..f4349c3
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.declared.FeedDataSource;
+import edu.uci.ics.asterix.metadata.entities.Feed;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RandomPartitionPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceRandomPartitioningFeedComputationRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        ILogicalOperator op = opRef.getValue();
+        if (!op.getOperatorTag().equals(LogicalOperatorTag.ASSIGN)) {
+            return false;
+        }
+
+        ILogicalOperator opChild = op.getInputs().get(0).getValue();
+        if (!opChild.getOperatorTag().equals(LogicalOperatorTag.DATASOURCESCAN)) {
+            return false;
+        }
+
+        DataSourceScanOperator scanOp = (DataSourceScanOperator) opChild;
+        AqlDataSource dataSource = (AqlDataSource) scanOp.getDataSource();
+        if (!dataSource.getDatasourceType().equals(AqlDataSourceType.FEED)) {
+            return false;
+        }
+
+        final FeedDataSource feedDataSource = (FeedDataSource) dataSource;
+        Feed feed = feedDataSource.getFeed();
+        if (feed.getAppliedFunction() == null) {
+            return false;
+        }
+
+        ExchangeOperator exchangeOp = new ExchangeOperator();
+        INodeDomain domain = new INodeDomain() {
+            @Override
+            public boolean sameAs(INodeDomain domain) {
+                return domain == this;
+            }
+
+            @Override
+            public Integer cardinality() {
+                return feedDataSource.getComputeCardinality();
+            }
+        };
+
+        exchangeOp.setPhysicalOperator(new RandomPartitionPOperator(domain));
+        op.getInputs().get(0).setValue(exchangeOp);
+        exchangeOp.getInputs().add(new MutableObject<ILogicalOperator>(scanOp));
+        ExecutionMode em = ((AbstractLogicalOperator) scanOp).getExecutionMode();
+        exchangeOp.setExecutionMode(em);
+        exchangeOp.computeDeliveredPhysicalProperties(context);
+        context.computeAndSetTypeEnvironmentForOperator(exchangeOp);
+
+        AssignOperator assignOp = (AssignOperator) opRef.getValue();
+        AssignPOperator assignPhyOp = (AssignPOperator) assignOp.getPhysicalOperator();
+        assignPhyOp.setCardinalityConstraint(domain.cardinality());
+
+        return true;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        return false;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
new file mode 100644
index 0000000..d44d762
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.algebra.operators.CommitOperator;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule searches for project and assign operators in an insert/delete/update plan and
+ * passes a hint to each of them. The hint makes the project and assign operators push frames to
+ * the next operator without waiting until the frames are full. The purpose is to reduce the time
+ * that exclusive locks are held on the keys that have been inserted, and to allow feed batching
+ * to work correctly.
+ *
+ * @author salsubaiee
+ */
+public class IntroduceRapidFrameFlushProjectAssignRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    private boolean checkIfRuleIsApplicable(AbstractLogicalOperator op) {
+        if (op.getOperatorTag() != LogicalOperatorTag.EXTENSION_OPERATOR) {
+            return false;
+        }
+        ExtensionOperator extensionOp = (ExtensionOperator) op;
+        if (!(extensionOp.getDelegate() instanceof CommitOperator)) {
+            return false;
+        }
+
+        for (int i = 0; i < op.getInputs().size(); ++i) {
+            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
+
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT
+                    || descendantOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                if (descendantOp.getPhysicalOperator() == null) {
+                    return false;
+                }
+            }
+            checkIfRuleIsApplicable(descendantOp);
+        }
+        return true;
+    }
+
+    private boolean changeRule(AbstractLogicalOperator op) {
+        boolean planModified = false;
+        for (int i = 0; i < op.getInputs().size(); ++i) {
+            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
+
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+                ProjectOperator projectOp = (ProjectOperator) descendantOp;
+                StreamProjectPOperator physicalOp = (StreamProjectPOperator) projectOp.getPhysicalOperator();
+                physicalOp.setRapidFrameFlush(true);
+                planModified = true;
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                AssignOperator assignOp = (AssignOperator) descendantOp;
+                AssignPOperator physicalOp = (AssignPOperator) assignOp.getPhysicalOperator();
+                physicalOp.setRapidFrameFlush(true);
+                planModified = true;
+            }
+            changeRule(descendantOp);
+        }
+        return planModified;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (!checkIfRuleIsApplicable(op)) {
+            return false;
+        }
+        return changeRule(op);
+    }
+}
\ No newline at end of file
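
To picture what the rapid-frame-flush hint described above buys, here is a small self-contained sketch (plain Java, not the Hyracks push runtime): a writer either buffers tuples until a frame is full or, when the flag is set, forwards each tuple immediately, so a downstream commit sees data sooner and exclusive locks on inserted keys are released earlier. All names below (RapidFrameFlushSketch, BufferingWriter, Downstream) are hypothetical.

import java.util.ArrayList;
import java.util.List;

// Toy model of a frame-based push pipeline: tuples accumulate in a frame and are handed to
// the next operator either when the frame fills up or, with rapid flush, after every tuple.
public class RapidFrameFlushSketch {

    interface Downstream {
        void nextFrame(List<String> frame);
    }

    static class BufferingWriter {
        private final int frameCapacity;
        private final boolean rapidFrameFlush;      // the hint the rule sets on project/assign
        private final Downstream downstream;
        private final List<String> frame = new ArrayList<>();

        BufferingWriter(int frameCapacity, boolean rapidFrameFlush, Downstream downstream) {
            this.frameCapacity = frameCapacity;
            this.rapidFrameFlush = rapidFrameFlush;
            this.downstream = downstream;
        }

        void write(String tuple) {
            frame.add(tuple);
            // Without the hint the frame is only pushed when full; with it, every tuple is
            // pushed right away, shortening the time locks are held on inserted keys.
            if (rapidFrameFlush || frame.size() == frameCapacity) {
                flush();
            }
        }

        void close() {
            if (!frame.isEmpty()) {
                flush();
            }
        }

        private void flush() {
            downstream.nextFrame(new ArrayList<>(frame));
            frame.clear();
        }
    }

    public static void main(String[] args) {
        Downstream commit = frame -> System.out.println("commit sees " + frame);
        BufferingWriter lazy = new BufferingWriter(3, false, commit);
        BufferingWriter eager = new BufferingWriter(3, true, commit);
        for (String key : new String[] { "k1", "k2" }) {
            lazy.write(key);    // nothing reaches the commit yet: frame not full
            eager.write(key);   // reaches the commit immediately
        }
        lazy.close();           // buffered tuples are only pushed on close
        eager.close();
    }
}

The actual rule only toggles an existing flag on StreamProjectPOperator and AssignPOperator; the trade-off is more, smaller frames in exchange for lower latency between the insert pipeline and the commit.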


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
new file mode 100644
index 0000000..7d93ecc
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+
+/**
+ * This rule optimizes a join with secondary indexes into an indexed nested-loop join.
+ * Matches the following operator pattern:
+ * (join) <-- (select)? <-- (assign | unnest)+ <-- (datasource scan)
+ * <-- (select)? <-- (assign | unnest)+ <-- (datasource scan | unnest-map)
+ * The order of the join inputs does not matter.
+ * Replaces the above pattern with the following simplified plan:
+ * (select) <-- (assign) <-- (btree search) <-- (sort) <-- (unnest(index search)) <-- (assign) <-- (datasource scan | unnest-map)
+ * The sort is optional, and some access methods may choose not to sort.
+ * Note that for some index-based optimizations we do not remove the triggering
+ * condition from the join, since the secondary index may only act as a filter, and the
+ * final verification must still be done with the original join condition.
+ * The basic outline of this rule is:
+ * 1. Match operator pattern.
+ * 2. Analyze join condition to see if there are optimizable functions (delegated to IAccessMethods).
+ * 3. Check metadata to see if there are applicable indexes.
+ * 4. Choose an index to apply (for now only a single index will be chosen).
+ * 5. Rewrite plan using index (delegated to IAccessMethods).
+ * For left-outer-join, additional patterns are checked and additional treatment is needed as follows:
+ * 1. First it checks if there is a groupByOp above the join: (groupby) <-- (leftouterjoin)
+ * 2. Inherently, only the right-subtree of the lojOp can be used as indexSubtree.
+ * So, the right-subtree must have at least one applicable index on join field(s)
+ * 3. If there is a groupByOp, the null placeholder variable introduced in groupByOp should be taken care of correctly.
+ * Here, the primary key variable from datasourceScanOp replaces the introduced null placeholder variable.
+ * If the primary key is a composite key, then the first of the primary key variables becomes the
+ * null placeholder variable. This null placeholder variable works for all three types of indexes.
+ */
+public class IntroduceJoinAccessMethodRule extends AbstractIntroduceAccessMethodRule {
+
+    protected Mutable<ILogicalOperator> joinRef = null;
+    protected AbstractBinaryJoinOperator join = null;
+    protected AbstractFunctionCallExpression joinCond = null;
+    protected final OptimizableOperatorSubTree leftSubTree = new OptimizableOperatorSubTree();
+    protected final OptimizableOperatorSubTree rightSubTree = new OptimizableOperatorSubTree();
+    protected boolean isLeftOuterJoin = false;
+    protected boolean hasGroupBy = true;
+
+    // Register access methods.
+    protected static Map<FunctionIdentifier, List<IAccessMethod>> accessMethods = new HashMap<FunctionIdentifier, List<IAccessMethod>>();
+    static {
+        registerAccessMethod(BTreeAccessMethod.INSTANCE, accessMethods);
+        registerAccessMethod(RTreeAccessMethod.INSTANCE, accessMethods);
+        registerAccessMethod(InvertedIndexAccessMethod.INSTANCE, accessMethods);
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        clear();
+        setMetadataDeclarations(context);
+
+        // Match operator pattern and initialize optimizable sub trees.
+        if (!matchesOperatorPattern(opRef, context)) {
+            return false;
+        }
+        // Analyze condition on those optimizable subtrees that have a datasource scan.
+        Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs = new HashMap<IAccessMethod, AccessMethodAnalysisContext>();
+        boolean matchInLeftSubTree = false;
+        boolean matchInRightSubTree = false;
+        if (leftSubTree.hasDataSource()) {
+            matchInLeftSubTree = analyzeCondition(joinCond, leftSubTree.assignsAndUnnests, analyzedAMs);
+        }
+        if (rightSubTree.hasDataSource()) {
+            matchInRightSubTree = analyzeCondition(joinCond, rightSubTree.assignsAndUnnests, analyzedAMs);
+        }
+        if (!matchInLeftSubTree && !matchInRightSubTree) {
+            return false;
+        }
+
+        // Set dataset and type metadata.
+        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
+        boolean checkLeftSubTreeMetadata = false;
+        boolean checkRightSubTreeMetadata = false;
+        if (matchInLeftSubTree) {
+            checkLeftSubTreeMetadata = leftSubTree.setDatasetAndTypeMetadata(metadataProvider);
+        }
+        if (matchInRightSubTree) {
+            checkRightSubTreeMetadata = rightSubTree.setDatasetAndTypeMetadata(metadataProvider);
+        }
+        if (!checkLeftSubTreeMetadata && !checkRightSubTreeMetadata) {
+            return false;
+        }
+        if (checkLeftSubTreeMetadata) {
+            fillSubTreeIndexExprs(leftSubTree, analyzedAMs, context);
+        }
+        if (checkRightSubTreeMetadata) {
+            fillSubTreeIndexExprs(rightSubTree, analyzedAMs, context);
+        }
+        pruneIndexCandidates(analyzedAMs);
+
+        // Remove indexes chosen from the left subtree; for a left outer join, only the right subtree can supply the index.
+        if (isLeftOuterJoin) {
+            Iterator<Map.Entry<IAccessMethod, AccessMethodAnalysisContext>> amIt = analyzedAMs.entrySet().iterator();
+            // Check applicability of indexes by access method type.
+            while (amIt.hasNext()) {
+                Map.Entry<IAccessMethod, AccessMethodAnalysisContext> entry = amIt.next();
+                AccessMethodAnalysisContext amCtx = entry.getValue();
+                Iterator<Map.Entry<Index, List<Pair<Integer, Integer>>>> indexIt = amCtx.indexExprsAndVars.entrySet()
+                        .iterator();
+                while (indexIt.hasNext()) {
+                    Map.Entry<Index, List<Pair<Integer, Integer>>> indexEntry = indexIt.next();
+
+                    Index chosenIndex = indexEntry.getKey();
+                    if (!chosenIndex.getDatasetName().equals(rightSubTree.dataset.getDatasetName())) {
+                        indexIt.remove();
+                    }
+                }
+            }
+        }
+
+        // Choose index to be applied.
+        Pair<IAccessMethod, Index> chosenIndex = chooseIndex(analyzedAMs);
+        if (chosenIndex == null) {
+            context.addToDontApplySet(this, join);
+            return false;
+        }
+
+        // Apply plan transformation using chosen index.
+        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(chosenIndex.first);
+
+        //For LOJ with GroupBy, prepare objects to reset LOJ nullPlaceHolderVariable in GroupByOp
+        if (isLeftOuterJoin && hasGroupBy) {
+            analysisCtx.setLOJGroupbyOpRef(opRef);
+            ScalarFunctionCallExpression isNullFuncExpr = AccessMethodUtils
+                    .findLOJIsNullFuncInGroupBy((GroupByOperator) opRef.getValue());
+            analysisCtx.setLOJIsNullFuncInGroupBy(isNullFuncExpr);
+        }
+        boolean res = chosenIndex.first.applyJoinPlanTransformation(joinRef, leftSubTree, rightSubTree,
+                chosenIndex.second, analysisCtx, context, isLeftOuterJoin, hasGroupBy);
+        if (res) {
+            OperatorPropertiesUtil.typeOpRec(opRef, context);
+        }
+        context.addToDontApplySet(this, join);
+        return res;
+    }
+
+    protected boolean matchesOperatorPattern(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        // First check that the operator is a join and its condition is a function call.
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op1)) {
+            return false;
+        }
+
+        boolean isInnerJoin = isInnerJoin(op1);
+        isLeftOuterJoin = isLeftOuterJoin(op1);
+
+        if (!isInnerJoin && !isLeftOuterJoin) {
+            return false;
+        }
+
+        // Set the join operator and its reference.
+        if (isInnerJoin) {
+            joinRef = opRef;
+            join = (InnerJoinOperator) op1;
+        } else {
+            joinRef = op1.getInputs().get(0);
+            join = (LeftOuterJoinOperator) joinRef.getValue();
+        }
+
+        // Check that the join's condition is a function call.
+        ILogicalExpression condExpr = join.getCondition().getValue();
+        if (condExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        joinCond = (AbstractFunctionCallExpression) condExpr;
+        leftSubTree.initFromSubTree(join.getInputs().get(0));
+        rightSubTree.initFromSubTree(join.getInputs().get(1));
+        // One of the subtrees must have a datasource scan.
+        if (leftSubTree.hasDataSourceScan() || rightSubTree.hasDataSourceScan()) {
+            return true;
+        }
+        return false;
+    }
+
+    private boolean isLeftOuterJoin(AbstractLogicalOperator op1) {
+        if (op1.getInputs().size() != 1) {
+            return false;
+        }
+        if (((AbstractLogicalOperator) op1.getInputs().get(0).getValue()).getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
+            return false;
+        }
+        if (op1.getOperatorTag() == LogicalOperatorTag.GROUP) {
+            return true;
+        }
+        hasGroupBy = false;
+        return true;
+    }
+
+    private boolean isInnerJoin(AbstractLogicalOperator op1) {
+        return op1.getOperatorTag() == LogicalOperatorTag.INNERJOIN;
+    }
+
+    @Override
+    public Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods() {
+        return accessMethods;
+    }
+
+    private void clear() {
+        joinRef = null;
+        join = null;
+        joinCond = null;
+        isLeftOuterJoin = false;
+    }
+}
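
Step 2 of the outline above (analyze the join condition and delegate to the registered IAccessMethods) can be pictured with the following self-contained sketch. It mimics the static accessMethods map keyed by the functions each access method can optimize; the interface and class names (ToyAccessMethod, AccessMethodRegistrySketch) are hypothetical stand-ins, not the Algebricks/AsterixDB types.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Toy access-method registry: each access method declares which condition functions it can
// optimize, and the join rule looks up candidates by the function in the join condition.
public class AccessMethodRegistrySketch {

    interface ToyAccessMethod {
        List<String> optimizableFunctions();    // e.g. "eq" for a B-tree, "spatial-intersect" for an R-tree
        String name();
    }

    static final Map<String, List<ToyAccessMethod>> REGISTRY = new HashMap<>();

    static void register(ToyAccessMethod am) {
        for (String f : am.optimizableFunctions()) {
            REGISTRY.computeIfAbsent(f, k -> new ArrayList<>()).add(am);
        }
    }

    static ToyAccessMethod choose(String joinConditionFunction) {
        List<ToyAccessMethod> candidates = REGISTRY.get(joinConditionFunction);
        // The real rule also checks the metadata for applicable indexes and prunes candidates;
        // here we simply take the first registered candidate, if any.
        return (candidates == null || candidates.isEmpty()) ? null : candidates.get(0);
    }

    public static void main(String[] args) {
        register(new ToyAccessMethod() {
            public List<String> optimizableFunctions() { return Arrays.asList("eq"); }
            public String name() { return "btree"; }
        });
        register(new ToyAccessMethod() {
            public List<String> optimizableFunctions() { return Arrays.asList("spatial-intersect"); }
            public String name() { return "rtree"; }
        });
        ToyAccessMethod chosen = choose("eq");
        System.out.println(chosen == null ? "no index nested-loop rewrite"
                : "rewrite join using " + chosen.name());
    }
}

In the real rule the registration goes through BTreeAccessMethod, RTreeAccessMethod, and InvertedIndexAccessMethod, and the chosen pair of access method and index then drives applyJoinPlanTransformation.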

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
new file mode 100644
index 0000000..e441a20
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
@@ -0,0 +1,451 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions.ComparisonKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IntroduceLSMComponentFilterRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        if (!checkIfRuleIsApplicable(opRef, context)) {
+            return false;
+        }
+
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        ILogicalExpression condExpr = ((SelectOperator) op).getCondition().getValue();
+        AccessMethodAnalysisContext analysisCtx = analyzeCondition(condExpr);
+        if (analysisCtx.matchedFuncExprs.isEmpty()) {
+            return false;
+        }
+
+        Dataset dataset = getDataset(op, context);
+        List<String> filterFieldName = null;
+        ARecordType recType = null;
+        if (dataset != null && dataset.getDatasetType() == DatasetType.INTERNAL) {
+            filterFieldName = DatasetUtils.getFilterField(dataset);
+            IAType itemType = ((AqlMetadataProvider) context.getMetadataProvider()).findType(
+                    dataset.getDataverseName(), dataset.getItemTypeName());
+            if (itemType.getTypeTag() == ATypeTag.RECORD) {
+                recType = (ARecordType) itemType;
+            }
+        }
+        if (filterFieldName == null || recType == null) {
+            return false;
+        }
+        List<Index> datasetIndexes = ((AqlMetadataProvider) context.getMetadataProvider()).getDatasetIndexes(
+                dataset.getDataverseName(), dataset.getDatasetName());
+
+        List<IOptimizableFuncExpr> optFuncExprs = new ArrayList<IOptimizableFuncExpr>();
+
+        for (int i = 0; i < analysisCtx.matchedFuncExprs.size(); i++) {
+            IOptimizableFuncExpr optFuncExpr = analysisCtx.matchedFuncExprs.get(i);
+            boolean found = findMacthedExprFieldName(optFuncExpr, op, dataset, recType, datasetIndexes);
+            if (found && optFuncExpr.getFieldName(0).equals(filterFieldName)) {
+                optFuncExprs.add(optFuncExpr);
+            }
+        }
+        if (optFuncExprs.isEmpty()) {
+            return false;
+        }
+        changePlan(optFuncExprs, op, dataset, context);
+
+        OperatorPropertiesUtil.typeOpRec(opRef, context);
+        context.addToDontApplySet(this, op);
+        return true;
+    }
+
+    private AssignOperator createAssignOperator(List<IOptimizableFuncExpr> optFuncExprs,
+            List<LogicalVariable> minFilterVars, List<LogicalVariable> maxFilterVars, IOptimizationContext context) {
+        List<LogicalVariable> assignKeyVarList = new ArrayList<LogicalVariable>();
+        List<Mutable<ILogicalExpression>> assignKeyExprList = new ArrayList<Mutable<ILogicalExpression>>();
+
+        for (IOptimizableFuncExpr optFuncExpr : optFuncExprs) {
+            ComparisonKind ck = AlgebricksBuiltinFunctions.getComparisonType(optFuncExpr.getFuncExpr()
+                    .getFunctionIdentifier());
+            ILogicalExpression searchKeyExpr = new ConstantExpression(optFuncExpr.getConstantVal(0));
+            LogicalVariable var = context.newVar();
+            assignKeyExprList.add(new MutableObject<ILogicalExpression>(searchKeyExpr));
+            assignKeyVarList.add(var);
+            if (ck == ComparisonKind.GE || ck == ComparisonKind.GT) {
+                minFilterVars.add(var);
+            } else if (ck == ComparisonKind.LE || ck == ComparisonKind.LT) {
+                maxFilterVars.add(var);
+            } else if (ck == ComparisonKind.EQ) {
+                minFilterVars.add(var);
+                maxFilterVars.add(var);
+            }
+        }
+        return new AssignOperator(assignKeyVarList, assignKeyExprList);
+    }
+
+    private void changePlan(List<IOptimizableFuncExpr> optFuncExprs, AbstractLogicalOperator op, Dataset dataset,
+            IOptimizationContext context) throws AlgebricksException {
+
+        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        while (descendantOp != null) {
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
+                AqlDataSource ds = (AqlDataSource) dataSourceScanOp.getDataSource();
+                if (dataset.getDatasetName().compareTo(((DatasetDataSource) ds).getDataset().getDatasetName()) == 0) {
+                    List<LogicalVariable> minFilterVars = new ArrayList<LogicalVariable>();
+                    List<LogicalVariable> maxFilterVars = new ArrayList<LogicalVariable>();
+
+                    AssignOperator assignOp = createAssignOperator(optFuncExprs, minFilterVars, maxFilterVars, context);
+
+                    dataSourceScanOp.setMinFilterVars(minFilterVars);
+                    dataSourceScanOp.setMaxFilterVars(maxFilterVars);
+
+                    List<Mutable<ILogicalExpression>> additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+                    for (LogicalVariable var : assignOp.getVariables()) {
+                        additionalFilteringExpressions.add(new MutableObject<ILogicalExpression>(
+                                new VariableReferenceExpression(var)));
+                    }
+
+                    dataSourceScanOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+
+                    assignOp.getInputs().add(
+                            new MutableObject<ILogicalOperator>(dataSourceScanOp.getInputs().get(0).getValue()));
+                    dataSourceScanOp.getInputs().get(0).setValue(assignOp);
+                }
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
+                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
+                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                    FunctionIdentifier fid = f.getFunctionIdentifier();
+                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                        throw new IllegalStateException();
+                    }
+                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                    jobGenParams.readFromFuncArgs(f.getArguments());
+                    if (dataset.getDatasetName().compareTo(jobGenParams.datasetName) == 0) {
+                        List<LogicalVariable> minFilterVars = new ArrayList<LogicalVariable>();
+                        List<LogicalVariable> maxFilterVars = new ArrayList<LogicalVariable>();
+
+                        AssignOperator assignOp = createAssignOperator(optFuncExprs, minFilterVars, maxFilterVars,
+                                context);
+
+                        unnestMapOp.setMinFilterVars(minFilterVars);
+                        unnestMapOp.setMaxFilterVars(maxFilterVars);
+
+                        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+                        for (LogicalVariable var : assignOp.getVariables()) {
+                            additionalFilteringExpressions.add(new MutableObject<ILogicalExpression>(
+                                    new VariableReferenceExpression(var)));
+                        }
+                        unnestMapOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                        assignOp.getInputs().add(
+                                new MutableObject<ILogicalOperator>(unnestMapOp.getInputs().get(0).getValue()));
+                        unnestMapOp.getInputs().get(0).setValue(assignOp);
+                    }
+                }
+            }
+            if (descendantOp.getInputs().isEmpty()) {
+                break;
+            }
+            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+        }
+    }
+
+    private Dataset getDataset(AbstractLogicalOperator op, IOptimizationContext context) throws AlgebricksException {
+        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        while (descendantOp != null) {
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
+                AqlDataSource ds = (AqlDataSource) dataSourceScanOp.getDataSource();
+                return ((DatasetDataSource) ds).getDataset();
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
+                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
+                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                    FunctionIdentifier fid = f.getFunctionIdentifier();
+                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                        throw new IllegalStateException();
+                    }
+                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                    jobGenParams.readFromFuncArgs(f.getArguments());
+                    return ((AqlMetadataProvider) context.getMetadataProvider()).findDataset(
+                            jobGenParams.dataverseName, jobGenParams.datasetName);
+                }
+            }
+            if (descendantOp.getInputs().isEmpty()) {
+                break;
+            }
+            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+        }
+        return null;
+    }
+
+    private boolean checkIfRuleIsApplicable(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        // First check that the operator is a select and its condition is a function call.
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+
+        ILogicalExpression condExpr = ((SelectOperator) op).getCondition().getValue();
+        if (condExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        return true;
+    }
+
+    private AccessMethodAnalysisContext analyzeCondition(ILogicalExpression cond) {
+        AccessMethodAnalysisContext analysisCtx = new AccessMethodAnalysisContext();
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) cond;
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+        if (funcIdent != AlgebricksBuiltinFunctions.OR) {
+            analyzeFunctionExpr(funcExpr, analysisCtx);
+            for (Mutable<ILogicalExpression> arg : funcExpr.getArguments()) {
+                ILogicalExpression argExpr = arg.getValue();
+                if (argExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+                    continue;
+                }
+                analyzeFunctionExpr((AbstractFunctionCallExpression) argExpr, analysisCtx);
+            }
+        }
+        return analysisCtx;
+    }
+
+    private void analyzeFunctionExpr(AbstractFunctionCallExpression funcExpr, AccessMethodAnalysisContext analysisCtx) {
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+        if (funcIdent == AlgebricksBuiltinFunctions.LE || funcIdent == AlgebricksBuiltinFunctions.GE
+                || funcIdent == AlgebricksBuiltinFunctions.LT || funcIdent == AlgebricksBuiltinFunctions.GT
+                || funcIdent == AlgebricksBuiltinFunctions.EQ) {
+            AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVar(funcExpr, analysisCtx);
+        }
+    }
+
+    private boolean findMacthedExprFieldName(IOptimizableFuncExpr optFuncExpr, AbstractLogicalOperator op,
+            Dataset dataset, ARecordType recType, List<Index> datasetIndexes) throws AlgebricksException {
+        AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        while (descendantOp != null) {
+            if (descendantOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                AssignOperator assignOp = (AssignOperator) descendantOp;
+                List<LogicalVariable> varList = assignOp.getVariables();
+                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
+                    LogicalVariable var = varList.get(varIndex);
+                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
+                    if (funcVarIndex == -1) {
+                        continue;
+                    }
+                    Pair<ARecordType, List<String>> fieldNameInfo = getFieldNameFromSubAssignTree(optFuncExpr,
+                            descendantOp, varIndex, recType);
+                    if (fieldNameInfo == null || fieldNameInfo.second == null) {
+                        // The variable could not be traced back to a stored field.
+                        return false;
+                    }
+                    List<String> fieldName = fieldNameInfo.second;
+                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
+                    return true;
+                }
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
+                DataSourceScanOperator scanOp = (DataSourceScanOperator) descendantOp;
+                List<LogicalVariable> varList = scanOp.getVariables();
+                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
+                    LogicalVariable var = varList.get(varIndex);
+                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
+                    if (funcVarIndex == -1) {
+                        continue;
+                    }
+                    // The variable value is one of the partitioning fields.
+                    List<String> fieldName = DatasetUtils.getPartitioningKeys(dataset).get(varIndex);
+                    if (fieldName == null) {
+                        return false;
+                    }
+                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
+                    return true;
+                }
+            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
+                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
+                List<LogicalVariable> varList = unnestMapOp.getVariables();
+                for (int varIndex = 0; varIndex < varList.size(); varIndex++) {
+                    LogicalVariable var = varList.get(varIndex);
+                    int funcVarIndex = optFuncExpr.findLogicalVar(var);
+                    if (funcVarIndex == -1) {
+                        continue;
+                    }
+
+                    String indexName = null;
+                    Index index = null;
+                    ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
+                    if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
+                        FunctionIdentifier fid = f.getFunctionIdentifier();
+                        if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
+                            throw new IllegalStateException();
+                        }
+                        AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
+                        jobGenParams.readFromFuncArgs(f.getArguments());
+                        indexName = jobGenParams.indexName;
+                        for (Index idx : datasetIndexes) {
+                            if (idx.getIndexName().compareTo(indexName) == 0) {
+                                index = idx;
+                                break;
+                            }
+                        }
+                    }
+
+                    if (index == null) {
+                        return false; // not an index-search we can reason about
+                    }
+                    int numSecondaryKeys = AccessMethodUtils.getNumSecondaryKeys(index, recType);
+                    List<String> fieldName;
+                    if (varIndex >= numSecondaryKeys) {
+                        fieldName = DatasetUtils.getPartitioningKeys(dataset).get(varIndex - numSecondaryKeys);
+                    } else {
+                        fieldName = index.getKeyFieldNames().get(varIndex);
+                    }
+                    if (fieldName == null) {
+                        return false;
+                    }
+                    optFuncExpr.setFieldName(funcVarIndex, fieldName);
+                    return true;
+                }
+            }
+
+            if (descendantOp.getInputs().isEmpty()) {
+                break;
+            }
+            descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+        }
+        return false;
+    }
+
+    private Pair<ARecordType, List<String>> getFieldNameFromSubAssignTree(IOptimizableFuncExpr optFuncExpr,
+            AbstractLogicalOperator op, int varIndex, ARecordType recType) {
+        AbstractLogicalExpression expr = null;
+        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+            AssignOperator assignOp = (AssignOperator) op;
+            expr = (AbstractLogicalExpression) assignOp.getExpressions().get(varIndex).getValue();
+        }
+        if (expr == null || expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return null;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
+        if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME
+                || funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX) {
+
+            //get the variable from here. Figure out which input it came from. Go to that input!!!
+            ArrayList<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+            expr.getUsedVariables(usedVars);
+            LogicalVariable usedVar = usedVars.get(0);
+            List<String> returnList = new ArrayList<String>();
+
+            //Find the input that it came from
+            for (int varCheck = 0; varCheck < op.getInputs().size(); varCheck++) {
+                AbstractLogicalOperator nestedOp = (AbstractLogicalOperator) op.getInputs().get(varCheck).getValue();
+                // Only assign inputs can contribute to the field-access chain; skip other inputs.
+                if (nestedOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+                    int nestedAssignVar = ((AssignOperator) nestedOp).getVariables().indexOf(usedVar);
+                    if (nestedAssignVar == -1) {
+                        continue;
+                    }
+                    //get the nested info from the lower input
+                    Pair<ARecordType, List<String>> lowerInfo = getFieldNameFromSubAssignTree(optFuncExpr,
+                            (AbstractLogicalOperator) op.getInputs().get(varCheck).getValue(), nestedAssignVar, recType);
+                    if (lowerInfo != null) {
+                        recType = lowerInfo.first;
+                        returnList = lowerInfo.second;
+                    }
+                }
+            }
+
+            if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME) {
+                ILogicalExpression nameArg = funcExpr.getArguments().get(1).getValue();
+                if (nameArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                    return null;
+                }
+                ConstantExpression constExpr = (ConstantExpression) nameArg;
+                returnList.addAll(Arrays.asList(((AString) ((AsterixConstantValue) constExpr.getValue()).getObject())
+                        .getStringValue()));
+                return new Pair<ARecordType, List<String>>(recType, returnList);
+            } else if (funcIdent == AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX) {
+                ILogicalExpression idxArg = funcExpr.getArguments().get(1).getValue();
+                if (idxArg.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                    return null;
+                }
+                ConstantExpression constExpr = (ConstantExpression) idxArg;
+                int fieldIndex = ((AInt32) ((AsterixConstantValue) constExpr.getValue()).getObject()).getIntegerValue();
+                returnList.addAll(Arrays.asList(recType.getFieldNames()[fieldIndex]));
+                IAType subType = recType.getFieldTypes()[fieldIndex];
+                if (subType.getTypeTag() == ATypeTag.RECORD) {
+                    recType = (ARecordType) subType;
+                }
+                return new Pair<ARecordType, List<String>>(recType, returnList);
+            }
+
+        }
+
+        ILogicalExpression argExpr = funcExpr.getArguments().get(0).getValue();
+        if (argExpr.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return null;
+        }
+
+        return null;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
new file mode 100644
index 0000000..47e3dbf
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules.am;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+
+/**
+ * This rule optimizes simple selections with secondary or primary indexes. The use of an
+ * index is expressed as an unnest-map over an index-search function which will be
+ * replaced with the appropriate embodiment during codegen.
+ * Matches the following operator patterns:
+ * Standard secondary index pattern:
+ * There must be at least one assign, but there may be more, e.g., when matching similarity-jaccard-check().
+ * (select) <-- (assign | unnest)+ <-- (datasource scan)
+ * Primary index lookup pattern:
+ * No assign is necessary to get the primary key fields (they are already stored fields in the BTree tuples).
+ * (select) <-- (datasource scan)
+ * Replaces the above patterns with this plan:
+ * (select) <-- (assign) <-- (btree search) <-- (sort) <-- (unnest-map(index search)) <-- (assign)
+ * The sort is optional, and some access methods implementations may choose not to sort.
+ * Note that for some index-based optimizations we do not remove the triggering
+ * condition from the select, since the index may only act as a filter, and the
+ * final verification must still be done with the original select condition.
+ * The basic outline of this rule is:
+ * 1. Match operator pattern.
+ * 2. Analyze select condition to see if there are optimizable functions (delegated to IAccessMethods).
+ * 3. Check metadata to see if there are applicable indexes.
+ * 4. Choose an index to apply (for now only a single index will be chosen).
+ * 5. Rewrite plan using index (delegated to IAccessMethods).
+ */
+public class IntroduceSelectAccessMethodRule extends AbstractIntroduceAccessMethodRule {
+
+    // Operators representing the patterns to be matched:
+    // These ops are set in matchesPattern()
+    protected Mutable<ILogicalOperator> selectRef = null;
+    protected SelectOperator select = null;
+    protected AbstractFunctionCallExpression selectCond = null;
+    protected final OptimizableOperatorSubTree subTree = new OptimizableOperatorSubTree();
+
+    // Register access methods.
+    protected static Map<FunctionIdentifier, List<IAccessMethod>> accessMethods = new HashMap<FunctionIdentifier, List<IAccessMethod>>();
+    static {
+        registerAccessMethod(BTreeAccessMethod.INSTANCE, accessMethods);
+        registerAccessMethod(RTreeAccessMethod.INSTANCE, accessMethods);
+        registerAccessMethod(InvertedIndexAccessMethod.INSTANCE, accessMethods);
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        clear();
+        setMetadataDeclarations(context);
+
+        // Match operator pattern and initialize operator members.
+        if (!matchesOperatorPattern(opRef, context)) {
+            return false;
+        }
+
+        // Analyze select condition.
+        Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs = new HashMap<IAccessMethod, AccessMethodAnalysisContext>();
+        if (!analyzeCondition(selectCond, subTree.assignsAndUnnests, analyzedAMs)) {
+            return false;
+        }
+
+        // Set dataset and type metadata.
+        if (!subTree.setDatasetAndTypeMetadata((AqlMetadataProvider) context.getMetadataProvider())) {
+            return false;
+        }
+
+        fillSubTreeIndexExprs(subTree, analyzedAMs, context);
+        pruneIndexCandidates(analyzedAMs);
+
+        // Choose index to be applied.
+        Pair<IAccessMethod, Index> chosenIndex = chooseIndex(analyzedAMs);
+        if (chosenIndex == null) {
+            context.addToDontApplySet(this, select);
+            return false;
+        }
+
+        // Apply plan transformation using chosen index.
+        AccessMethodAnalysisContext analysisCtx = analyzedAMs.get(chosenIndex.first);
+        boolean res = chosenIndex.first.applySelectPlanTransformation(selectRef, subTree, chosenIndex.second,
+                analysisCtx, context);
+        if (res) {
+            OperatorPropertiesUtil.typeOpRec(opRef, context);
+        }
+        context.addToDontApplySet(this, select);
+        return res;
+    }
+
+    protected boolean matchesOperatorPattern(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        // First check that the operator is a select and its condition is a function call.
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op1)) {
+            return false;
+        }
+        if (op1.getOperatorTag() != LogicalOperatorTag.SELECT) {
+            return false;
+        }
+        // Set and analyze select.
+        selectRef = opRef;
+        select = (SelectOperator) op1;
+        // Check that the select's condition is a function call.
+        ILogicalExpression condExpr = select.getCondition().getValue();
+        if (condExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        selectCond = (AbstractFunctionCallExpression) condExpr;
+        boolean res = subTree.initFromSubTree(op1.getInputs().get(0));
+        return res && subTree.hasDataSourceScan();
+    }
+
+    @Override
+    public Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods() {
+        return accessMethods;
+    }
+
+    private void clear() {
+        selectRef = null;
+        select = null;
+        selectCond = null;
+    }
+}
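
The javadoc above outlines the rule as a five-step pipeline: match the select pattern, analyze the condition for optimizable comparisons, consult the metadata for applicable indexes, choose one, and rewrite the plan. The stand-alone sketch below models only that control flow under simplified assumptions; the Candidate and IndexDef classes, the field names, and the chooseIndex heuristic are hypothetical stand-ins and are not part of the Algebricks or AsterixDB APIs.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Hypothetical, simplified model of the rule's control flow; none of these types
    // belong to Algebricks or AsterixDB.
    public class SelectAccessMethodSketch {

        // Stand-in for an optimizable comparison (field <op> constant) found in the select condition.
        static class Candidate {
            final String field;
            final String op;
            final Object constant;

            Candidate(String field, String op, Object constant) {
                this.field = field;
                this.op = op;
                this.constant = constant;
            }
        }

        // Stand-in for a secondary-index declaration taken from the metadata.
        static class IndexDef {
            final String name;
            final List<String> keyFields;

            IndexDef(String name, List<String> keyFields) {
                this.name = name;
                this.keyFields = keyFields;
            }
        }

        // Step 2: keep only comparisons that pair a field with a constant.
        static List<Candidate> analyzeCondition(List<Candidate> conjuncts) {
            List<Candidate> optimizable = new ArrayList<Candidate>();
            for (Candidate c : conjuncts) {
                if (c.constant != null) {
                    optimizable.add(c);
                }
            }
            return optimizable;
        }

        // Steps 3-4: choose the first index whose leading key field is constrained by some candidate.
        static IndexDef chooseIndex(List<Candidate> candidates, List<IndexDef> indexes) {
            for (IndexDef index : indexes) {
                for (Candidate c : candidates) {
                    if (index.keyFields.get(0).equals(c.field)) {
                        return index;
                    }
                }
            }
            return null; // no applicable index: the select is left untouched
        }

        public static void main(String[] args) {
            List<Candidate> conjuncts = Arrays.asList(new Candidate("age", ">=", 30),
                    new Candidate("name", "=", "Alice"));
            List<IndexDef> indexes = Arrays.asList(new IndexDef("ageIdx", Arrays.asList("age")));
            IndexDef chosen = chooseIndex(analyzeCondition(conjuncts), indexes);
            // Step 5 (not modeled): rewrite (select)<-(scan) into (select)<-(assign)<-(unnest-map(index-search)),
            // keeping the original condition in the select since the index may only act as a filter.
            System.out.println(chosen == null ? "no index applied" : "apply index " + chosen.name);
        }
    }

In the actual rule, steps 2 and 5 are delegated to the registered IAccessMethod implementations (B-tree, R-tree, inverted index), so both the applicability test and the shape of the rewritten plan depend on the chosen access method.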



[07/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
new file mode 100644
index 0000000..8f555cc
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
@@ -0,0 +1,761 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import edu.uci.ics.asterix.common.api.ILocalResourceMetadata;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
+import edu.uci.ics.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
+import edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.HDFSIndexingAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory;
+import edu.uci.ics.asterix.external.indexing.operators.ExternalDatasetIndexesAbortOperatorDescriptor;
+import edu.uci.ics.asterix.external.indexing.operators.ExternalDatasetIndexesCommitOperatorDescriptor;
+import edu.uci.ics.asterix.external.indexing.operators.ExternalDatasetIndexesRecoverOperatorDescriptor;
+import edu.uci.ics.asterix.external.indexing.operators.IndexInfoOperatorDescriptor;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
+import edu.uci.ics.asterix.metadata.entities.ExternalFile;
+import edu.uci.ics.asterix.metadata.entities.Index;
+import edu.uci.ics.asterix.metadata.external.FilesIndexDescription;
+import edu.uci.ics.asterix.metadata.external.IndexingConstants;
+import edu.uci.ics.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.metadata.utils.ExternalDatasetsRegistry;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.asterix.tools.external.data.ExternalFilesIndexOperatorDescriptor;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.resource.ExternalBTreeLocalResourceMetadata;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.storage.common.file.LocalResource;
+
+public class ExternalIndexingOperations {
+
+    public static final List<List<String>> FILE_INDEX_FIELD_NAMES = new ArrayList<List<String>>();
+    public static final ArrayList<IAType> FILE_INDEX_FIELD_TYPES = new ArrayList<IAType>();
+    static {
+        FILE_INDEX_FIELD_NAMES.add(new ArrayList<String>(Arrays.asList("")));
+        FILE_INDEX_FIELD_TYPES.add(BuiltinType.ASTRING);
+    }
+
+    public static boolean isIndexible(ExternalDatasetDetails ds) {
+        String adapter = ds.getAdapter();
+        if (adapter.equalsIgnoreCase("hdfs") || adapter.equalsIgnoreCase("hive")
+                || adapter.equalsIgnoreCase("edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter")
+                || adapter.equalsIgnoreCase("edu.uci.ics.asterix.external.dataset.adapter.HIVEAdapter")) {
+            return true;
+        }
+        return false;
+    }
+
+    public static boolean isRefereshActive(ExternalDatasetDetails ds) {
+        return ds.getState() != ExternalDatasetTransactionState.COMMIT;
+    }
+
+    public static boolean datasetUsesHiveAdapter(ExternalDatasetDetails ds) {
+        String adapter = ds.getAdapter();
+        return (adapter.equalsIgnoreCase("hive") || adapter
+                .equalsIgnoreCase("edu.uci.ics.asterix.external.dataset.adapter.HIVEAdapter"));
+    }
+
+    public static boolean isValidIndexName(String datasetName, String indexName) {
+        return (!datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX).equals(indexName));
+    }
+
+    public static String getFilesIndexName(String datasetName) {
+        return datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX);
+    }
+
+    public static int getRIDSize(Dataset dataset) {
+        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
+        return IndexingConstants.getRIDSize(dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT));
+    }
+
+    public static IBinaryComparatorFactory[] getComparatorFactories(Dataset dataset) {
+        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
+        return IndexingConstants.getComparatorFactories((dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT)));
+    }
+
+    public static IBinaryComparatorFactory[] getBuddyBtreeComparatorFactories() {
+        return IndexingConstants.getBuddyBtreeComparatorFactories();
+    }
+
+    public static ArrayList<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset) throws AlgebricksException {
+        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
+        ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
+        try {
+            // Create the file system object
+            FileSystem fs = getFileSystemObject(datasetDetails.getProperties());
+            // If dataset uses hive adapter, add path to the dataset properties
+            if (datasetUsesHiveAdapter(datasetDetails)) {
+                HiveAdapterFactory.populateConfiguration(datasetDetails.getProperties());
+            }
+            // Get paths of dataset
+            String path = datasetDetails.getProperties().get(HDFSAdapterFactory.KEY_PATH);
+            String[] paths = path.split(",");
+
+            // Add fileStatuses to files
+            for (String aPath : paths) {
+                FileStatus[] fileStatuses = fs.listStatus(new Path(aPath));
+                for (int i = 0; i < fileStatuses.length; i++) {
+                    int nextFileNumber = files.size();
+                    if (fileStatuses[i].isDirectory()) {
+                        listSubFiles(dataset, fs, fileStatuses[i], files);
+                    } else {
+                        files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(),
+                                nextFileNumber, fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i]
+                                        .getModificationTime()), fileStatuses[i].getLen(),
+                                ExternalFilePendingOp.PENDING_NO_OP));
+                    }
+                }
+            }
+            // Close file system
+            fs.close();
+            if (files.size() == 0) {
+                throw new AlgebricksException("File Snapshot retrieved from external file system is empty");
+            }
+            return files;
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException("Unable to get list of HDFS files " + e);
+        }
+    }
+
+    /* list all files under the directory
+     * src is expected to be a folder
+     */
+    private static void listSubFiles(Dataset dataset, FileSystem srcFs, FileStatus src, ArrayList<ExternalFile> files)
+            throws IOException {
+        Path path = src.getPath();
+        FileStatus[] fileStatuses = srcFs.listStatus(path);
+        for (int i = 0; i < fileStatuses.length; i++) {
+            int nextFileNumber = files.size();
+            if (fileStatuses[i].isDirectory()) {
+                listSubFiles(dataset, srcFs, fileStatuses[i], files);
+            } else {
+                files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
+                        fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i].getModificationTime()),
+                        fileStatuses[i].getLen(), ExternalFilePendingOp.PENDING_NO_OP));
+            }
+        }
+    }
+
+    public static FileSystem getFileSystemObject(Map<String, String> map) throws IOException {
+        Configuration conf = new Configuration();
+        conf.set("fs.default.name", map.get(HDFSAdapterFactory.KEY_HDFS_URL).trim());
+        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
+        return FileSystem.get(conf);
+    }
+
+    public static JobSpecification buildFilesIndexReplicationJobSpec(Dataset dataset,
+            ArrayList<ExternalFile> externalFilesSnapshot, AqlMetadataProvider metadataProvider, boolean createIndex)
+            throws MetadataException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
+                        dataset.getDatasetName(), getFilesIndexName(dataset.getDatasetName()), true);
+        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
+        ILocalResourceMetadata localResourceMetadata = new ExternalBTreeLocalResourceMetadata(
+                FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
+                FilesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, false, dataset.getDatasetId(),
+                mergePolicyFactory, mergePolicyFactoryProperties);
+        PersistentLocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
+                localResourceMetadata, LocalResource.ExternalBTreeResource);
+        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
+                mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
+                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+        ExternalFilesIndexOperatorDescriptor externalFilesOp = new ExternalFilesIndexOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                secondaryFileSplitProvider, indexDataflowHelperFactory, localResourceFactoryProvider,
+                externalFilesSnapshot, createIndex);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
+                secondarySplitsAndConstraint.second);
+        spec.addRoot(externalFilesOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    /**
+     * This method creates an indexing operator that indexes records in HDFS.
+     * 
+     * @param jobSpec
+     * @param itemType
+     * @param dataset
+     * @param format
+     * @param files
+     * @param indexerDesc
+     * @return
+     * @throws Exception
+     */
+    private static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> getExternalDataIndexingOperator(
+            JobSpecification jobSpec, IAType itemType, Dataset dataset, List<ExternalFile> files,
+            RecordDescriptor indexerDesc, AqlMetadataProvider metadataProvider) throws Exception {
+        HDFSIndexingAdapterFactory adapterFactory = new HDFSIndexingAdapterFactory();
+        adapterFactory.setFiles(files);
+        adapterFactory.configure(((ExternalDatasetDetails) dataset.getDatasetDetails()).getProperties(),
+                (ARecordType) itemType);
+        return new Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint>(
+                new ExternalDataScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory),
+                adapterFactory.getPartitionConstraint());
+    }
+
+    public static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> createExternalIndexingOp(
+            JobSpecification spec, AqlMetadataProvider metadataProvider, Dataset dataset, ARecordType itemType,
+            RecordDescriptor indexerDesc, List<ExternalFile> files) throws Exception {
+        if (files == null) {
+            files = MetadataManager.INSTANCE.getDatasetExternalFiles(metadataProvider.getMetadataTxnContext(), dataset);
+        }
+        return getExternalDataIndexingOperator(spec, itemType, dataset, files, indexerDesc, metadataProvider);
+    }
+
+    /**
+     * At the end of this method, we expect to have 4 sets as follows:
+     * metadataFiles should contain only the files that are appended in their original state
+     * addedFiles should contain new files that have numbers assigned starting after the max original file number
+     * deletedFiles should contain files that are no longer present in the file system
+     * appendedFiles should have the new file information of existing files
+     * The method returns true only if there is no delta (i.e., the dataset is up to date)
+     * 
+     * @param dataset
+     * @param metadataFiles
+     * @param addedFiles
+     * @param deletedFiles
+     * @param appendedFiles
+     * @return
+     * @throws MetadataException
+     * @throws AlgebricksException
+     */
+    public static boolean isDatasetUptodate(Dataset dataset, List<ExternalFile> metadataFiles,
+            List<ExternalFile> addedFiles, List<ExternalFile> deletedFiles, List<ExternalFile> appendedFiles)
+            throws MetadataException, AlgebricksException {
+        boolean uptodate = true;
+        int newFileNumber = metadataFiles.get(metadataFiles.size() - 1).getFileNumber() + 1;
+
+        ArrayList<ExternalFile> fileSystemFiles = getSnapshotFromExternalFileSystem(dataset);
+
+        // Loop over file system files (taking care of added files)
+        for (ExternalFile fileSystemFile : fileSystemFiles) {
+            boolean fileFound = false;
+            Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
+            while (mdFilesIterator.hasNext()) {
+                ExternalFile metadataFile = mdFilesIterator.next();
+                if (fileSystemFile.getFileName().equals(metadataFile.getFileName())) {
+                    // Same file name
+                    if (fileSystemFile.getLastModefiedTime().equals(metadataFile.getLastModefiedTime())) {
+                        // Same timestamp
+                        if (fileSystemFile.getSize() == metadataFile.getSize()) {
+                            // Same size -> no op
+                            mdFilesIterator.remove();
+                            fileFound = true;
+                        } else {
+                            // Different size -> append op
+                            metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
+                            fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
+                            appendedFiles.add(fileSystemFile);
+                            fileFound = true;
+                            uptodate = false;
+                        }
+                    } else {
+                        // Same file name, Different file mod date -> delete and add
+                        metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
+                        deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile
+                                .getDatasetName(), 0, metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
+                                metadataFile.getSize(), ExternalFilePendingOp.PENDING_DROP_OP));
+                        fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
+                        fileSystemFile.setFileNumber(newFileNumber);
+                        addedFiles.add(fileSystemFile);
+                        newFileNumber++;
+                        fileFound = true;
+                        uptodate = false;
+                    }
+                }
+                if (fileFound)
+                    break;
+            }
+            if (!fileFound) {
+                // File not stored previously in metadata -> pending add op
+                fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
+                fileSystemFile.setFileNumber(newFileNumber);
+                addedFiles.add(fileSystemFile);
+                newFileNumber++;
+                uptodate = false;
+            }
+        }
+
+        // Done with files from external file system -> metadata files now contain both deleted files and appended ones
+        // first, correct number assignment to deleted and updated files
+        for (ExternalFile deletedFile : deletedFiles) {
+            deletedFile.setFileNumber(newFileNumber);
+            newFileNumber++;
+        }
+        for (ExternalFile appendedFile : appendedFiles) {
+            appendedFile.setFileNumber(newFileNumber);
+            newFileNumber++;
+        }
+
+        // include the remaining deleted files
+        Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
+        while (mdFilesIterator.hasNext()) {
+            ExternalFile metadataFile = mdFilesIterator.next();
+            if (metadataFile.getPendingOp() == ExternalFilePendingOp.PENDING_NO_OP) {
+                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
+                deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(),
+                        newFileNumber, metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile
+                                .getSize(), metadataFile.getPendingOp()));
+                newFileNumber++;
+                uptodate = false;
+            }
+        }
+        return uptodate;
+    }
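
The delta computation documented above reduces to comparing the metadata snapshot with the current file-system listing keyed by file name: a differing modification time means the file was replaced (drop plus add), a differing size with the same timestamp means it was appended to, and anything left over on either side is added or deleted. The stand-alone sketch below illustrates that classification; FileInfo and Delta are hypothetical stand-ins rather than the ExternalFile metadata entity and its pending-operation flags.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical sketch of the snapshot comparison; FileInfo/Delta are stand-ins,
    // not the ExternalFile metadata entity or its pending-operation flags.
    public class SnapshotDeltaSketch {

        static class FileInfo {
            final String name;
            final long modTime;
            final long size;

            FileInfo(String name, long modTime, long size) {
                this.name = name;
                this.modTime = modTime;
                this.size = size;
            }
        }

        static class Delta {
            final List<FileInfo> added = new ArrayList<FileInfo>();
            final List<FileInfo> appended = new ArrayList<FileInfo>();
            final List<FileInfo> deleted = new ArrayList<FileInfo>();

            boolean isEmpty() {
                return added.isEmpty() && appended.isEmpty() && deleted.isEmpty();
            }
        }

        // Classify the current file-system listing against the metadata snapshot.
        static Delta classify(List<FileInfo> metadata, List<FileInfo> fileSystem) {
            Delta delta = new Delta();
            Map<String, FileInfo> knownByName = new HashMap<String, FileInfo>();
            for (FileInfo f : metadata) {
                knownByName.put(f.name, f);
            }
            for (FileInfo current : fileSystem) {
                FileInfo known = knownByName.remove(current.name);
                if (known == null) {
                    delta.added.add(current);            // not in metadata: newly added file
                } else if (known.modTime != current.modTime) {
                    delta.deleted.add(known);            // replaced file: drop the old version
                    delta.added.add(current);            // and add the new one
                } else if (known.size != current.size) {
                    delta.appended.add(current);         // same file, grown in place: append
                }                                        // same name, time, and size: unchanged
            }
            delta.deleted.addAll(knownByName.values());  // left over in metadata: deleted on the file system
            return delta;
        }

        public static void main(String[] args) {
            List<FileInfo> metadata = new ArrayList<FileInfo>();
            metadata.add(new FileInfo("a.log", 100L, 10L));
            metadata.add(new FileInfo("b.log", 100L, 20L));
            List<FileInfo> current = new ArrayList<FileInfo>();
            current.add(new FileInfo("a.log", 100L, 15L)); // appended
            current.add(new FileInfo("c.log", 200L, 5L));  // added; b.log was deleted
            Delta delta = classify(metadata, current);
            System.out.println("dataset up to date: " + delta.isEmpty());
        }
    }

The method above records the same outcome by tagging ExternalFile instances with PENDING_ADD_OP, PENDING_DROP_OP, or PENDING_APPEND_OP and assigning fresh file numbers, which the subsequent index-update jobs then act on.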
+
+    public static Dataset createTransactionDataset(Dataset dataset) {
+        ExternalDatasetDetails originalDsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
+        ExternalDatasetDetails dsd = new ExternalDatasetDetails(originalDsd.getAdapter(), originalDsd.getProperties(),
+                originalDsd.getTimestamp(), ExternalDatasetTransactionState.BEGIN);
+        Dataset transactionDataset = new Dataset(dataset.getDataverseName(), dataset.getDatasetName(),
+                dataset.getItemTypeName(), dataset.getNodeGroupName(), dataset.getCompactionPolicy(),
+                dataset.getCompactionPolicyProperties(), dsd, dataset.getHints(), DatasetType.EXTERNAL,
+                dataset.getDatasetId(), dataset.getPendingOp());
+        return transactionDataset;
+    }
+
+    public static boolean isFileIndex(Index index) {
+        return (index.getIndexName().equals(getFilesIndexName(index.getDatasetName())));
+    }
+
+    public static JobSpecification buildDropFilesIndexJobSpec(CompiledIndexDropStatement indexDropStmt,
+            AqlMetadataProvider metadataProvider, Dataset dataset) throws AlgebricksException, MetadataException {
+        String dataverseName = indexDropStmt.getDataverseName() == null ? metadataProvider.getDefaultDataverseName()
+                : indexDropStmt.getDataverseName();
+        String datasetName = indexDropStmt.getDatasetName();
+        String indexName = indexDropStmt.getIndexName();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataverseName, datasetName, indexName, true);
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+        IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, !temp));
+        AlgebricksPartitionConstraintHelper
+                .setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second);
+        spec.addRoot(btreeDrop);
+
+        return spec;
+    }
+
+    public static JobSpecification buildFilesIndexUpdateOp(Dataset ds, List<ExternalFile> metadataFiles,
+            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
+            AqlMetadataProvider metadataProvider) throws MetadataException, AlgebricksException {
+        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
+        for (ExternalFile file : metadataFiles) {
+            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP)
+                files.add(file);
+            else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
+                for (ExternalFile appendedFile : appendedFiles) {
+                    if (appendedFile.getFileName().equals(file.getFileName())) {
+                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(),
+                                file.getFileNumber(), file.getFileName(), file.getLastModefiedTime(), appendedFile
+                                        .getSize(), ExternalFilePendingOp.PENDING_NO_OP));
+                    }
+                }
+            }
+        }
+        for (ExternalFile file : addedFiles) {
+            files.add(file);
+        }
+        Collections.sort(files);
+        return buildFilesIndexReplicationJobSpec(ds, files, metadataProvider, false);
+    }
+
+    public static JobSpecification buildIndexUpdateOp(Dataset ds, Index index, List<ExternalFile> metadataFiles,
+            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
+            AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+        // Create files list
+        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
+
+        for (ExternalFile metadataFile : metadataFiles) {
+            if (metadataFile.getPendingOp() != ExternalFilePendingOp.PENDING_APPEND_OP) {
+                files.add(metadataFile);
+            } else {
+                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
+                files.add(metadataFile);
+            }
+        }
+        // add new files
+        for (ExternalFile file : addedFiles) {
+            files.add(file);
+        }
+        // add appended files
+        for (ExternalFile file : appendedFiles) {
+            files.add(file);
+        }
+
+        CompiledCreateIndexStatement ccis = new CompiledCreateIndexStatement(index.getIndexName(),
+                index.getDataverseName(), index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
+                index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
+        return IndexOperations.buildSecondaryIndexLoadingJobSpec(ccis, null, null, metadataProvider, files);
+    }
+
+    public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
+                metadataProvider.getMetadataTxnContext());
+        boolean temp = ds.getDatasetDetails().isTemp();
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                        getFilesIndexName(ds.getDatasetName()), temp);
+        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
+                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
+        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
+
+        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+
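+        // Collect dataflow helper factories and index-info descriptors for every secondary
+        // B-tree and R-tree index of the dataset; the files index itself was prepared above.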
+        for (Index index : indexes) {
+            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
+                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
+                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                                index.getIndexName(), temp);
+                if (index.getIndexType() == IndexType.BTREE) {
+                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, spec));
+                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                } else if (index.getIndexType() == IndexType.RTREE) {
+                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
+                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                }
+            }
+        }
+
+        ExternalDatasetIndexesCommitOperatorDescriptor op = new ExternalDatasetIndexesCommitOperatorDescriptor(spec,
+                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
+                rtreeDataflowHelperFactories, rtreeInfos);
+
+        spec.addRoot(op);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
+                filesIndexSplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    private static ExternalBTreeDataflowHelperFactory getFilesIndexDataflowHelperFactory(Dataset ds,
+            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
+            AsterixStorageProperties storageProperties, JobSpecification spec) {
+        return new ExternalBTreeDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+    }
+
+    private static ExternalBTreeWithBuddyDataflowHelperFactory getBTreeDataflowHelperFactory(Dataset ds, Index index,
+            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
+            AsterixStorageProperties storageProperties, JobSpecification spec) {
+        return new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+    }
+
+    @SuppressWarnings("rawtypes")
+    private static ExternalRTreeDataflowHelperFactory getRTreeDataflowHelperFactory(Dataset ds, Index index,
+            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
+            AsterixStorageProperties storageProperties, AqlMetadataProvider metadataProvider, JobSpecification spec)
+            throws AlgebricksException, AsterixException {
+        int numPrimaryKeys = getRIDSize(ds);
+        List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
+        ARecordType itemType = (ARecordType) metadataProvider.findType(ds.getDataverseName(), ds.getItemTypeName());
+        Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0), itemType);
+        IAType spatialType = spatialTypePair.first;
+        if (spatialType == null) {
+            throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
+        }
+        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+        int numNestedSecondaryKeyFields = numDimensions * 2;
+        IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
+
+        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys
+                + numNestedSecondaryKeyFields];
+        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
+        IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
+        ATypeTag keyType = nestedKeyType.getTypeTag();
+
+        for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
+            ISerializerDeserializer keySerde = AqlSerializerDeserializerProvider.INSTANCE
+                    .getSerializerDeserializer(nestedKeyType);
+            secondaryRecFields[i] = keySerde;
+
+            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(
+                    nestedKeyType, true);
+            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
+            valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
+        }
+        // Add serializers and comparators for primary index fields.
+        for (int i = 0; i < numPrimaryKeys; i++) {
+            secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
+            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
+        }
+        int[] primaryKeyFields = new int[numPrimaryKeys];
+        for (int i = 0; i < primaryKeyFields.length; i++) {
+            primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
+        }
+
+        return new ExternalRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
+                getBuddyBtreeComparatorFactories(), mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
+                AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
+                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+    }
+
+    public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+
+        boolean temp = ds.getDatasetDetails().isTemp();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                        getFilesIndexName(ds.getDatasetName()), temp);
+        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
+                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
+        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
+
+        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+
+        for (Index index : indexes) {
+            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
+                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
+                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                                index.getIndexName(), temp);
+                if (index.getIndexType() == IndexType.BTREE) {
+                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, spec));
+                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                } else if (index.getIndexType() == IndexType.RTREE) {
+                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
+                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                }
+            }
+        }
+
+        ExternalDatasetIndexesAbortOperatorDescriptor op = new ExternalDatasetIndexesAbortOperatorDescriptor(spec,
+                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
+                rtreeDataflowHelperFactories, rtreeInfos);
+
+        spec.addRoot(op);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
+                filesIndexSplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+
+    }
+
+    public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        boolean temp = ds.getDatasetDetails().isTemp();
+
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                        getFilesIndexName(ds.getDatasetName()), temp);
+        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
+                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
+        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
+
+        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+
+        for (Index index : indexes) {
+            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
+                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
+                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                                index.getIndexName(), temp);
+                if (index.getIndexType() == IndexType.BTREE) {
+                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, spec));
+                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                } else if (index.getIndexType() == IndexType.RTREE) {
+                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
+                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                }
+            }
+        }
+
+        ExternalDatasetIndexesRecoverOperatorDescriptor op = new ExternalDatasetIndexesRecoverOperatorDescriptor(spec,
+                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
+                rtreeDataflowHelperFactories, rtreeInfos);
+
+        spec.addRoot(op);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
+                filesIndexSplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    public static JobSpecification compactFilesIndexJobSpec(Dataset dataset, AqlMetadataProvider metadataProvider)
+            throws MetadataException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
+                        dataset.getDatasetName(), getFilesIndexName(dataset.getDatasetName()), true);
+        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
+                mergePolicyFactory, mergePolicyFactoryProperties, new SecondaryIndexOperationTrackerProvider(
+                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+        LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                secondaryFileSplitProvider, FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
+                FilesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, indexDataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE);
+        spec.addRoot(compactOp);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                secondarySplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+}
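Note that the abort/recover/compact builders above all return a plain Hyracks JobSpecification, so executing any of them is just a matter of handing the spec to a Hyracks client connection. A minimal sketch of that hand-off for the files-index compaction job follows; the host/port and the variables dataset and metadataProvider are placeholders supplied by the calling context, not values taken from this commit, and exception handling is omitted.

    // Sketch only: build the files-index compaction job and submit it to the cluster controller.
    // "cc-host"/1098, dataset, and metadataProvider are illustrative placeholders.
    IHyracksClientConnection hcc = new HyracksConnection("cc-host", 1098);
    JobSpecification compactSpec = compactFilesIndexJobSpec(dataset, metadataProvider);
    JobId jobId = hcc.startJob(compactSpec);
    hcc.waitForCompletion(jobId); // block until the LSM compaction job finishes

The same pattern applies to buildAbortOp and buildRecoverOp; only the builder call changes.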

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
new file mode 100644
index 0000000..379a4f0
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
@@ -0,0 +1,251 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import java.util.Collection;
+import java.util.List;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.FeedConnectJobInfo;
+import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
+import edu.uci.ics.asterix.common.feeds.FeedConstants;
+import edu.uci.ics.asterix.common.feeds.FeedId;
+import edu.uci.ics.asterix.common.feeds.FeedPolicyAccessor;
+import edu.uci.ics.asterix.common.feeds.FeedTupleCommitResponseMessage;
+import edu.uci.ics.asterix.common.feeds.api.IFeedJoint;
+import edu.uci.ics.asterix.common.feeds.api.IFeedMessage;
+import edu.uci.ics.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
+import edu.uci.ics.asterix.common.feeds.message.EndFeedMessage;
+import edu.uci.ics.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
+import edu.uci.ics.asterix.feeds.FeedLifecycleListener;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.PrimaryFeed;
+import edu.uci.ics.asterix.metadata.feeds.FeedMessageOperatorDescriptor;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapterFactory;
+import edu.uci.ics.asterix.metadata.feeds.PrepareStallMessage;
+import edu.uci.ics.asterix.metadata.feeds.TerminateDataFlowMessage;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+
+/**
+ * Provides helper methods for creating job specifications for operations on a feed.
+ */
+public class FeedOperations {
+
+    /**
+     * Builds the job spec for ingesting a (primary) feed from its external source via the feed adapter.
+     *
+     * @param primaryFeed the primary feed whose data is to be ingested
+     * @param metadataProvider the metadata provider used to build the feed intake runtime
+     * @param policyAccessor accessor for the ingestion policy associated with the feed
+     * @return JobSpecification the Hyracks job specification for receiving data from the external source
+     * @throws Exception
+     */
+    public static Pair<JobSpecification, IFeedAdapterFactory> buildFeedIntakeJobSpec(PrimaryFeed primaryFeed,
+            AqlMetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
+
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        spec.setFrameSize(FeedConstants.JobConstants.DEFAULT_FRAME_SIZE);
+        IFeedAdapterFactory adapterFactory = null;
+        IOperatorDescriptor feedIngestor;
+        AlgebricksPartitionConstraint ingesterPc;
+
+        try {
+            Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IFeedAdapterFactory> t = metadataProvider
+                    .buildFeedIntakeRuntime(spec, primaryFeed, policyAccessor);
+            feedIngestor = t.first;
+            ingesterPc = t.second;
+            adapterFactory = t.third;
+        } catch (AlgebricksException e) {
+            e.printStackTrace();
+            throw new AsterixException(e);
+        }
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedIngestor, ingesterPc);
+
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, ingesterPc);
+        spec.connect(new OneToOneConnectorDescriptor(spec), feedIngestor, 0, nullSink, 0);
+        spec.addRoot(nullSink);
+        return new Pair<JobSpecification, IFeedAdapterFactory>(spec, adapterFactory);
+    }
+
+    public static JobSpecification buildDiscontinueFeedSourceSpec(AqlMetadataProvider metadataProvider, FeedId feedId)
+            throws AsterixException, AlgebricksException {
+
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IOperatorDescriptor feedMessenger = null;
+        AlgebricksPartitionConstraint messengerPc = null;
+
+        List<String> locations = FeedLifecycleListener.INSTANCE.getIntakeLocations(feedId);
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDiscontinueFeedMessengerRuntime(spec, feedId,
+                locations);
+
+        feedMessenger = p.first;
+        messengerPc = p.second;
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
+        spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
+        spec.addRoot(nullSink);
+
+        return spec;
+    }
+
+    /**
+     * Builds the job spec for sending a message to an active feed to disconnect it from
+     * its source.
+     */
+    public static Pair<JobSpecification, Boolean> buildDisconnectFeedJobSpec(AqlMetadataProvider metadataProvider,
+            FeedConnectionId connectionId) throws AsterixException, AlgebricksException {
+
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IOperatorDescriptor feedMessenger;
+        AlgebricksPartitionConstraint messengerPc;
+        List<String> locations = null;
+        FeedRuntimeType sourceRuntimeType;
+        try {
+            FeedConnectJobInfo cInfo = FeedLifecycleListener.INSTANCE.getFeedConnectJobInfo(connectionId);
+            IFeedJoint sourceFeedJoint = cInfo.getSourceFeedJoint();
+            IFeedJoint computeFeedJoint = cInfo.getComputeFeedJoint();
+
+            boolean terminateIntakeJob = false;
+            boolean completeDisconnect = computeFeedJoint == null || computeFeedJoint.getReceivers().isEmpty();
+            if (completeDisconnect) {
+                sourceRuntimeType = FeedRuntimeType.INTAKE;
+                locations = cInfo.getCollectLocations();
+                terminateIntakeJob = sourceFeedJoint.getReceivers().size() == 1;
+            } else {
+                locations = cInfo.getComputeLocations();
+                sourceRuntimeType = FeedRuntimeType.COMPUTE;
+            }
+
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDisconnectFeedMessengerRuntime(spec,
+                    connectionId, locations, sourceRuntimeType, completeDisconnect, sourceFeedJoint.getOwnerFeedId());
+
+            feedMessenger = p.first;
+            messengerPc = p.second;
+
+            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
+            NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
+            spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
+            spec.addRoot(nullSink);
+            return new Pair<JobSpecification, Boolean>(spec, terminateIntakeJob);
+
+        } catch (AlgebricksException e) {
+            throw new AsterixException(e);
+        }
+
+    }
+
+    public static JobSpecification buildPrepareStallMessageJob(PrepareStallMessage stallMessage,
+            Collection<String> collectLocations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, stallMessage.getConnectionId(), stallMessage, collectLocations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static JobSpecification buildNotifyThrottlingEnabledMessageJob(
+            ThrottlingEnabledFeedMessage throttlingEnabledMesg, Collection<String> locations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, throttlingEnabledMesg.getConnectionId(), throttlingEnabledMesg, locations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static JobSpecification buildTerminateFlowMessageJob(TerminateDataFlowMessage terminateMessage,
+            List<String> collectLocations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, terminateMessage.getConnectionId(), terminateMessage, collectLocations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static JobSpecification buildCommitAckResponseJob(FeedTupleCommitResponseMessage commitResponseMessage,
+            Collection<String> targetLocations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, commitResponseMessage.getConnectionId(), commitResponseMessage, targetLocations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDiscontinueFeedMessengerRuntime(
+            JobSpecification jobSpec, FeedId feedId, List<String> locations) throws AlgebricksException {
+        FeedConnectionId feedConnectionId = new FeedConnectionId(feedId, null);
+        IFeedMessage feedMessage = new EndFeedMessage(feedConnectionId, FeedRuntimeType.INTAKE,
+                feedConnectionId.getFeedId(), true, EndFeedMessage.EndMessageType.DISCONTINUE_SOURCE);
+        return buildSendFeedMessageRuntime(jobSpec, feedConnectionId, feedMessage, locations);
+    }
+
+    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildSendFeedMessageRuntime(
+            JobSpecification jobSpec, FeedConnectionId feedConnectionId, IFeedMessage feedMessage,
+            Collection<String> locations) throws AlgebricksException {
+        AlgebricksPartitionConstraint partitionConstraint = new AlgebricksAbsolutePartitionConstraint(
+                locations.toArray(new String[] {}));
+        FeedMessageOperatorDescriptor feedMessenger = new FeedMessageOperatorDescriptor(jobSpec, feedConnectionId,
+                feedMessage);
+        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, partitionConstraint);
+    }
+
+    private static JobSpecification buildSendFeedMessageJobSpec(IOperatorDescriptor operatorDescriptor,
+            AlgebricksPartitionConstraint messengerPc, JobSpecification messageJobSpec) {
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, operatorDescriptor,
+                messengerPc);
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(messageJobSpec);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, nullSink, messengerPc);
+        messageJobSpec.connect(new OneToOneConnectorDescriptor(messageJobSpec), operatorDescriptor, 0, nullSink, 0);
+        messageJobSpec.addRoot(nullSink);
+        return messageJobSpec;
+    }
+
+    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDisconnectFeedMessengerRuntime(
+            JobSpecification jobSpec, FeedConnectionId feedConnectionId, List<String> locations,
+            FeedRuntimeType sourceFeedRuntimeType, boolean completeDisconnection, FeedId sourceFeedId)
+            throws AlgebricksException {
+        IFeedMessage feedMessage = new EndFeedMessage(feedConnectionId, sourceFeedRuntimeType, sourceFeedId,
+                completeDisconnection, EndFeedMessage.EndMessageType.DISCONNECT_FEED);
+        return buildSendFeedMessageRuntime(jobSpec, feedConnectionId, feedMessage, locations);
+    }
+}
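As with the index jobs, the feed job builders above only assemble a JobSpecification; running the job is left to the caller. A hedged sketch of disconnecting a feed is shown below; the dataverse/feed/dataset names and the FeedId constructor arguments (dataverse, feed name) are assumptions for illustration, and metadataProvider plus a Hyracks client connection hcc are assumed to come from the surrounding statement executor.

    // Sketch only: disconnect a feed from its target dataset and submit the resulting job.
    FeedConnectionId connectionId =
            new FeedConnectionId(new FeedId("feeds", "TwitterFeed"), "Tweets"); // illustrative names
    Pair<JobSpecification, Boolean> p =
            FeedOperations.buildDisconnectFeedJobSpec(metadataProvider, connectionId);
    JobSpecification disconnectJob = p.first;
    boolean terminateIntakeJob = p.second; // true when this receiver was the intake job's last consumer
    hcc.startJob(disconnectJob);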

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java
new file mode 100644
index 0000000..0642151
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/IndexOperations.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import java.util.List;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
+import edu.uci.ics.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.ExternalFile;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
+import edu.uci.ics.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexCompactStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+
+public class IndexOperations {
+
+    private static final PhysicalOptimizationConfig physicalOptimizationConfig = OptimizationConfUtil
+            .getPhysicalOptimizationConfig();
+
+    public static JobSpecification buildSecondaryIndexCreationJobSpec(CompiledCreateIndexStatement createIndexStmt,
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
+            throws AsterixException, AlgebricksException {
+        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
+                .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
+                        createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
+                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
+                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
+                        physicalOptimizationConfig, recType, enforcedType);
+        return secondaryIndexHelper.buildCreationJobSpec();
+    }
+
+    public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider)
+            throws AsterixException, AlgebricksException {
+        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
+                .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
+                        createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
+                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
+                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
+                        physicalOptimizationConfig, recType, enforcedType);
+        return secondaryIndexHelper.buildLoadingJobSpec();
+    }
+
+    public static JobSpecification buildSecondaryIndexLoadingJobSpec(CompiledCreateIndexStatement createIndexStmt,
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider,
+            List<ExternalFile> files) throws AsterixException, AlgebricksException {
+        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
+                .createIndexOperationsHelper(createIndexStmt.getIndexType(), createIndexStmt.getDataverseName(),
+                        createIndexStmt.getDatasetName(), createIndexStmt.getIndexName(),
+                        createIndexStmt.getKeyFields(), createIndexStmt.getKeyFieldTypes(),
+                        createIndexStmt.isEnforced(), createIndexStmt.getGramLength(), metadataProvider,
+                        physicalOptimizationConfig, recType, enforcedType);
+        secondaryIndexHelper.setExternalFiles(files);
+        return secondaryIndexHelper.buildLoadingJobSpec();
+    }
+
+    public static JobSpecification buildDropSecondaryIndexJobSpec(CompiledIndexDropStatement indexDropStmt,
+            AqlMetadataProvider metadataProvider, Dataset dataset) throws AlgebricksException, MetadataException {
+        String dataverseName = indexDropStmt.getDataverseName() == null ? metadataProvider.getDefaultDataverseName()
+                : indexDropStmt.getDataverseName();
+        String datasetName = indexDropStmt.getDatasetName();
+        String indexName = indexDropStmt.getIndexName();
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, indexName, temp);
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+
+        // The index drop operation should be persistent regardless of whether the dataset is temporary or permanent.
+        IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, true));
+        AlgebricksPartitionConstraintHelper
+                .setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second);
+        spec.addRoot(btreeDrop);
+
+        return spec;
+    }
+
+    public static JobSpecification buildSecondaryIndexCompactJobSpec(CompiledIndexCompactStatement indexCompactStmt,
+            ARecordType recType, ARecordType enforcedType, AqlMetadataProvider metadataProvider, Dataset dataset)
+            throws AsterixException, AlgebricksException {
+        SecondaryIndexOperationsHelper secondaryIndexHelper = SecondaryIndexOperationsHelper
+                .createIndexOperationsHelper(indexCompactStmt.getIndexType(), indexCompactStmt.getDataverseName(),
+                        indexCompactStmt.getDatasetName(), indexCompactStmt.getIndexName(),
+                        indexCompactStmt.getKeyFields(), indexCompactStmt.getKeyTypes(), indexCompactStmt.isEnforced(),
+                        indexCompactStmt.getGramLength(), metadataProvider, physicalOptimizationConfig, recType,
+                        enforcedType);
+        return secondaryIndexHelper.buildCompactJobSpec();
+    }
+}
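A hedged usage sketch for the drop path above: the CompiledIndexDropStatement constructor argument order (dataverse, dataset, index) is an assumption for illustration, since that class is defined elsewhere, and metadataProvider, dataset, and a Hyracks client connection hcc are assumed to come from the calling context.

    // Sketch only: build the secondary-index drop job and submit it.
    CompiledIndexDropStatement dropStmt =
            new CompiledIndexDropStatement("TinySocial", "FacebookMessages", "fbAuthorIdx"); // illustrative names
    JobSpecification dropJob =
            IndexOperations.buildDropSecondaryIndexJobSpec(dropStmt, metadataProvider, dataset);
    hcc.startJob(dropJob);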

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/file/JobSpecificationUtils.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/JobSpecificationUtils.java b/asterix-app/src/main/java/org/apache/asterix/file/JobSpecificationUtils.java
new file mode 100644
index 0000000..5e76fdf
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/file/JobSpecificationUtils.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.file;
+
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+public class JobSpecificationUtils {
+    public static JobSpecification createJobSpecification() {
+        AsterixCompilerProperties compilerProperties = AsterixAppContextInfo.getInstance().getCompilerProperties();
+        int frameSize = compilerProperties.getFrameSize();
+        JobSpecification spec = new JobSpecification(frameSize);
+        return spec;
+    }
+}
\ No newline at end of file
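createJobSpecification exists only to pin the frame size configured in the compiler properties onto every spec built in this package, so all generated jobs agree on frame size. The call is equivalent to the following inline sketch, using the same accessors shown in the class above.

    // Equivalent to JobSpecificationUtils.createJobSpecification():
    int frameSize = AsterixAppContextInfo.getInstance().getCompilerProperties().getFrameSize();
    JobSpecification spec = new JobSpecification(frameSize);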


[33/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
new file mode 100644
index 0000000..be1653d
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
+import edu.uci.ics.asterix.dataflow.data.common.AqlNullableTypeComputer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.AqlNullWriterFactory;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryIntegerInspector;
+import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
+import edu.uci.ics.asterix.jobgen.AqlLogicalExpressionJobGen;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
+import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.api.IPointable;
+import edu.uci.ics.hyracks.data.std.primitive.VoidPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+
+public class ConstantFoldingRule implements IAlgebraicRewriteRule {
+
+    private final ConstantFoldingVisitor cfv = new ConstantFoldingVisitor();
+
+    /** Throws exceptions in substituteProducedVariable, setVarType, and one getVarType method. */
+    private static final IVariableTypeEnvironment _emptyTypeEnv = new IVariableTypeEnvironment() {
+
+        @Override
+        public boolean substituteProducedVariable(LogicalVariable v1, LogicalVariable v2) throws AlgebricksException {
+            throw new IllegalStateException();
+        }
+
+        @Override
+        public void setVarType(LogicalVariable var, Object type) {
+            throw new IllegalStateException();
+        }
+
+        @Override
+        public Object getVarType(LogicalVariable var, List<LogicalVariable> nonNullVariables,
+                List<List<LogicalVariable>> correlatedNullableVariableLists) throws AlgebricksException {
+            throw new IllegalStateException();
+        }
+
+        @Override
+        public Object getVarType(LogicalVariable var) throws AlgebricksException {
+            throw new IllegalStateException();
+        }
+
+        @Override
+        public Object getType(ILogicalExpression expr) throws AlgebricksException {
+            return AqlExpressionTypeComputer.INSTANCE.getType(expr, null, this);
+        }
+    };
+
+    private static final JobGenContext _jobGenCtx = new JobGenContext(null, null, null,
+            AqlSerializerDeserializerProvider.INSTANCE, AqlBinaryHashFunctionFactoryProvider.INSTANCE,
+            AqlBinaryHashFunctionFamilyProvider.INSTANCE, AqlBinaryComparatorFactoryProvider.INSTANCE,
+            AqlTypeTraitProvider.INSTANCE, AqlBinaryBooleanInspectorImpl.FACTORY, AqlBinaryIntegerInspector.FACTORY,
+            AqlPrinterFactoryProvider.INSTANCE, AqlNullWriterFactory.INSTANCE, null,
+            new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(AqlLogicalExpressionJobGen.INSTANCE),
+            AqlExpressionTypeComputer.INSTANCE, AqlNullableTypeComputer.INSTANCE, null, null, null, null,
+            GlobalConfig.DEFAULT_FRAME_SIZE, null);
+
+    private static final IOperatorSchema[] _emptySchemas = new IOperatorSchema[] {};
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        ILogicalOperator op = opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        return op.acceptExpressionTransform(cfv);
+    }
+
+    private class ConstantFoldingVisitor implements ILogicalExpressionVisitor<Pair<Boolean, ILogicalExpression>, Void>,
+            ILogicalExpressionReferenceTransform {
+
+        private final IPointable p = VoidPointable.FACTORY.createPointable();
+        private final ByteBufferInputStream bbis = new ByteBufferInputStream();
+        private final DataInputStream dis = new DataInputStream(bbis);
+
+        @Override
+        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
+            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
+            Pair<Boolean, ILogicalExpression> p = expr.accept(this, null);
+            if (p.first) {
+                exprRef.setValue(p.second);
+            }
+            return p.first;
+        }
+
+        @Override
+        public Pair<Boolean, ILogicalExpression> visitConstantExpression(ConstantExpression expr, Void arg)
+                throws AlgebricksException {
+            return new Pair<Boolean, ILogicalExpression>(false, expr);
+        }
+
+        @Override
+        public Pair<Boolean, ILogicalExpression> visitVariableReferenceExpression(VariableReferenceExpression expr,
+                Void arg) throws AlgebricksException {
+            return new Pair<Boolean, ILogicalExpression>(false, expr);
+        }
+
+        @Override
+        public Pair<Boolean, ILogicalExpression> visitScalarFunctionCallExpression(ScalarFunctionCallExpression expr,
+                Void arg) throws AlgebricksException {
+            boolean changed = changeRec(expr, arg);
+            if (!checkArgs(expr) || !expr.isFunctional()) {
+                return new Pair<Boolean, ILogicalExpression>(changed, expr);
+            }
+            //Current ARecord SerDe assumes a closed record, so we do not constant fold open record constructors
+            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)
+                    || expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
+                return new Pair<Boolean, ILogicalExpression>(false, null);
+            }
+            //Current List SerDe assumes a strongly typed list, so we do not constant fold the list constructors if they are not strongly typed
+            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)
+                    || expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
+                AbstractCollectionType listType = (AbstractCollectionType) TypeComputerUtilities.getRequiredType(expr);
+                if (listType != null
+                        && (listType.getItemType().getTypeTag() == ATypeTag.ANY || listType.getItemType() instanceof AbstractCollectionType)) {
+                    //case1: listType == null,  could be a nested list inside a list<ANY>
+                    //case2: itemType = ANY
+                    //case3: itemType = a nested list
+                    return new Pair<Boolean, ILogicalExpression>(false, null);
+                }
+            }
+            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
+                ARecordType rt = (ARecordType) _emptyTypeEnv.getType(expr.getArguments().get(0).getValue());
+                String str = ((AString) ((AsterixConstantValue) ((ConstantExpression) expr.getArguments().get(1)
+                        .getValue()).getValue()).getObject()).getStringValue();
+                int k;
+                try {
+                    k = rt.findFieldPosition(str);
+                } catch (IOException e) {
+                    throw new AlgebricksException(e);
+                }
+                if (k >= 0) {
+                    // wait for the ByNameToByIndex rule to apply
+                    return new Pair<Boolean, ILogicalExpression>(changed, expr);
+                }
+            }
+            IScalarEvaluatorFactory fact = _jobGenCtx.getExpressionRuntimeProvider().createEvaluatorFactory(expr,
+                    _emptyTypeEnv, _emptySchemas, _jobGenCtx);
+            IScalarEvaluator eval = fact.createScalarEvaluator(null);
+            eval.evaluate(null, p);
+            Object t = _emptyTypeEnv.getType(expr);
+
+            @SuppressWarnings("rawtypes")
+            ISerializerDeserializer serde = _jobGenCtx.getSerializerDeserializerProvider().getSerializerDeserializer(t);
+            bbis.setByteBuffer(ByteBuffer.wrap(p.getByteArray(), p.getStartOffset(), p.getLength()), 0);
+            IAObject o;
+            try {
+                o = (IAObject) serde.deserialize(dis);
+            } catch (HyracksDataException e) {
+                throw new AlgebricksException(e);
+            }
+            return new Pair<Boolean, ILogicalExpression>(true, new ConstantExpression(new AsterixConstantValue(o)));
+        }
+
+        @Override
+        public Pair<Boolean, ILogicalExpression> visitAggregateFunctionCallExpression(
+                AggregateFunctionCallExpression expr, Void arg) throws AlgebricksException {
+            boolean changed = changeRec(expr, arg);
+            return new Pair<Boolean, ILogicalExpression>(changed, expr);
+        }
+
+        @Override
+        public Pair<Boolean, ILogicalExpression> visitStatefulFunctionCallExpression(
+                StatefulFunctionCallExpression expr, Void arg) throws AlgebricksException {
+            boolean changed = changeRec(expr, arg);
+            return new Pair<Boolean, ILogicalExpression>(changed, expr);
+        }
+
+        @Override
+        public Pair<Boolean, ILogicalExpression> visitUnnestingFunctionCallExpression(
+                UnnestingFunctionCallExpression expr, Void arg) throws AlgebricksException {
+            boolean changed = changeRec(expr, arg);
+            return new Pair<Boolean, ILogicalExpression>(changed, expr);
+        }
+
+        private boolean changeRec(AbstractFunctionCallExpression expr, Void arg) throws AlgebricksException {
+            boolean changed = false;
+            for (Mutable<ILogicalExpression> r : expr.getArguments()) {
+                Pair<Boolean, ILogicalExpression> p2 = r.getValue().accept(this, arg);
+                if (p2.first) {
+                    r.setValue(p2.second);
+                    changed = true;
+                }
+            }
+            return changed;
+        }
+
+        private boolean checkArgs(AbstractFunctionCallExpression expr) throws AlgebricksException {
+            for (Mutable<ILogicalExpression> r : expr.getArguments()) {
+                if (r.getValue().getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+                    return false;
+                }
+            }
+            return true;
+        }
+    }
+}
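ConstantFoldingRule, like every IAlgebraicRewriteRule, does its work in rewritePost and is applied by the Algebricks rewriter after being registered in a rule collection. A sketch of that registration follows (the list and the java.util imports are illustrative; the real rule sets are assembled elsewhere in the codebase).

    // Illustrative only: registering the rule so the optimizer's rewriter can apply it.
    List<IAlgebraicRewriteRule> rules = new ArrayList<IAlgebraicRewriteRule>();
    rules.add(new ConstantFoldingRule());
    // The rewriter walks the plan calling rewritePre/rewritePost on each operator reference, so a
    // purely constant call such as numeric-add(1, 2) is replaced by the constant 3 at compile time.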

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java
new file mode 100644
index 0000000..218cd4b
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.om.base.AInt64;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class CountVarToCountOneRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    // This rule applies only to a group-by whose single nested aggregate is a count.
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.GROUP) {
+            return false;
+        }
+        GroupByOperator g = (GroupByOperator) op1;
+        if (g.getNestedPlans().size() != 1) {
+            return false;
+        }
+        ILogicalPlan p = g.getNestedPlans().get(0);
+        if (p.getRoots().size() != 1) {
+            return false;
+        }
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) p.getRoots().get(0).getValue();
+        if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
+            return false;
+        }
+        AggregateOperator agg = (AggregateOperator) op2;
+        if (agg.getExpressions().size() != 1) {
+            return false;
+        }
+        ILogicalExpression exp2 = agg.getExpressions().get(0).getValue();
+        if (exp2.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression fun = (AbstractFunctionCallExpression) exp2;
+        if (fun.getFunctionIdentifier() != AsterixBuiltinFunctions.COUNT) {
+            return false;
+        }
+        ILogicalExpression exp3 = fun.getArguments().get(0).getValue();
+        if (exp3.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+        if (((AbstractLogicalOperator) agg.getInputs().get(0).getValue()).getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
+            return false;
+        }
+        fun.getArguments().get(0).setValue(new ConstantExpression(new AsterixConstantValue(new AInt64(1L))));
+        return true;
+    }
+
+}

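A note on CountVarToCountOneRule above: it only fires on a GROUP operator whose single nested plan root
is an AGGREGATE computing count($v) directly over a nested-tuple-source, and it then replaces the counted
variable with the constant 1 so the runtime does not need to materialize $v. The fragment below is a
hypothetical driver, not part of this commit; it assumes the caller already has a group-by operator
reference and an optimization context from an enclosing rewrite pass, and it only exercises the
rewritePost entry point shown in the diff.

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.asterix.optimizer.rules.CountVarToCountOneRule;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;

    public final class CountVarToCountOneDriver {
        // groupByRef and ctx are assumed to be supplied by an existing optimization pass;
        // building a complete Algebricks plan is out of scope for this sketch.
        public static boolean apply(Mutable<ILogicalOperator> groupByRef, IOptimizationContext ctx)
                throws AlgebricksException {
            // Returns true when the nested aggregate now reads count(1) instead of count($v).
            return new CountVarToCountOneRule().rewritePost(groupByRef, ctx);
        }
    }
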
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
new file mode 100644
index 0000000..0fb205d
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2014 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.HashSet;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.om.base.AOrderedList;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class DisjunctivePredicateToJoinRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        SelectOperator select;
+        if ((select = asSelectOperator(opRef)) == null) {
+            return false;
+        }
+
+        AbstractFunctionCallExpression condEx;
+        if ((condEx = asFunctionCallExpression(select.getCondition(), AlgebricksBuiltinFunctions.OR)) == null) {
+            return false;
+        }
+
+        List<Mutable<ILogicalExpression>> args = condEx.getArguments();
+
+        VariableReferenceExpression varEx = null;
+        IAType valType = null;
+        HashSet<AsterixConstantValue> values = new HashSet<AsterixConstantValue>();
+
+        for (Mutable<ILogicalExpression> arg : args) {
+            AbstractFunctionCallExpression fctCall;
+            if ((fctCall = asFunctionCallExpression(arg, AlgebricksBuiltinFunctions.EQ)) == null) {
+                return false;
+            }
+
+            boolean haveConst = false;
+            boolean haveVar = false;
+            List<Mutable<ILogicalExpression>> fctArgs = fctCall.getArguments();
+            for (Mutable<ILogicalExpression> fctArg : fctArgs) {
+                final ILogicalExpression argExpr = fctArg.getValue();
+                switch (argExpr.getExpressionTag()) {
+                    case CONSTANT:
+                        haveConst = true;
+                        AsterixConstantValue value = (AsterixConstantValue) ((ConstantExpression) argExpr).getValue();
+                        if (valType == null) {
+                            valType = value.getObject().getType();
+                        } else if (!isCompatible(valType, value.getObject().getType())) {
+                            return false;
+                        }
+                        values.add(value);
+                        break;
+                    case VARIABLE:
+                        haveVar = true;
+                        final VariableReferenceExpression varArg = (VariableReferenceExpression) argExpr;
+                        if (varEx == null) {
+                            varEx = varArg;
+                        } else if (!varEx.getVariableReference().equals(varArg.getVariableReference())) {
+                            return false;
+                        }
+                        break;
+                    default:
+                        return false;
+                }
+            }
+            if (!(haveVar && haveConst)) {
+                return false;
+            }
+        }
+
+        AOrderedList list = new AOrderedList(new AOrderedListType(valType, "orderedlist"));
+        for (AsterixConstantValue value : values) {
+            list.add(value.getObject());
+        }
+
+        EmptyTupleSourceOperator ets = new EmptyTupleSourceOperator();
+        context.computeAndSetTypeEnvironmentForOperator(ets);
+
+        ILogicalExpression cExp = new ConstantExpression(new AsterixConstantValue(list));
+        Mutable<ILogicalExpression> mutCExp = new MutableObject<ILogicalExpression>(cExp);
+        IFunctionInfo scanFctInfo = AsterixBuiltinFunctions
+                .getAsterixFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION);
+        UnnestingFunctionCallExpression scanExp = new UnnestingFunctionCallExpression(scanFctInfo, mutCExp);
+        LogicalVariable scanVar = context.newVar();
+        UnnestOperator unn = new UnnestOperator(scanVar, new MutableObject<ILogicalExpression>(scanExp));
+        unn.getInputs().add(new MutableObject<ILogicalOperator>(ets));
+        context.computeAndSetTypeEnvironmentForOperator(unn);
+
+        IFunctionInfo eqFctInfo = AsterixBuiltinFunctions.getAsterixFunctionInfo(AlgebricksBuiltinFunctions.EQ);
+        AbstractFunctionCallExpression eqExp = new ScalarFunctionCallExpression(eqFctInfo);
+        eqExp.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(scanVar)));
+        eqExp.getArguments().add(new MutableObject<ILogicalExpression>(varEx.cloneExpression()));
+        eqExp.getAnnotations().put(IndexedNLJoinExpressionAnnotation.INSTANCE,
+                IndexedNLJoinExpressionAnnotation.INSTANCE);
+
+        InnerJoinOperator jOp = new InnerJoinOperator(new MutableObject<ILogicalExpression>(eqExp));
+        jOp.getInputs().add(new MutableObject<ILogicalOperator>(unn));
+        jOp.getInputs().add(select.getInputs().get(0));
+
+        opRef.setValue(jOp);
+        context.computeAndSetTypeEnvironmentForOperator(jOp);
+
+        return true;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    /**
+     * This checks the compatibility of the constants' types to ensure that the comparison behaves as expected
+     * when joining. Right now compatibility is defined as type equality, but it could be relaxed.
+     * Once type promotion works correctly in all parts of the system, this check should not be needed anymore.
+     * (see https://code.google.com/p/asterixdb/issues/detail?id=716)
+     * 
+     * @param t1
+     *            one type
+     * @param t2
+     *            another type
+     * @return true, if types are equal
+     */
+    private static boolean isCompatible(IAType t1, IAType t2) {
+        return t1.equals(t2);
+    }
+
+    // some helpers
+
+    private static SelectOperator asSelectOperator(ILogicalOperator op) {
+        return op.getOperatorTag() == LogicalOperatorTag.SELECT ? (SelectOperator) op : null;
+    }
+
+    private static SelectOperator asSelectOperator(Mutable<ILogicalOperator> op) {
+        return asSelectOperator(op.getValue());
+    }
+
+    private static AbstractFunctionCallExpression asFunctionCallExpression(ILogicalExpression ex, FunctionIdentifier fi) {
+        AbstractFunctionCallExpression fctCall = (ex.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL ? (AbstractFunctionCallExpression) ex
+                : null);
+        if (fctCall != null && (fi == null || fctCall.getFunctionIdentifier().equals(fi)))
+            return fctCall;
+        return null;
+    }
+
+    private static AbstractFunctionCallExpression asFunctionCallExpression(Mutable<ILogicalExpression> ex,
+            FunctionIdentifier fi) {
+        return asFunctionCallExpression(ex.getValue(), fi);
+    }
+
+}

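To make the intent of DisjunctivePredicateToJoinRule concrete: a SELECT whose condition is a disjunction
of equalities between one variable and constants of a single type, e.g. $x = 1 or $x = 2 or $x = 3, becomes
an inner join between the SELECT's input and an UNNEST over the constant ordered list of those values,
annotated for an indexed nested-loop join so an index on $x can be exploited. The sketch below only mirrors
how the rule collects the constants into an AOrderedList; the class name is illustrative and the int64 item
type is an assumption made for the example.

    import edu.uci.ics.asterix.om.base.AInt64;
    import edu.uci.ics.asterix.om.base.AOrderedList;
    import edu.uci.ics.asterix.om.types.AOrderedListType;
    import edu.uci.ics.asterix.om.types.BuiltinType;

    public final class DisjunctConstantListSketch {
        // The rule gathers the distinct constants of the disjunction into one ordered list,
        // which it then unnests and joins back to the original input on equality.
        public static AOrderedList constantsOfDisjunction() {
            AOrderedList list = new AOrderedList(new AOrderedListType(BuiltinType.AINT64, "orderedlist"));
            list.add(new AInt64(1L));
            list.add(new AInt64(2L));
            list.add(new AInt64(3L));
            return list;
        }
    }
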
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
new file mode 100644
index 0000000..7c36cb7
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractDistinctByExpressionsRule.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
+
+/**
+ * Needed only because the current Hyracks operators require keys to be fields.
+ */
+public class ExtractDistinctByExpressionsRule extends AbstractExtractExprRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.DISTINCT) {
+            return false;
+        }
+
+        if (context.checkIfInDontApplySet(this, op1)) {
+            return false;
+        }
+        context.addToDontApplySet(this, op1);
+        DistinctOperator d = (DistinctOperator) op1;
+        boolean changed = false;
+        Mutable<ILogicalOperator> opRef2 = d.getInputs().get(0);
+        List<Mutable<ILogicalExpression>> newExprList = new ArrayList<Mutable<ILogicalExpression>>();
+        for (Mutable<ILogicalExpression> expr : d.getExpressions()) {
+            LogicalExpressionTag tag = expr.getValue().getExpressionTag();
+            if (tag == LogicalExpressionTag.VARIABLE || tag == LogicalExpressionTag.CONSTANT) {
+                newExprList.add(expr);
+                continue;
+            }
+            LogicalVariable v = extractExprIntoAssignOpRef(expr.getValue(), opRef2, context);
+            ILogicalExpression newExpr = new VariableReferenceExpression(v);
+            newExprList.add(new MutableObject<ILogicalExpression>(newExpr));
+            changed = true;
+        }
+        if (changed) {
+            d.getExpressions().clear();
+            d.getExpressions().addAll(newExprList);
+            context.computeAndSetTypeEnvironmentForOperator(d);
+        }
+        return changed;
+    }
+
+}

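ExtractDistinctByExpressionsRule, like the ExtractOrderExpressionsRule that follows, normalizes plans so
that key expressions are plain variables: any distinct-by key that is neither a variable nor a constant is
moved into an ASSIGN below the DISTINCT and replaced by a reference to the newly introduced variable. The
snippet below sketches how such normalization rules are typically grouped into a rewrite pass; the list and
method names are illustrative, and the actual rule-set wiring is outside this hunk.

    import java.util.LinkedList;
    import java.util.List;

    import edu.uci.ics.asterix.optimizer.rules.ExtractDistinctByExpressionsRule;
    import edu.uci.ics.asterix.optimizer.rules.ExtractOrderExpressionsRule;
    import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

    public final class NormalizationRulesSketch {
        public static List<IAlgebraicRewriteRule> buildNormalizationRules() {
            List<IAlgebraicRewriteRule> rules = new LinkedList<IAlgebraicRewriteRule>();
            rules.add(new ExtractDistinctByExpressionsRule()); // distinct-by keys become variables
            rules.add(new ExtractOrderExpressionsRule());      // order-by keys become variables
            return rules;
        }
    }
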
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractOrderExpressionsRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
new file mode 100644
index 0000000..b53c0cc
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ExtractOrderExpressionsRule.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.optimizer.base.AnalysisUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
+
+public class ExtractOrderExpressionsRule extends AbstractExtractExprRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.ORDER) {
+            return false;
+        }
+
+        if (context.checkIfInDontApplySet(this, op1)) {
+            return false;
+        }
+        context.addToDontApplySet(this, op1);
+        OrderOperator oo = (OrderOperator) op1;
+
+        if (!orderHasComplexExpr(oo)) {
+            return false;
+        }
+        Mutable<ILogicalOperator> opRef2 = oo.getInputs().get(0);
+        for (Pair<IOrder, Mutable<ILogicalExpression>> orderPair : oo.getOrderExpressions()) {
+            ILogicalExpression expr = orderPair.second.getValue();
+            if (expr.getExpressionTag() != LogicalExpressionTag.VARIABLE && !AnalysisUtil.isAccessToFieldRecord(expr)) {
+                LogicalVariable v = extractExprIntoAssignOpRef(expr, opRef2, context);
+                orderPair.second.setValue(new VariableReferenceExpression(v));
+            }
+        }
+        context.computeAndSetTypeEnvironmentForOperator(oo);
+        return true;
+    }
+
+    private boolean orderHasComplexExpr(OrderOperator oo) {
+        for (Pair<IOrder, Mutable<ILogicalExpression>> orderPair : oo.getOrderExpressions()) {
+            if (orderPair.second.getValue().getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}

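One detail of ExtractOrderExpressionsRule worth noting: order keys that are already variables, or that
AnalysisUtil.isAccessToFieldRecord recognizes as simple field accesses, are left untouched; everything else
is pushed into an ASSIGN and the ORDER key becomes a variable reference. The rule also adds the ORDER
operator to the don't-apply set so it is not revisited. Below is a hypothetical driver around the rule's
entry point; orderOpRef and ctx are assumed to come from an enclosing rewrite pass.

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.asterix.optimizer.rules.ExtractOrderExpressionsRule;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;

    public final class ExtractOrderExpressionsDriver {
        public static boolean apply(Mutable<ILogicalOperator> orderOpRef, IOptimizationContext ctx)
                throws AlgebricksException {
            // Returns true only if at least one order key was a complex expression
            // and has been replaced by a variable defined in a new ASSIGN below the ORDER.
            return new ExtractOrderExpressionsRule().rewritePost(orderOpRef, ctx);
        }
    }
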
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
new file mode 100644
index 0000000..fa7c7f4
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class FeedScanCollectionToUnnest implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        if (context.checkIfInDontApplySet(this, op)) {
+            return false;
+        }
+        if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
+            context.addToDontApplySet(this, op);
+            return false;
+        }
+        UnnestOperator unnest = (UnnestOperator) op;
+        ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue();
+        if (needsScanCollection(unnestExpr, op)) {
+            ILogicalExpression newExpr = new UnnestingFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
+                    new MutableObject<ILogicalExpression>(unnestExpr));
+            unnest.getExpressionRef().setValue(newExpr);
+            context.addToDontApplySet(this, op);
+            return true;
+        }
+        context.addToDontApplySet(this, op);
+        return false;
+    }
+
+    private ILogicalExpression findVarOriginExpression(LogicalVariable v, ILogicalOperator op)
+            throws AlgebricksException {
+        boolean searchInputs = false;
+        if (!(op instanceof AbstractAssignOperator)) {
+            searchInputs = true;
+        } else {
+            AbstractAssignOperator aao = (AbstractAssignOperator) op;
+            List<LogicalVariable> producedVars = new ArrayList<>();
+            VariableUtilities.getProducedVariables(op, producedVars);
+            int exprIndex = producedVars.indexOf(v);
+            if (exprIndex == -1) {
+                searchInputs = true;
+            } else {
+                ILogicalExpression originalCandidate = aao.getExpressions().get(exprIndex).getValue();
+                if (originalCandidate.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+                    searchInputs = true;
+                } else {
+                    return originalCandidate;
+                }
+            }
+        }
+
+        if (searchInputs) {
+            for (Mutable<ILogicalOperator> childOp : op.getInputs()) {
+                ILogicalExpression ret = findVarOriginExpression(v, childOp.getValue());
+                if (ret != null) {
+                    return ret;
+                }
+            }
+        }
+
+        throw new IllegalStateException("Unable to find the original expression that produced variable " + v);
+    }
+
+    private boolean needsScanCollection(ILogicalExpression unnestExpr, ILogicalOperator op) throws AlgebricksException {
+        switch (unnestExpr.getExpressionTag()) {
+            case VARIABLE: {
+                LogicalVariable v = ((VariableReferenceExpression) unnestExpr).getVariableReference();
+                ILogicalExpression originalExpr = findVarOriginExpression(v, op);
+                if (originalExpr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+                    return false;
+                } else {
+                    return !isUnnestingFunction(originalExpr);
+                }
+            }
+            case FUNCTION_CALL: {
+                return !isUnnestingFunction(unnestExpr);
+            }
+            default: {
+                return false;
+            }
+        }
+    }
+
+    private boolean isUnnestingFunction(ILogicalExpression expr) {
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+            return fce.getKind() == FunctionKind.UNNEST;
+        }
+        return false;
+    }
+}

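FeedScanCollectionToUnnest ensures that an UNNEST whose expression is not already an unnesting function
(and does not originate from a constant) is evaluated through scan-collection, by wrapping the original
expression as shown below. This helper only restates the substitution the rule performs in place; the class
and method names are illustrative.

    import org.apache.commons.lang3.mutable.MutableObject;

    import edu.uci.ics.asterix.aql.util.FunctionUtils;
    import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
    import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;

    public final class ScanCollectionWrapSketch {
        // The original unnest expression becomes the single argument of scan-collection,
        // mirroring the newExpr built inside rewritePost above.
        public static ILogicalExpression wrapInScanCollection(ILogicalExpression unnestExpr) {
            return new UnnestingFunctionCallExpression(
                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
                    new MutableObject<ILogicalExpression>(unnestExpr));
        }
    }
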
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyEqRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyEqRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyEqRule.java
new file mode 100644
index 0000000..3a06168
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyEqRule.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.om.base.IAObject;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.optimizer.base.FuzzyUtils;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class FuzzyEqRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+
+        // the current operator must be an INNERJOIN, LEFTOUTERJOIN, or SELECT
+        Mutable<ILogicalExpression> expRef;
+        if (op.getOperatorTag() == LogicalOperatorTag.INNERJOIN
+                || op.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
+            AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
+            expRef = joinOp.getCondition();
+        } else if (op.getOperatorTag() == LogicalOperatorTag.SELECT) {
+            SelectOperator selectOp = (SelectOperator) op;
+            expRef = selectOp.getCondition();
+        } else {
+            return false;
+        }
+
+        AqlMetadataProvider metadataProvider = ((AqlMetadataProvider) context.getMetadataProvider());
+
+        IVariableTypeEnvironment env = context.getOutputTypeEnvironment(op);
+        if (expandFuzzyEq(expRef, context, env, metadataProvider)) {
+            context.computeAndSetTypeEnvironmentForOperator(op);
+            return true;
+        }
+        return false;
+    }
+
+    private boolean expandFuzzyEq(Mutable<ILogicalExpression> expRef, IOptimizationContext context,
+            IVariableTypeEnvironment env, AqlMetadataProvider metadataProvider) throws AlgebricksException {
+        ILogicalExpression exp = expRef.getValue();
+
+        if (exp.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+
+        boolean expanded = false;
+        AbstractFunctionCallExpression funcExp = (AbstractFunctionCallExpression) exp;
+        FunctionIdentifier fi = funcExp.getFunctionIdentifier();
+        if (fi.equals(AsterixBuiltinFunctions.FUZZY_EQ)) {
+            List<Mutable<ILogicalExpression>> inputExps = funcExp.getArguments();
+
+            String simFuncName = FuzzyUtils.getSimFunction(metadataProvider);
+            ArrayList<Mutable<ILogicalExpression>> similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            for (int i = 0; i < inputExps.size(); ++i) {
+                Mutable<ILogicalExpression> inputExpRef = inputExps.get(i);
+                similarityArgs.add(inputExpRef);
+            }
+
+            FunctionIdentifier simFunctionIdentifier = FuzzyUtils.getFunctionIdentifier(simFuncName);
+            ScalarFunctionCallExpression similarityExp = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(simFunctionIdentifier), similarityArgs);
+            // Add annotations from the original fuzzy-eq function.
+            similarityExp.getAnnotations().putAll(funcExp.getAnnotations());
+            ArrayList<Mutable<ILogicalExpression>> cmpArgs = new ArrayList<Mutable<ILogicalExpression>>();
+            cmpArgs.add(new MutableObject<ILogicalExpression>(similarityExp));
+            IAObject simThreshold = FuzzyUtils.getSimThreshold(metadataProvider, simFuncName);
+            cmpArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                    simThreshold))));
+            ScalarFunctionCallExpression cmpExpr = FuzzyUtils.getComparisonExpr(simFuncName, cmpArgs);
+            expRef.setValue(cmpExpr);
+            return true;
+        } else if (fi.equals(AlgebricksBuiltinFunctions.AND) || fi.equals(AlgebricksBuiltinFunctions.OR)) {
+            for (int i = 0; i < 2; i++) {
+                if (expandFuzzyEq(funcExp.getArguments().get(i), context, env, metadataProvider)) {
+                    expanded = true;
+                }
+            }
+        }
+        return expanded;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+}

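FuzzyEqRule expands every fuzzy-eq (~=) it finds in a SELECT or join condition, descending recursively
through and/or, into a comparison of a similarity function applied to the same arguments against a
threshold; both the similarity function and the threshold are looked up from the metadata provider via
FuzzyUtils. The fragment below is a hypothetical driver; opRef and ctx are assumed inputs and no metadata
setup is shown.

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.asterix.optimizer.rules.FuzzyEqRule;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;

    public final class FuzzyEqDriver {
        public static boolean apply(Mutable<ILogicalOperator> opRef, IOptimizationContext ctx)
                throws AlgebricksException {
            // Returns true when at least one ~= was rewritten, e.g. into a check of the form
            // similarity-jaccard(x, y) >= <threshold taken from the metadata provider>.
            return new FuzzyEqRule().rewritePost(opRef, ctx);
        }
    }
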
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java
new file mode 100644
index 0000000..9bcfafa
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java
@@ -0,0 +1,404 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.io.StringReader;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.algebra.base.LogicalOperatorDeepCopyVisitor;
+import edu.uci.ics.asterix.aql.base.Clause;
+import edu.uci.ics.asterix.aql.expression.Identifier;
+import edu.uci.ics.asterix.aqlplus.parser.AQLPlusParser;
+import edu.uci.ics.asterix.aqlplus.parser.ParseException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.types.TypeHelper;
+import edu.uci.ics.asterix.optimizer.base.FuzzyUtils;
+import edu.uci.ics.asterix.translator.AqlPlusExpressionToPlanTranslator;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class FuzzyJoinRule implements IAlgebraicRewriteRule {
+
+    private static HashSet<FunctionIdentifier> simFuncs = new HashSet<FunctionIdentifier>();
+    static {
+        simFuncs.add(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK);
+    }
+
+    private static final String AQLPLUS = ""
+            //
+            // -- - Stage 3 - --
+            //
+            + "((#RIGHT), "
+            + "  (join((#LEFT), "
+            //
+            // -- -- - Stage 2 - --
+            //
+            + "    ("
+            + "    join( "
+            + "      ( "
+            + "      #LEFT_1 "
+            + "      let $tokensUnrankedLeft := %s($$LEFT_1) "
+            + "      let $lenLeft := len($tokensUnrankedLeft) "
+            + "      let $tokensLeft := "
+            + "        for $token in $tokensUnrankedLeft "
+            + "        for $tokenRanked at $i in "
+            //
+            // -- -- -- - Stage 1 - --
+            //
+            // + "          #LEFT_2 "
+            // + "          let $id := $$LEFTPK_2 "
+            // + "          for $token in %s($$LEFT_2) "
+            + "          #RIGHT_2 "
+            + "          let $id := $$RIGHTPK_2 "
+            + "          for $token in %s($$RIGHT_2) "
+            + "          /*+ hash */ "
+            + "          group by $tokenGroupped := $token with $id "
+            + "          /*+ inmem 34 198608 */ "
+            + "          order by count($id), $tokenGroupped "
+            + "          return $tokenGroupped "
+            //
+            // -- -- -- -
+            //
+            + "        where $token = /*+ bcast */ $tokenRanked "
+            + "        order by $i "
+            + "        return $i "
+            + "      for $prefixTokenLeft in subset-collection($tokensLeft, 0, prefix-len-%s(len($tokensLeft), %ff)) "
+            + "      ),( "
+            + "      #RIGHT_1 "
+            + "      let $tokensUnrankedRight := %s($$RIGHT_1) "
+            + "      let $lenRight := len($tokensUnrankedRight) "
+            + "      let $tokensRight := "
+            + "        for $token in $tokensUnrankedRight "
+            + "        for $tokenRanked at $i in "
+            //
+            // -- -- -- - Stage 1 - --
+            //
+            // + "          #LEFT_3 "
+            // + "          let $id := $$LEFTPK_3 "
+            // + "          for $token in %s($$LEFT_3) "
+            + "          #RIGHT_3 "
+            + "          let $id := $$RIGHTPK_3 "
+            + "          for $token in %s($$RIGHT_3) "
+            + "          /*+ hash */ "
+            + "          group by $tokenGroupped := $token with $id "
+            + "          /*+ inmem 34 198608 */ "
+            + "          order by count($id), $tokenGroupped "
+            + "          return $tokenGroupped "
+            //
+            // -- -- -- -
+            //
+            + "        where $token = /*+ bcast */ $tokenRanked "
+            + "        order by $i "
+            + "        return $i "
+            + "      for $prefixTokenRight in subset-collection($tokensRight, 0, prefix-len-%s(len($tokensRight), %ff)) "
+            + "      ), $prefixTokenLeft = $prefixTokenRight) "
+            + "    let $sim := similarity-%s-prefix($lenLeft, $tokensLeft, $lenRight, $tokensRight, $prefixTokenLeft, %ff) "
+            + "    where $sim >= %ff " + "    /*+ hash*/ "
+            + "    group by $idLeft := $$LEFTPK_1, $idRight := $$RIGHTPK_1 with $sim "
+            //
+            // -- -- -
+            //
+            + "    ), $$LEFTPK = $idLeft)),  $$RIGHTPK = $idRight)";
+
+    private Collection<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        // the current operator must be an inner join or a left-outer join
+        if (op.getOperatorTag() != LogicalOperatorTag.INNERJOIN
+                && op.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
+            return false;
+        }
+
+        // Find GET_ITEM function.
+        AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) op;
+        Mutable<ILogicalExpression> expRef = joinOp.getCondition();
+        Mutable<ILogicalExpression> getItemExprRef = getSimilarityExpression(expRef);
+        if (getItemExprRef == null) {
+            return false;
+        }
+        // Check if the GET_ITEM function is on one of the supported similarity-check functions.
+        AbstractFunctionCallExpression getItemFuncExpr = (AbstractFunctionCallExpression) getItemExprRef.getValue();
+        Mutable<ILogicalExpression> argRef = getItemFuncExpr.getArguments().get(0);
+        AbstractFunctionCallExpression simFuncExpr = (AbstractFunctionCallExpression) argRef.getValue();
+        if (!simFuncs.contains(simFuncExpr.getFunctionIdentifier())) {
+            return false;
+        }
+        // Skip this rule based on annotations.
+        if (simFuncExpr.getAnnotations().containsKey(IndexedNLJoinExpressionAnnotation.INSTANCE)) {
+            return false;
+        }
+
+        List<Mutable<ILogicalOperator>> inputOps = joinOp.getInputs();
+        ILogicalOperator leftInputOp = inputOps.get(0).getValue();
+        ILogicalOperator rightInputOp = inputOps.get(1).getValue();
+
+        List<Mutable<ILogicalExpression>> inputExps = simFuncExpr.getArguments();
+
+        ILogicalExpression inputExp0 = inputExps.get(0).getValue();
+        ILogicalExpression inputExp1 = inputExps.get(1).getValue();
+
+        // both arguments of the similarity check must be variables
+        if (inputExp0.getExpressionTag() != LogicalExpressionTag.VARIABLE
+                || inputExp1.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
+            return false;
+        }
+
+        LogicalVariable inputVar0 = ((VariableReferenceExpression) inputExp0).getVariableReference();
+        LogicalVariable inputVar1 = ((VariableReferenceExpression) inputExp1).getVariableReference();
+
+        LogicalVariable leftInputVar;
+        LogicalVariable rightInputVar;
+
+        liveVars.clear();
+        VariableUtilities.getLiveVariables(leftInputOp, liveVars);
+        if (liveVars.contains(inputVar0)) {
+            leftInputVar = inputVar0;
+            rightInputVar = inputVar1;
+        } else {
+            leftInputVar = inputVar1;
+            rightInputVar = inputVar0;
+        }
+
+        List<LogicalVariable> leftInputPKs = context.findPrimaryKey(leftInputVar);
+        List<LogicalVariable> rightInputPKs = context.findPrimaryKey(rightInputVar);
+        // Bail if primary keys could not be inferred.
+        if (leftInputPKs == null || rightInputPKs == null) {
+            return false;
+        }
+        // each primary key must consist of exactly one variable
+        if (leftInputPKs.size() != 1 || rightInputPKs.size() != 1) {
+            return false;
+        }
+        IAType leftType = (IAType) context.getOutputTypeEnvironment(leftInputOp).getVarType(leftInputVar);
+        IAType rightType = (IAType) context.getOutputTypeEnvironment(rightInputOp).getVarType(rightInputVar);
+        // the left-hand side and right-hand side of "~=" must have the same type
+        IAType left2 = TypeHelper.getNonOptionalType(leftType);
+        IAType right2 = TypeHelper.getNonOptionalType(rightType);
+        if (!left2.deepEqual(right2)) {
+            return false;
+        }
+        //
+        // -- - FIRE - --
+        //
+        AqlMetadataProvider metadataProvider = ((AqlMetadataProvider) context.getMetadataProvider());
+        FunctionIdentifier funcId = FuzzyUtils.getTokenizer(leftType.getTypeTag());
+        String tokenizer;
+        if (funcId == null) {
+            tokenizer = "";
+        } else {
+            tokenizer = funcId.getName();
+        }
+
+        float simThreshold = FuzzyUtils.getSimThreshold(metadataProvider);
+        String simFunction = FuzzyUtils.getSimFunction(metadataProvider);
+
+        // finalize AQL+ query
+        String prepareJoin;
+        switch (joinOp.getJoinKind()) {
+            case INNER: {
+                prepareJoin = "join" + AQLPLUS;
+                break;
+            }
+            case LEFT_OUTER: {
+                // TODO To make it work for Left Outer Joins, we should permute
+                // the #LEFT and #RIGHT at the top of the AQL+ query. But, when
+                // doing this, the
+                // fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql (the one
+                // doing 3-way fuzzy joins) gives a different result. But even
+                // if we don't change the FuzzyJoinRule, permuting the for
+                // clauses in fuzzyjoin/user-vis-int-vis-user-lot-aqlplus_1.aql
+                // leads to different results, which suggests there is some
+                // other sort of bug.
+                return false;
+                // prepareJoin = "loj" + AQLPLUS;
+                // break;
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+        String aqlPlus = String.format(Locale.US, prepareJoin, tokenizer, tokenizer, simFunction, simThreshold,
+                tokenizer, tokenizer, simFunction, simThreshold, simFunction, simThreshold, simThreshold);
+
+        LogicalVariable leftPKVar = leftInputPKs.get(0);
+        LogicalVariable rightPKVar = rightInputPKs.get(0);
+
+        Counter counter = new Counter(context.getVarCounter());
+
+        AQLPlusParser parser = new AQLPlusParser(new StringReader(aqlPlus));
+        parser.initScope();
+        parser.setVarCounter(counter);
+        List<Clause> clauses;
+        try {
+            clauses = parser.Clauses();
+        } catch (ParseException e) {
+            throw new AlgebricksException(e);
+        }
+        // The translator will compile metadata internally. Run this compilation
+        // under the same transaction id as the "outer" compilation.
+        AqlPlusExpressionToPlanTranslator translator = new AqlPlusExpressionToPlanTranslator(
+                metadataProvider.getJobId(), metadataProvider, counter, null, null);
+
+        LogicalOperatorDeepCopyVisitor deepCopyVisitor = new LogicalOperatorDeepCopyVisitor(counter);
+
+        translator.addOperatorToMetaScope(new Identifier("#LEFT"), leftInputOp);
+        translator.addVariableToMetaScope(new Identifier("$$LEFT"), leftInputVar);
+        translator.addVariableToMetaScope(new Identifier("$$LEFTPK"), leftPKVar);
+
+        translator.addOperatorToMetaScope(new Identifier("#RIGHT"), rightInputOp);
+        translator.addVariableToMetaScope(new Identifier("$$RIGHT"), rightInputVar);
+        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK"), rightPKVar);
+
+        translator.addOperatorToMetaScope(new Identifier("#LEFT_1"), deepCopyVisitor.deepCopy(leftInputOp, null));
+        translator.addVariableToMetaScope(new Identifier("$$LEFT_1"), deepCopyVisitor.varCopy(leftInputVar));
+        translator.addVariableToMetaScope(new Identifier("$$LEFTPK_1"), deepCopyVisitor.varCopy(leftPKVar));
+        deepCopyVisitor.updatePrimaryKeys(context);
+        deepCopyVisitor.reset();
+
+        // translator.addOperatorToMetaScope(new Identifier("#LEFT_2"),
+        // deepCopyVisitor.deepCopy(leftInputOp, null));
+        // translator.addVariableToMetaScope(new Identifier("$$LEFT_2"),
+        // deepCopyVisitor.varCopy(leftInputVar));
+        // translator.addVariableToMetaScope(new Identifier("$$LEFTPK_2"),
+        // deepCopyVisitor.varCopy(leftPKVar));
+        // deepCopyVisitor.updatePrimaryKeys(context);
+        // deepCopyVisitor.reset();
+        //
+        // translator.addOperatorToMetaScope(new Identifier("#LEFT_3"),
+        // deepCopyVisitor.deepCopy(leftInputOp, null));
+        // translator.addVariableToMetaScope(new Identifier("$$LEFT_3"),
+        // deepCopyVisitor.varCopy(leftInputVar));
+        // translator.addVariableToMetaScope(new Identifier("$$LEFTPK_3"),
+        // deepCopyVisitor.varCopy(leftPKVar));
+        // deepCopyVisitor.updatePrimaryKeys(context);
+        // deepCopyVisitor.reset();
+
+        translator.addOperatorToMetaScope(new Identifier("#RIGHT_1"), deepCopyVisitor.deepCopy(rightInputOp, null));
+        translator.addVariableToMetaScope(new Identifier("$$RIGHT_1"), deepCopyVisitor.varCopy(rightInputVar));
+        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK_1"), deepCopyVisitor.varCopy(rightPKVar));
+        deepCopyVisitor.updatePrimaryKeys(context);
+        deepCopyVisitor.reset();
+
+        // TODO pick side to run Stage 1, currently always picks RIGHT side
+        translator.addOperatorToMetaScope(new Identifier("#RIGHT_2"), deepCopyVisitor.deepCopy(rightInputOp, null));
+        translator.addVariableToMetaScope(new Identifier("$$RIGHT_2"), deepCopyVisitor.varCopy(rightInputVar));
+        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK_2"), deepCopyVisitor.varCopy(rightPKVar));
+        deepCopyVisitor.updatePrimaryKeys(context);
+        deepCopyVisitor.reset();
+
+        translator.addOperatorToMetaScope(new Identifier("#RIGHT_3"), deepCopyVisitor.deepCopy(rightInputOp, null));
+        translator.addVariableToMetaScope(new Identifier("$$RIGHT_3"), deepCopyVisitor.varCopy(rightInputVar));
+        translator.addVariableToMetaScope(new Identifier("$$RIGHTPK_3"), deepCopyVisitor.varCopy(rightPKVar));
+        deepCopyVisitor.updatePrimaryKeys(context);
+        deepCopyVisitor.reset();
+
+        ILogicalPlan plan;
+        try {
+            plan = translator.translate(clauses);
+        } catch (AsterixException e) {
+            throw new AlgebricksException(e);
+        }
+        context.setVarCounter(counter.get());
+
+        ILogicalOperator outputOp = plan.getRoots().get(0).getValue();
+
+        SelectOperator extraSelect = null;
+        if (getItemExprRef != expRef) {
+            // more than one join condition
+            getItemExprRef.setValue(ConstantExpression.TRUE);
+            switch (joinOp.getJoinKind()) {
+                case INNER: {
+                    extraSelect = new SelectOperator(expRef, false, null);
+                    extraSelect.getInputs().add(new MutableObject<ILogicalOperator>(outputOp));
+                    outputOp = extraSelect;
+                    break;
+                }
+                case LEFT_OUTER: {
+                    if (((AbstractLogicalOperator) outputOp).getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
+                        throw new IllegalStateException();
+                    }
+                    LeftOuterJoinOperator topJoin = (LeftOuterJoinOperator) outputOp;
+                    topJoin.getCondition().setValue(expRef.getValue());
+                    break;
+                }
+                default: {
+                    throw new IllegalStateException();
+                }
+            }
+        }
+        opRef.setValue(outputOp);
+        OperatorPropertiesUtil.typeOpRec(opRef, context);
+        return true;
+    }
+
+    /**
+     * Look for GET_ITEM function call.
+     */
+    private Mutable<ILogicalExpression> getSimilarityExpression(Mutable<ILogicalExpression> expRef) {
+        ILogicalExpression exp = expRef.getValue();
+        if (exp.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) exp;
+            if (funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.GET_ITEM)) {
+                return expRef;
+            }
+            if (funcExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.AND)) {
+                for (int i = 0; i < 2; i++) {
+                    Mutable<ILogicalExpression> expRefRet = getSimilarityExpression(funcExpr.getArguments().get(i));
+                    if (expRefRet != null) {
+                        return expRefRet;
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+}

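FuzzyJoinRule is the heaviest of these rules: when a join condition contains get-item over a supported
similarity-check function (currently only similarity-jaccard-check) applied to two variables whose
single-variable primary keys can be inferred, it parses the AQL+ template above, splices deep copies of the
two join branches into the translator's meta scope, and replaces the join with the translated three-stage
ranked-token plan. Left-outer joins are deliberately skipped for now, as the TODO above explains. The
driver below is hypothetical; joinOpRef and ctx are assumed to be provided by the surrounding optimizer.

    import org.apache.commons.lang3.mutable.Mutable;

    import edu.uci.ics.asterix.optimizer.rules.FuzzyJoinRule;
    import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;

    public final class FuzzyJoinDriver {
        public static boolean apply(Mutable<ILogicalOperator> joinOpRef, IOptimizationContext ctx)
                throws AlgebricksException {
            // Returns false (leaving the join untouched) for left-outer joins, for conditions
            // without a supported similarity-check, or when an indexed-NL annotation is present.
            return new FuzzyJoinRule().rewritePost(joinOpRef, ctx);
        }
    }
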
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java
new file mode 100644
index 0000000..cd18517
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IfElseToSwitchCaseFunctionRule.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+public class IfElseToSwitchCaseFunctionRule implements IAlgebraicRewriteRule {
+
+    @Override
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
+        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+        if (op1.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+            return false;
+
+        AssignOperator assignOp = (AssignOperator) op1;
+        List<Mutable<ILogicalExpression>> assignExprs = assignOp.getExpressions();
+        if (assignExprs.size() > 1)
+            return false;
+        ILogicalExpression expr = assignExprs.get(0).getValue();
+        if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+            AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+            if (!funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.CONCAT_NON_NULL))
+                return false;
+        }
+
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
+        if (op2.getOperatorTag() != LogicalOperatorTag.SUBPLAN)
+            return false;
+
+        SubplanOperator subplan = (SubplanOperator) op2;
+        List<ILogicalPlan> subPlans = subplan.getNestedPlans();
+        List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
+        for (ILogicalPlan plan : subPlans) {
+            List<Mutable<ILogicalOperator>> roots = plan.getRoots();
+
+            AbstractLogicalOperator nestedRoot = (AbstractLogicalOperator) roots.get(0).getValue();
+            if (nestedRoot.getOperatorTag() != LogicalOperatorTag.SELECT)
+                return false;
+            SelectOperator selectOp = (SelectOperator) nestedRoot;
+
+            AbstractLogicalOperator nestedNextOp = (AbstractLogicalOperator) nestedRoot.getInputs().get(0).getValue();
+            if (nestedNextOp.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+                return false;
+            AssignOperator assignRoot = (AssignOperator) nestedNextOp;
+            Mutable<ILogicalExpression> actionExprRef = assignRoot.getExpressions().get(0);
+
+            arguments.add(selectOp.getCondition());
+            arguments.add(actionExprRef);
+            AbstractLogicalOperator nestedBottomOp = (AbstractLogicalOperator) assignRoot.getInputs().get(0).getValue();
+
+            if (nestedBottomOp.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE)
+                return false;
+        }
+
+        AbstractLogicalOperator op3 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
+        if (op3.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+            return false;
+
+        AssignOperator bottomAssign = (AssignOperator) op3;
+        LogicalVariable conditionVar = bottomAssign.getVariables().get(0);
+        Mutable<ILogicalExpression> switchCondition = new MutableObject<ILogicalExpression>(
+                new VariableReferenceExpression(conditionVar));
+        List<Mutable<ILogicalExpression>> argumentRefs = new ArrayList<Mutable<ILogicalExpression>>();
+        argumentRefs.add(switchCondition);
+        argumentRefs.addAll(arguments);
+
+        /** replace the branch conditions */
+        for (int i = 0; i < arguments.size(); i += 2) {
+            if (arguments.get(i).getValue().equals(switchCondition.getValue())) {
+                arguments.get(i).setValue(ConstantExpression.TRUE);
+            } else {
+                arguments.get(i).setValue(ConstantExpression.FALSE);
+            }
+        }
+
+        ILogicalExpression callExpr = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SWITCH_CASE), argumentRefs);
+
+        assignOp.getInputs().get(0).setValue(op3);
+        assignOp.getExpressions().get(0).setValue(callExpr);
+        context.computeAndSetTypeEnvironmentForOperator(assignOp);
+        return true;
+    }
+}
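
Editor's note: the rule above recognizes the assign(concat-non-null)-over-subplan pattern that the translator produces for if/else and replaces it with a single switch-case call. The sketch below (not part of the commit; the helper and its parameter names are hypothetical) shows how that replacement expression is put together, using only constructors and factory methods that already appear in this diff and assuming the same imports as the rule above:

    // Hypothetical helper: builds switch-case($cond, TRUE, thenValue, FALSE, elseValue).
    static ILogicalExpression buildSwitchCase(LogicalVariable conditionVar,
            Mutable<ILogicalExpression> thenValue, Mutable<ILogicalExpression> elseValue) {
        List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
        // argument 0: the variable holding the evaluated condition
        args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(conditionVar)));
        // arguments 1-2: when the condition is TRUE, return the "then" branch value
        args.add(new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
        args.add(thenValue);
        // arguments 3-4: when the condition is FALSE, return the "else" branch value
        args.add(new MutableObject<ILogicalExpression>(ConstantExpression.FALSE));
        args.add(elseValue);
        return new ScalarFunctionCallExpression(
                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SWITCH_CASE), args);
    }

In the rule itself, the then/else values are the nested-plan assign expressions, and the loop marked "replace the branch conditions" normalizes each branch condition to the TRUE or FALSE constant, exactly as in the sketch.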


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlExpressionToPlanTranslator.java
new file mode 100644
index 0000000..0360ffb
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlExpressionToPlanTranslator.java
@@ -0,0 +1,1608 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.translator;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.aql.base.Clause;
+import edu.uci.ics.asterix.aql.base.Expression;
+import edu.uci.ics.asterix.aql.base.Expression.Kind;
+import edu.uci.ics.asterix.aql.expression.CallExpr;
+import edu.uci.ics.asterix.aql.expression.CompactStatement;
+import edu.uci.ics.asterix.aql.expression.ConnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFeedPolicyStatement;
+import edu.uci.ics.asterix.aql.expression.CreateFunctionStatement;
+import edu.uci.ics.asterix.aql.expression.CreateIndexStatement;
+import edu.uci.ics.asterix.aql.expression.CreatePrimaryFeedStatement;
+import edu.uci.ics.asterix.aql.expression.CreateSecondaryFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DatasetDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDecl;
+import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
+import edu.uci.ics.asterix.aql.expression.DisconnectFeedStatement;
+import edu.uci.ics.asterix.aql.expression.DistinctClause;
+import edu.uci.ics.asterix.aql.expression.DropStatement;
+import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
+import edu.uci.ics.asterix.aql.expression.FeedDropStatement;
+import edu.uci.ics.asterix.aql.expression.FeedPolicyDropStatement;
+import edu.uci.ics.asterix.aql.expression.FieldAccessor;
+import edu.uci.ics.asterix.aql.expression.FieldBinding;
+import edu.uci.ics.asterix.aql.expression.ForClause;
+import edu.uci.ics.asterix.aql.expression.FunctionDecl;
+import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
+import edu.uci.ics.asterix.aql.expression.GbyVariableExpressionPair;
+import edu.uci.ics.asterix.aql.expression.GroupbyClause;
+import edu.uci.ics.asterix.aql.expression.IfExpr;
+import edu.uci.ics.asterix.aql.expression.IndexAccessor;
+import edu.uci.ics.asterix.aql.expression.IndexDropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
+import edu.uci.ics.asterix.aql.expression.LetClause;
+import edu.uci.ics.asterix.aql.expression.LimitClause;
+import edu.uci.ics.asterix.aql.expression.ListConstructor;
+import edu.uci.ics.asterix.aql.expression.ListConstructor.Type;
+import edu.uci.ics.asterix.aql.expression.LiteralExpr;
+import edu.uci.ics.asterix.aql.expression.LoadStatement;
+import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
+import edu.uci.ics.asterix.aql.expression.NodegroupDecl;
+import edu.uci.ics.asterix.aql.expression.OperatorExpr;
+import edu.uci.ics.asterix.aql.expression.OperatorType;
+import edu.uci.ics.asterix.aql.expression.OrderbyClause;
+import edu.uci.ics.asterix.aql.expression.OrderbyClause.OrderModifier;
+import edu.uci.ics.asterix.aql.expression.OrderedListTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.QuantifiedExpression;
+import edu.uci.ics.asterix.aql.expression.QuantifiedExpression.Quantifier;
+import edu.uci.ics.asterix.aql.expression.QuantifiedPair;
+import edu.uci.ics.asterix.aql.expression.Query;
+import edu.uci.ics.asterix.aql.expression.RecordConstructor;
+import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.SetStatement;
+import edu.uci.ics.asterix.aql.expression.TypeDecl;
+import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
+import edu.uci.ics.asterix.aql.expression.TypeReferenceExpression;
+import edu.uci.ics.asterix.aql.expression.UnaryExpr;
+import edu.uci.ics.asterix.aql.expression.UnaryExpr.Sign;
+import edu.uci.ics.asterix.aql.expression.UnionExpr;
+import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.UpdateClause;
+import edu.uci.ics.asterix.aql.expression.UpdateStatement;
+import edu.uci.ics.asterix.aql.expression.VariableExpr;
+import edu.uci.ics.asterix.aql.expression.WhereClause;
+import edu.uci.ics.asterix.aql.expression.WriteStatement;
+import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
+import edu.uci.ics.asterix.aql.util.FunctionUtils;
+import edu.uci.ics.asterix.aql.util.RangeMapBuilder;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.functions.FunctionConstants;
+import edu.uci.ics.asterix.common.functions.FunctionSignature;
+import edu.uci.ics.asterix.metadata.MetadataException;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
+import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
+import edu.uci.ics.asterix.metadata.declared.LoadableDataSource;
+import edu.uci.ics.asterix.metadata.declared.ResultSetDataSink;
+import edu.uci.ics.asterix.metadata.declared.ResultSetSinkId;
+import edu.uci.ics.asterix.metadata.entities.Dataset;
+import edu.uci.ics.asterix.metadata.entities.Function;
+import edu.uci.ics.asterix.metadata.functions.ExternalFunctionCompilerUtil;
+import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
+import edu.uci.ics.asterix.om.base.AOrderedList;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.AsterixFunctionInfo;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.runtime.formats.FormatUtils;
+import edu.uci.ics.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
+import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation.BroadcastSide;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.LocalOrderProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+
+/**
+ * Each visit returns a pair of an operator and a variable. The variable
+ * corresponds to the new column, if any, added to the tuple flow. E.g., for
+ * Unnest, the column is the variable bound to the elements in the list, for
+ * Subplan it is null. The first argument of a visit method is the expression
+ * which is translated. The second argument of a visit method is the tuple
+ * source for the current subtree.
+ */
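+
+/*
+ * Editorial sketch (not part of this commit): a minimal visit method that obeys
+ * the contract described above binds one new variable to an operator and plugs
+ * the incoming tuple source into that operator, e.g. (cf. visitLiteralExpr below):
+ *
+ *     LogicalVariable v = context.newVar();
+ *     AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(expr));
+ *     a.getInputs().add(tupSource);
+ *     return new Pair<ILogicalOperator, LogicalVariable>(a, v);
+ */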
+
+public class AqlExpressionToPlanTranslator extends AbstractAqlTranslator implements
+        IAqlExpressionVisitor<Pair<ILogicalOperator, LogicalVariable>, Mutable<ILogicalOperator>> {
+
+    private final AqlMetadataProvider metadataProvider;
+    private final TranslationContext context;
+    private final String outputDatasetName;
+    private final ICompiledDmlStatement stmt;
+    private static AtomicLong outputFileID = new AtomicLong(0);
+    private static final String OUTPUT_FILE_PREFIX = "OUTPUT_";
+    private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
+
+    public AqlExpressionToPlanTranslator(AqlMetadataProvider metadataProvider, int currentVarCounter,
+            String outputDatasetName, ICompiledDmlStatement stmt) throws AlgebricksException {
+        this.context = new TranslationContext(new Counter(currentVarCounter));
+        this.outputDatasetName = outputDatasetName;
+        this.stmt = stmt;
+        this.metadataProvider = metadataProvider;
+        FormatUtils.getDefaultFormat().registerRuntimeFunctions();
+    }
+
+    public int getVarCounter() {
+        return context.getVarCounter();
+    }
+
+    public ILogicalPlan translateLoad() throws AlgebricksException {
+        CompiledLoadFromFileStatement clffs = (CompiledLoadFromFileStatement) stmt;
+        Dataset dataset = metadataProvider.findDataset(clffs.getDataverseName(), clffs.getDatasetName());
+        if (dataset == null) {
+            // This would never happen since we check for this in AqlTranslator
+            throw new AlgebricksException("Unable to load dataset " + clffs.getDatasetName()
+                    + " since it does not exist");
+        }
+        IAType itemType = metadataProvider.findType(clffs.getDataverseName(), dataset.getItemTypeName());
+        DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
+                stmt.getDatasetName());
+        List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
+
+        LoadableDataSource lds;
+        try {
+            lds = new LoadableDataSource(dataset, itemType, clffs.getAdapter(), clffs.getProperties());
+        } catch (IOException e) {
+            throw new AlgebricksException(e);
+        }
+
+        // etsOp is a dummy input operator used to keep the compiler happy. It
+        // could be removed, but doing so would require fixing many rewrite
+        // rules that assume DataSourceScan operators always have an input.
+        ILogicalOperator etsOp = new EmptyTupleSourceOperator();
+
+        // Add a logical variable for the record.
+        List<LogicalVariable> payloadVars = new ArrayList<LogicalVariable>();
+        payloadVars.add(context.newVar());
+
+        // Create a scan operator and make the empty tuple source its input
+        DataSourceScanOperator dssOp = new DataSourceScanOperator(payloadVars, lds);
+        dssOp.getInputs().add(new MutableObject<ILogicalOperator>(etsOp));
+        ILogicalExpression payloadExpr = new VariableReferenceExpression(payloadVars.get(0));
+        Mutable<ILogicalExpression> payloadRef = new MutableObject<ILogicalExpression>(payloadExpr);
+
+        // Creating the assign to extract the PK out of the record
+        ArrayList<LogicalVariable> pkVars = new ArrayList<LogicalVariable>();
+        ArrayList<Mutable<ILogicalExpression>> pkExprs = new ArrayList<Mutable<ILogicalExpression>>();
+        List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
+        LogicalVariable payloadVar = payloadVars.get(0);
+        for (List<String> keyFieldName : partitionKeys) {
+            prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading);
+        }
+
+        AssignOperator assign = new AssignOperator(pkVars, pkExprs);
+        assign.getInputs().add(new MutableObject<ILogicalOperator>(dssOp));
+
+        // If the input is pre-sorted, we set the ordering property explicitly in the assign
+        if (clffs.alreadySorted()) {
+            List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
+            for (int i = 0; i < pkVars.size(); ++i) {
+                orderColumns.add(new OrderColumn(pkVars.get(i), OrderKind.ASC));
+            }
+            assign.setExplicitOrderingProperty(new LocalOrderProperty(orderColumns));
+        }
+
+        List<String> additionalFilteringField = DatasetUtils.getFilterField(targetDatasource.getDataset());
+        List<LogicalVariable> additionalFilteringVars = null;
+        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
+        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
+        AssignOperator additionalFilteringAssign = null;
+        if (additionalFilteringField != null) {
+            additionalFilteringVars = new ArrayList<LogicalVariable>();
+            additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+            prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars,
+                    additionalFilteringAssignExpressions, additionalFilteringExpressions);
+            additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
+                    additionalFilteringAssignExpressions);
+        }
+
+        InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, payloadRef, varRefsForLoading,
+                InsertDeleteOperator.Kind.INSERT, true);
+        insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+
+        if (additionalFilteringAssign != null) {
+            additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
+        } else {
+            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+        }
+
+        ILogicalOperator leafOperator = new SinkOperator();
+        leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
+        return new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(leafOperator));
+    }
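+
+    // Editorial sketch (not part of this commit): bottom-up, the load plan assembled
+    // above is
+    //
+    //   EMPTY_TUPLE_SOURCE -> DATASOURCE_SCAN (payload record)
+    //     -> ASSIGN (primary-key fields) [-> ASSIGN (filter field)]
+    //     -> INSERT_DELETE (bulk-load INSERT) -> SINK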
+
+    public ILogicalPlan translate(Query expr) throws AlgebricksException, AsterixException {
+        Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, new MutableObject<ILogicalOperator>(
+                new EmptyTupleSourceOperator()));
+        ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
+        ILogicalOperator topOp = p.first;
+        ProjectOperator project = (ProjectOperator) topOp;
+        LogicalVariable resVar = project.getVariables().get(0);
+
+        if (outputDatasetName == null) {
+            FileSplit outputFileSplit = metadataProvider.getOutputFile();
+            if (outputFileSplit == null) {
+                outputFileSplit = getDefaultOutputFileLocation();
+            }
+            metadataProvider.setOutputFile(outputFileSplit);
+
+            List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
+            writeExprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
+            ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
+            ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
+            topOp = new DistributeResultOperator(writeExprList, sink);
+            topOp.getInputs().add(new MutableObject<ILogicalOperator>(project));
+
+            // Retrieve the Output RecordType (if any) and store it on
+            // the DistributeResultOperator
+            IAType outputRecordType = metadataProvider.findOutputRecordType();
+            if (outputRecordType != null) {
+                topOp.getAnnotations().put("output-record-type", outputRecordType);
+            }
+        } else {
+            /**
+             * Add the collection-to-sequence function right before the final
+             * project, because datasets only accept non-collection records.
+             */
+            LogicalVariable seqVar = context.newVar();
+            @SuppressWarnings("unchecked")
+            /** This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest it; otherwise do nothing. */
+            AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
+                    new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
+                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.COLLECTION_TO_SEQUENCE),
+                            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)))));
+            assignCollectionToSequence.getInputs().add(
+                    new MutableObject<ILogicalOperator>(project.getInputs().get(0).getValue()));
+            project.getInputs().get(0).setValue(assignCollectionToSequence);
+            project.getVariables().set(0, seqVar);
+            resVar = seqVar;
+
+            DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
+                    stmt.getDatasetName());
+            ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
+            ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
+            List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
+            List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
+            for (List<String> keyFieldName : partitionKeys) {
+                prepareVarAndExpression(keyFieldName, resVar, vars, exprs, varRefsForLoading);
+            }
+
+            List<String> additionalFilteringField = DatasetUtils.getFilterField(targetDatasource.getDataset());
+            List<LogicalVariable> additionalFilteringVars = null;
+            List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
+            List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
+            AssignOperator additionalFilteringAssign = null;
+            if (additionalFilteringField != null) {
+                additionalFilteringVars = new ArrayList<LogicalVariable>();
+                additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+                additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
+
+                prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars,
+                        additionalFilteringAssignExpressions, additionalFilteringExpressions);
+
+                additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
+                        additionalFilteringAssignExpressions);
+            }
+
+            AssignOperator assign = new AssignOperator(vars, exprs);
+
+            if (additionalFilteringAssign != null) {
+                additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(project));
+                assign.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
+            } else {
+                assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
+            }
+
+            Mutable<ILogicalExpression> varRef = new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                    resVar));
+            ILogicalOperator leafOperator = null;
+
+            switch (stmt.getKind()) {
+                case INSERT: {
+                    InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef,
+                            varRefsForLoading, InsertDeleteOperator.Kind.INSERT, false);
+                    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+                    leafOperator = new SinkOperator();
+                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
+                    break;
+                }
+                case DELETE: {
+                    InsertDeleteOperator deleteOp = new InsertDeleteOperator(targetDatasource, varRef,
+                            varRefsForLoading, InsertDeleteOperator.Kind.DELETE, false);
+                    deleteOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                    deleteOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+                    leafOperator = new SinkOperator();
+                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(deleteOp));
+                    break;
+                }
+                case CONNECT_FEED: {
+                    InsertDeleteOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef,
+                            varRefsForLoading, InsertDeleteOperator.Kind.INSERT, false);
+                    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+                    leafOperator = new SinkOperator();
+                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
+                    break;
+                }
+                case SUBSCRIBE_FEED: {
+                    ILogicalOperator insertOp = new InsertDeleteOperator(targetDatasource, varRef, varRefsForLoading,
+                            InsertDeleteOperator.Kind.INSERT, false);
+                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+                    leafOperator = new SinkOperator();
+                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
+                    break;
+                }
+            }
+            topOp = leafOperator;
+        }
+        globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
+        ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
+        return plan;
+    }
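+
+    // Editorial sketch (not part of this commit): when the query writes into a dataset,
+    // the tail of the plan built above is
+    //
+    //   <query plan> -> ASSIGN (collection-to-sequence) -> PROJECT
+    //     [-> ASSIGN (filter field)] -> ASSIGN (primary-key fields)
+    //     -> INSERT_DELETE (INSERT or DELETE) -> SINK
+    //
+    // whereas a plain query instead ends in a DISTRIBUTE_RESULT over the final PROJECT.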
+
+    @SuppressWarnings("unchecked")
+    private void prepareVarAndExpression(List<String> field, LogicalVariable resVar,
+            List<LogicalVariable> additionalFilteringVars,
+            List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions,
+            List<Mutable<ILogicalExpression>> varRefs) {
+        IFunctionInfo finfoAccess;
+        ScalarFunctionCallExpression f;
+        if (field.size() > 1) {
+            finfoAccess = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED);
+            f = new ScalarFunctionCallExpression(finfoAccess, new MutableObject<ILogicalExpression>(
+                    new VariableReferenceExpression(METADATA_DUMMY_VAR)), new MutableObject<ILogicalExpression>(
+                    new ConstantExpression(new AsterixConstantValue(new AOrderedList(field)))));
+        } else {
+            finfoAccess = FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME);
+            f = new ScalarFunctionCallExpression(finfoAccess, new MutableObject<ILogicalExpression>(
+                    new VariableReferenceExpression(METADATA_DUMMY_VAR)), new MutableObject<ILogicalExpression>(
+                    new ConstantExpression(new AsterixConstantValue(new AString(field.get(0))))));
+        }
+        f.substituteVar(METADATA_DUMMY_VAR, resVar);
+        additionalFilteringAssignExpressions.add(new MutableObject<ILogicalExpression>(f));
+        LogicalVariable v = context.newVar();
+        additionalFilteringVars.add(v);
+        varRefs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
+    }
+
+    private DatasetDataSource validateDatasetInfo(AqlMetadataProvider metadataProvider, String dataverseName,
+            String datasetName) throws AlgebricksException {
+        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+        if (dataset == null) {
+            throw new AlgebricksException("Cannot find dataset " + datasetName + " in dataverse " + dataverseName);
+        }
+        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+            throw new AlgebricksException("Cannot write output to an external dataset.");
+        }
+        AqlSourceId sourceId = new AqlSourceId(dataverseName, datasetName);
+        String itemTypeName = dataset.getItemTypeName();
+        IAType itemType = metadataProvider.findType(dataverseName, itemTypeName);
+        DatasetDataSource dataSource = new DatasetDataSource(sourceId, dataset.getDataverseName(),
+                dataset.getDatasetName(), itemType, AqlDataSourceType.INTERNAL_DATASET);
+
+        return dataSource;
+    }
+
+    private FileSplit getDefaultOutputFileLocation() throws MetadataException {
+        String outputDir = System.getProperty("java.io.tmpdir");
+        String filePath = outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX
+                + outputFileID.incrementAndGet();
+        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
+        return new FileSplit(metadataProperties.getMetadataNodeName(), new FileReference(new File(filePath)));
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitForClause(ForClause fc, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        LogicalVariable v = context.newVar(fc.getVarExpr());
+        Expression inExpr = fc.getInExpr();
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(inExpr, tupSource);
+        ILogicalOperator returnedOp;
+
+        if (fc.getPosVarExpr() == null) {
+            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)));
+        } else {
+            LogicalVariable pVar = context.newVar(fc.getPosVarExpr());
+            // We set the positional variable type as INT64 type.
+            returnedOp = new UnnestOperator(v, new MutableObject<ILogicalExpression>(makeUnnestExpression(eo.first)),
+                    pVar, BuiltinType.AINT64, new AqlPositionWriter());
+        }
+        returnedOp.getInputs().add(eo.second);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLetClause(LetClause lc, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        LogicalVariable v;
+        ILogicalOperator returnedOp;
+
+        switch (lc.getBindingExpr().getKind()) {
+            case VARIABLE_EXPRESSION: {
+                v = context.newVar(lc.getVarExpr());
+                LogicalVariable prev = context.getVar(((VariableExpr) lc.getBindingExpr()).getVar().getId());
+                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(
+                        new VariableReferenceExpression(prev)));
+                returnedOp.getInputs().add(tupSource);
+                break;
+            }
+            default: {
+                v = context.newVar(lc.getVarExpr());
+                Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(lc.getBindingExpr(),
+                        tupSource);
+                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(eo.first));
+                returnedOp.getInputs().add(eo.second);
+                break;
+            }
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFlworExpression(FLWOGRExpression flwor,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Mutable<ILogicalOperator> flworPlan = tupSource;
+        boolean isTop = context.isTopFlwor();
+        if (isTop) {
+            context.setTopFlwor(false);
+        }
+        for (Clause c : flwor.getClauseList()) {
+            Pair<ILogicalOperator, LogicalVariable> pC = c.accept(this, flworPlan);
+            flworPlan = new MutableObject<ILogicalOperator>(pC.first);
+        }
+
+        Expression r = flwor.getReturnExpr();
+        boolean noFlworClause = flwor.noForClause();
+
+        if (r.getKind() == Kind.VARIABLE_EXPRESSION) {
+            VariableExpr v = (VariableExpr) r;
+            LogicalVariable var = context.getVar(v.getVar().getId());
+
+            return produceFlwrResult(noFlworClause, isTop, flworPlan, var);
+
+        } else {
+            Mutable<ILogicalOperator> baseOp = new MutableObject<ILogicalOperator>(flworPlan.getValue());
+            Pair<ILogicalOperator, LogicalVariable> rRes = r.accept(this, baseOp);
+            ILogicalOperator rOp = rRes.first;
+            ILogicalOperator resOp;
+            if (expressionNeedsNoNesting(r)) {
+                baseOp.setValue(flworPlan.getValue());
+                resOp = rOp;
+            } else {
+                SubplanOperator s = new SubplanOperator(rOp);
+                s.getInputs().add(flworPlan);
+                resOp = s;
+                baseOp.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
+            }
+            Mutable<ILogicalOperator> resOpRef = new MutableObject<ILogicalOperator>(resOp);
+            return produceFlwrResult(noFlworClause, isTop, resOpRef, rRes.second);
+        }
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFieldAccessor(FieldAccessor fa,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(fa.getExpr(), tupSource);
+        LogicalVariable v = context.newVar();
+        AbstractFunctionCallExpression fldAccess = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME));
+        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+        ILogicalExpression faExpr = new ConstantExpression(new AsterixConstantValue(new AString(fa.getIdent()
+                .getValue())));
+        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(faExpr));
+        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(fldAccess));
+        a.getInputs().add(p.second);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitIndexAccessor(IndexAccessor ia,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(ia.getExpr(), tupSource);
+        LogicalVariable v = context.newVar();
+        AbstractFunctionCallExpression f;
+        if (ia.isAny()) {
+            f = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.ANY_COLLECTION_MEMBER));
+            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+        } else {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> indexPair = aqlExprToAlgExpression(ia.getIndexExpr(),
+                    tupSource);
+            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM));
+            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+            f.getArguments().add(new MutableObject<ILogicalExpression>(indexPair.first));
+        }
+        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
+        a.getInputs().add(p.second);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCallExpr(CallExpr fcall, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        LogicalVariable v = context.newVar();
+        FunctionSignature signature = fcall.getFunctionSignature();
+        List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        for (Expression expr : fcall.getExprList()) {
+            switch (expr.getKind()) {
+                case VARIABLE_EXPRESSION: {
+                    LogicalVariable var = context.getVar(((VariableExpr) expr).getVar().getId());
+                    args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
+                    break;
+                }
+                case LITERAL_EXPRESSION: {
+                    LiteralExpr val = (LiteralExpr) expr;
+                    args.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+                            ConstantHelper.objectFromLiteral(val.getValue())))));
+                    break;
+                }
+                default: {
+                    Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
+                    AbstractLogicalOperator o1 = (AbstractLogicalOperator) eo.second.getValue();
+                    args.add(new MutableObject<ILogicalExpression>(eo.first));
+                    if (o1 != null && !(o1.getOperatorTag() == LogicalOperatorTag.ASSIGN && hasOnlyChild(o1, topOp))) {
+                        topOp = eo.second;
+                    }
+                    break;
+                }
+            }
+        }
+
+        AbstractFunctionCallExpression f;
+        if ((f = lookupUserDefinedFunction(signature, args)) == null) {
+            f = lookupBuiltinFunction(signature.getName(), signature.getArity(), args);
+        }
+
+        if (f == null) {
+            throw new AsterixException(" Unknown function " + signature.getName() + "@" + signature.getArity());
+        }
+
+        // Put hints into function call expr.
+        if (fcall.hasHints()) {
+            for (IExpressionAnnotation hint : fcall.getHints()) {
+                f.getAnnotations().put(hint, hint);
+            }
+        }
+
+        AssignOperator op = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
+        if (topOp != null) {
+            op.getInputs().add(topOp);
+        }
+
+        return new Pair<ILogicalOperator, LogicalVariable>(op, v);
+    }
+
+    private AbstractFunctionCallExpression lookupUserDefinedFunction(FunctionSignature signature,
+            List<Mutable<ILogicalExpression>> args) throws MetadataException {
+        if (signature.getNamespace() == null) {
+            return null;
+        }
+        Function function = MetadataManager.INSTANCE.getFunction(metadataProvider.getMetadataTxnContext(), signature);
+        if (function == null) {
+            return null;
+        }
+        AbstractFunctionCallExpression f = null;
+        if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_JAVA)) {
+            IFunctionInfo finfo = ExternalFunctionCompilerUtil.getExternalFunctionInfo(
+                    metadataProvider.getMetadataTxnContext(), function);
+            f = new ScalarFunctionCallExpression(finfo, args);
+        } else if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
+            IFunctionInfo finfo = FunctionUtils.getFunctionInfo(signature);
+            f = new ScalarFunctionCallExpression(finfo, args);
+        } else {
+            throw new MetadataException(" User defined functions written in " + function.getLanguage()
+                    + " are not supported");
+        }
+        return f;
+    }
+
+    private AbstractFunctionCallExpression lookupBuiltinFunction(String functionName, int arity,
+            List<Mutable<ILogicalExpression>> args) {
+        AbstractFunctionCallExpression f = null;
+        FunctionIdentifier fi = new FunctionIdentifier(AlgebricksBuiltinFunctions.ALGEBRICKS_NS, functionName, arity);
+        AsterixFunctionInfo afi = AsterixBuiltinFunctions.lookupFunction(fi);
+        FunctionIdentifier builtinAquafi = afi == null ? null : afi.getFunctionIdentifier();
+
+        if (builtinAquafi != null) {
+            fi = builtinAquafi;
+        } else {
+            fi = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, functionName, arity);
+            afi = AsterixBuiltinFunctions.lookupFunction(fi);
+            if (afi == null) {
+                return null;
+            }
+        }
+        if (AsterixBuiltinFunctions.isBuiltinAggregateFunction(fi)) {
+            f = AsterixBuiltinFunctions.makeAggregateFunctionExpression(fi, args);
+        } else if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(fi)) {
+            UnnestingFunctionCallExpression ufce = new UnnestingFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(fi), args);
+            ufce.setReturnsUniqueValues(AsterixBuiltinFunctions.returnsUniqueValues(fi));
+            f = ufce;
+        } else {
+            f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fi), args);
+        }
+        return f;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDecl(FunctionDecl fd,
+            Mutable<ILogicalOperator> tupSource) {
+        // TODO Auto-generated method stub
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitGroupbyClause(GroupbyClause gc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        GroupByOperator gOp = new GroupByOperator();
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (GbyVariableExpressionPair ve : gc.getGbyPairList()) {
+            LogicalVariable v;
+            VariableExpr vexpr = ve.getVar();
+            if (vexpr != null) {
+                v = context.newVar(vexpr);
+            } else {
+                v = context.newVar();
+            }
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
+            gOp.addGbyExpression(v, eo.first);
+            topOp = eo.second;
+        }
+        for (GbyVariableExpressionPair ve : gc.getDecorPairList()) {
+            LogicalVariable v;
+            VariableExpr vexpr = ve.getVar();
+            if (vexpr != null) {
+                v = context.newVar(vexpr);
+            } else {
+                v = context.newVar();
+            }
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(ve.getExpr(), topOp);
+            gOp.addDecorExpression(v, eo.first);
+            topOp = eo.second;
+        }
+        gOp.getInputs().add(topOp);
+
+        for (VariableExpr var : gc.getWithVarList()) {
+            LogicalVariable aggVar = context.newVar();
+            LogicalVariable oldVar = context.getVar(var);
+            List<Mutable<ILogicalExpression>> flArgs = new ArrayList<Mutable<ILogicalExpression>>(1);
+            flArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldVar)));
+            AggregateFunctionCallExpression fListify = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
+                    AsterixBuiltinFunctions.LISTIFY, flArgs);
+            AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(aggVar),
+                    (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fListify)));
+
+            agg.getInputs().add(
+                    new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
+                            new MutableObject<ILogicalOperator>(gOp))));
+            ILogicalPlan plan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(agg));
+            gOp.getNestedPlans().add(plan);
+            // Hide the variable that was part of the "with", replacing it with
+            // the one bound by the aggregation op.
+            context.setVar(var, aggVar);
+        }
+
+        gOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, gc.hasHashGroupByHint());
+        return new Pair<ILogicalOperator, LogicalVariable>(gOp, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitIfExpr(IfExpr ifexpr, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        // In the most general case, IfThenElse is translated in the following
+        // way.
+        //
+        // We assign the result of the condition to one variable varCond.
+        // We create one subplan which contains the plan for the "then" branch,
+        // on top of which there is a selection whose condition is varCond.
+        // Similarly, we create one subplan for the "else" branch, in which the
+        // selection is not(varCond).
+        // Finally, we combine the two branch results with concat-non-null:
+        // since the two selection conditions are complementary, at most one
+        // branch yields a non-null value, and that value becomes the result.
+
+        Pair<ILogicalOperator, LogicalVariable> pCond = ifexpr.getCondExpr().accept(this, tupSource);
+        ILogicalOperator opCond = pCond.first;
+        LogicalVariable varCond = pCond.second;
+
+        SubplanOperator sp = new SubplanOperator();
+        Mutable<ILogicalOperator> nestedSource = new MutableObject<ILogicalOperator>(new NestedTupleSourceOperator(
+                new MutableObject<ILogicalOperator>(sp)));
+
+        Pair<ILogicalOperator, LogicalVariable> pThen = ifexpr.getThenExpr().accept(this, nestedSource);
+        SelectOperator sel1 = new SelectOperator(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
+                varCond)), false, null);
+        sel1.getInputs().add(new MutableObject<ILogicalOperator>(pThen.first));
+
+        Pair<ILogicalOperator, LogicalVariable> pElse = ifexpr.getElseExpr().accept(this, nestedSource);
+        AbstractFunctionCallExpression notVarCond = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), new MutableObject<ILogicalExpression>(
+                        new VariableReferenceExpression(varCond)));
+        SelectOperator sel2 = new SelectOperator(new MutableObject<ILogicalExpression>(notVarCond), false, null);
+        sel2.getInputs().add(new MutableObject<ILogicalOperator>(pElse.first));
+
+        ILogicalPlan p1 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel1));
+        sp.getNestedPlans().add(p1);
+        ILogicalPlan p2 = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(sel2));
+        sp.getNestedPlans().add(p2);
+
+        Mutable<ILogicalOperator> opCondRef = new MutableObject<ILogicalOperator>(opCond);
+        sp.getInputs().add(opCondRef);
+
+        LogicalVariable resV = context.newVar();
+        AbstractFunctionCallExpression concatNonNull = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CONCAT_NON_NULL),
+                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pThen.second)),
+                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pElse.second)));
+        AssignOperator a = new AssignOperator(resV, new MutableObject<ILogicalExpression>(concatNonNull));
+        a.getInputs().add(new MutableObject<ILogicalOperator>(sp));
+
+        return new Pair<ILogicalOperator, LogicalVariable>(a, resV);
+    }
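+
+    // Editorial sketch (not part of this commit): for "if (C) then T else E" the code
+    // above yields, conceptually,
+    //
+    //   ASSIGN $res := concat-non-null($t, $e)
+    //     SUBPLAN { SELECT ($c)      <- <plan for T binding $t> <- NESTEDTUPLESOURCE }
+    //             { SELECT (not($c)) <- <plan for E binding $e> <- NESTEDTUPLESOURCE }
+    //       <plan for C binding $c>
+    //         <tupSource>
+    //
+    // This is the pattern that IfElseToSwitchCaseFunctionRule rewrites into a single
+    // switch-case call when each branch reduces to a single assign.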
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLiteralExpr(LiteralExpr l, Mutable<ILogicalOperator> tupSource) {
+        LogicalVariable var = context.newVar();
+        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(new ConstantExpression(
+                new AsterixConstantValue(ConstantHelper.objectFromLiteral(l.getValue())))));
+        if (tupSource != null) {
+            a.getInputs().add(tupSource);
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitOperatorExpr(OperatorExpr op,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        ArrayList<OperatorType> ops = op.getOpList();
+        int nOps = ops.size();
+
+        if (nOps > 0 && (ops.get(0) == OperatorType.AND || ops.get(0) == OperatorType.OR)) {
+            return visitAndOrOperator(op, tupSource);
+        }
+
+        ArrayList<Expression> exprs = op.getExprList();
+
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        ILogicalExpression currExpr = null;
+        for (int i = 0; i <= nOps; i++) {
+
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
+            topOp = p.second;
+            ILogicalExpression e = p.first;
+            // now look at the operator
+            if (i < nOps) {
+                if (OperatorExpr.opIsComparison(ops.get(i))) {
+                    AbstractFunctionCallExpression c = createComparisonExpression(ops.get(i));
+
+                    // chain the operators
+                    if (i == 0) {
+                        c.getArguments().add(new MutableObject<ILogicalExpression>(e));
+                        currExpr = c;
+                        if (op.isBroadcastOperand(i)) {
+                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
+                            bcast.setObject(BroadcastSide.LEFT);
+                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
+                        }
+                    } else {
+                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
+                                new MutableObject<ILogicalExpression>(e));
+                        c.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
+                        currExpr = c;
+                        if (i == 1 && op.isBroadcastOperand(i)) {
+                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
+                            bcast.setObject(BroadcastSide.RIGHT);
+                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
+                        }
+                    }
+                } else {
+                    AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(ops.get(i));
+
+                    if (i == 0) {
+                        f.getArguments().add(new MutableObject<ILogicalExpression>(e));
+                        currExpr = f;
+                    } else {
+                        ((AbstractFunctionCallExpression) currExpr).getArguments().add(
+                                new MutableObject<ILogicalExpression>(e));
+                        f.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
+                        currExpr = f;
+                    }
+                }
+            } else { // don't forget the last expression...
+                ((AbstractFunctionCallExpression) currExpr).getArguments()
+                        .add(new MutableObject<ILogicalExpression>(e));
+                if (i == 1 && op.isBroadcastOperand(i)) {
+                    BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
+                    bcast.setObject(BroadcastSide.RIGHT);
+                    ((AbstractFunctionCallExpression) currExpr).getAnnotations().put(
+                            BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
+                }
+            }
+        }
+
+        // Add hints as annotations.
+        if (op.hasHints() && currExpr instanceof AbstractFunctionCallExpression) {
+            AbstractFunctionCallExpression currFuncExpr = (AbstractFunctionCallExpression) currExpr;
+            for (IExpressionAnnotation hint : op.getHints()) {
+                currFuncExpr.getAnnotations().put(hint, hint);
+            }
+        }
+
+        LogicalVariable assignedVar = context.newVar();
+        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(currExpr));
+
+        a.getInputs().add(topOp);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitOrderbyClause(OrderbyClause oc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        OrderOperator ord = new OrderOperator();
+        Iterator<OrderModifier> modifIter = oc.getModifierList().iterator();
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (Expression e : oc.getOrderbyList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(e, topOp);
+            OrderModifier m = modifIter.next();
+            OrderOperator.IOrder comp = (m == OrderModifier.ASC) ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;
+            ord.getOrderExpressions()
+                    .add(new Pair<IOrder, Mutable<ILogicalExpression>>(comp, new MutableObject<ILogicalExpression>(
+                            p.first)));
+            topOp = p.second;
+        }
+        ord.getInputs().add(topOp);
+        if (oc.getNumTuples() > 0) {
+            ord.getAnnotations().put(OperatorAnnotations.CARDINALITY, oc.getNumTuples());
+        }
+        if (oc.getNumFrames() > 0) {
+            ord.getAnnotations().put(OperatorAnnotations.MAX_NUMBER_FRAMES, oc.getNumFrames());
+        }
+        if (oc.getRangeMap() != null) {
+            Iterator<OrderModifier> orderModifIter = oc.getModifierList().iterator();
+            boolean ascending = (orderModifIter.next() == OrderModifier.ASC);
+            RangeMapBuilder.verifyRangeOrder(oc.getRangeMap(), ascending);
+            ord.getAnnotations().put(OperatorAnnotations.USE_RANGE_CONNECTOR, oc.getRangeMap());
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(ord, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitQuantifiedExpression(QuantifiedExpression qe,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        ILogicalOperator firstOp = null;
+        Mutable<ILogicalOperator> lastOp = null;
+
+        for (QuantifiedPair qt : qe.getQuantifiedList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(qt.getExpr(), topOp);
+            topOp = eo1.second;
+            LogicalVariable uVar = context.newVar(qt.getVarExpr());
+            ILogicalOperator u = new UnnestOperator(uVar, new MutableObject<ILogicalExpression>(
+                    makeUnnestExpression(eo1.first)));
+
+            if (firstOp == null) {
+                firstOp = u;
+            }
+            if (lastOp != null) {
+                u.getInputs().add(lastOp);
+            }
+            lastOp = new MutableObject<ILogicalOperator>(u);
+        }
+
+        // We make all the unnest operators corresponding to quantified variables sit on top
+        // in the hope of enabling joins and other optimizations.
+        firstOp.getInputs().add(topOp);
+        topOp = lastOp;
+
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
+
+        AggregateFunctionCallExpression fAgg;
+        SelectOperator s;
+        if (qe.getQuantifier() == Quantifier.SOME) {
+            s = new SelectOperator(new MutableObject<ILogicalExpression>(eo2.first), false, null);
+            s.getInputs().add(eo2.second);
+            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.NON_EMPTY_STREAM,
+                    new ArrayList<Mutable<ILogicalExpression>>());
+        } else { // EVERY
+            List<Mutable<ILogicalExpression>> satExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
+            satExprList.add(new MutableObject<ILogicalExpression>(eo2.first));
+            s = new SelectOperator(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), satExprList)), false, null);
+            s.getInputs().add(eo2.second);
+            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.EMPTY_STREAM,
+                    new ArrayList<Mutable<ILogicalExpression>>());
+        }
+        LogicalVariable qeVar = context.newVar();
+        AggregateOperator a = new AggregateOperator(mkSingletonArrayList(qeVar),
+                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fAgg)));
+        a.getInputs().add(new MutableObject<ILogicalOperator>(s));
+        return new Pair<ILogicalOperator, LogicalVariable>(a, qeVar);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitQuery(Query q, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        return q.getBody().accept(this, tupSource);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitRecordConstructor(RecordConstructor rc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR));
+        LogicalVariable v1 = context.newVar();
+        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (FieldBinding fb : rc.getFbList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = aqlExprToAlgExpression(fb.getLeftExpr(), topOp);
+            f.getArguments().add(new MutableObject<ILogicalExpression>(eo1.first));
+            topOp = eo1.second;
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = aqlExprToAlgExpression(fb.getRightExpr(), topOp);
+            f.getArguments().add(new MutableObject<ILogicalExpression>(eo2.first));
+            topOp = eo2.second;
+        }
+        a.getInputs().add(topOp);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitListConstructor(ListConstructor lc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        FunctionIdentifier fid = (lc.getType() == Type.ORDERED_LIST_CONSTRUCTOR) ? AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR
+                : AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR;
+        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
+        LogicalVariable v1 = context.newVar();
+        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
+        Mutable<ILogicalOperator> topOp = tupSource;
+        for (Expression expr : lc.getExprList()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, topOp);
+            f.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
+            topOp = eo.second;
+        }
+        a.getInputs().add(topOp);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUnaryExpr(UnaryExpr u, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        Expression expr = u.getExpr();
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = aqlExprToAlgExpression(expr, tupSource);
+        LogicalVariable v1 = context.newVar();
+        AssignOperator a;
+        if (u.getSign() == Sign.POSITIVE) {
+            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(eo.first));
+        } else {
+            AbstractFunctionCallExpression m = new ScalarFunctionCallExpression(
+                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NUMERIC_UNARY_MINUS));
+            m.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
+            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(m));
+        }
+        a.getInputs().add(eo.second);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitVariableExpr(VariableExpr v, Mutable<ILogicalOperator> tupSource) {
+        // Should we ever get to this method?
+        LogicalVariable var = context.newVar();
+        LogicalVariable oldV = context.getVar(v.getVar().getId());
+        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(
+                new VariableReferenceExpression(oldV)));
+        a.getInputs().add(tupSource);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitWhereClause(WhereClause w, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(w.getWhereExpr(), tupSource);
+        SelectOperator s = new SelectOperator(new MutableObject<ILogicalExpression>(p.first), false, null);
+        s.getInputs().add(p.second);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(s, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLimitClause(LimitClause lc, Mutable<ILogicalOperator> tupSource)
+            throws AsterixException {
+        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getLimitExpr(), tupSource);
+        LimitOperator opLim;
+        Expression offset = lc.getOffset();
+        if (offset != null) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p2 = aqlExprToAlgExpression(offset, p1.second);
+            opLim = new LimitOperator(p1.first, p2.first);
+            opLim.getInputs().add(p2.second);
+        } else {
+            opLim = new LimitOperator(p1.first);
+            opLim.getInputs().add(p1.second);
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(opLim, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
+        Mutable<ILogicalOperator> input = null;
+        for (Expression expr : dc.getDistinctByExpr()) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(expr, tupSource);
+            exprList.add(new MutableObject<ILogicalExpression>(p.first));
+            input = p.second;
+        }
+        DistinctOperator opDistinct = new DistinctOperator(exprList);
+        opDistinct.getInputs().add(input);
+        return new Pair<ILogicalOperator, LogicalVariable>(opDistinct, null);
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUnionExpr(UnionExpr unionExpr,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        //Translate the AQL union into an assign [var] <- [function-call: asterix:union, Args:[..]].
+        //The rule "IntroduceUnionRule" will later translate this assign operator into a UnionAll operator.
+        Mutable<ILogicalOperator> ts = tupSource;
+        LogicalVariable assignedVar = context.newVar();
+        List<Mutable<ILogicalExpression>> inputVars = new ArrayList<Mutable<ILogicalExpression>>();
+        for (Expression e : unionExpr.getExprs()) {
+            Pair<ILogicalOperator, LogicalVariable> op_var = e.accept(this, ts);
+            ts = new MutableObject<ILogicalOperator>(op_var.first);
+            VariableReferenceExpression var = new VariableReferenceExpression(op_var.second);
+            inputVars.add(new MutableObject<ILogicalExpression>(var));
+        }
+        AbstractFunctionCallExpression union = new ScalarFunctionCallExpression(
+                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.UNION), inputVars);
+        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(union));
+        a.getInputs().add(ts);
+        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
+    }
+
+    private AbstractFunctionCallExpression createComparisonExpression(OperatorType t) {
+        FunctionIdentifier fi = operatorTypeToFunctionIdentifier(t);
+        IFunctionInfo finfo = FunctionUtils.getFunctionInfo(fi);
+        return new ScalarFunctionCallExpression(finfo);
+    }
+
+    private FunctionIdentifier operatorTypeToFunctionIdentifier(OperatorType t) {
+        switch (t) {
+            case EQ: {
+                return AlgebricksBuiltinFunctions.EQ;
+            }
+            case NEQ: {
+                return AlgebricksBuiltinFunctions.NEQ;
+            }
+            case GT: {
+                return AlgebricksBuiltinFunctions.GT;
+            }
+            case GE: {
+                return AlgebricksBuiltinFunctions.GE;
+            }
+            case LT: {
+                return AlgebricksBuiltinFunctions.LT;
+            }
+            case LE: {
+                return AlgebricksBuiltinFunctions.LE;
+            }
+            default: {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    private AbstractFunctionCallExpression createFunctionCallExpressionForBuiltinOperator(OperatorType t)
+            throws AsterixException {
+
+        FunctionIdentifier fid = null;
+        switch (t) {
+            case PLUS: {
+                fid = AlgebricksBuiltinFunctions.NUMERIC_ADD;
+                break;
+            }
+            case MINUS: {
+                fid = AsterixBuiltinFunctions.NUMERIC_SUBTRACT;
+                break;
+            }
+            case MUL: {
+                fid = AsterixBuiltinFunctions.NUMERIC_MULTIPLY;
+                break;
+            }
+            case DIV: {
+                fid = AsterixBuiltinFunctions.NUMERIC_DIVIDE;
+                break;
+            }
+            case MOD: {
+                fid = AsterixBuiltinFunctions.NUMERIC_MOD;
+                break;
+            }
+            case IDIV: {
+                fid = AsterixBuiltinFunctions.NUMERIC_IDIV;
+                break;
+            }
+            case CARET: {
+                fid = AsterixBuiltinFunctions.CARET;
+                break;
+            }
+            case AND: {
+                fid = AlgebricksBuiltinFunctions.AND;
+                break;
+            }
+            case OR: {
+                fid = AlgebricksBuiltinFunctions.OR;
+                break;
+            }
+            case FUZZY_EQ: {
+                fid = AsterixBuiltinFunctions.FUZZY_EQ;
+                break;
+            }
+
+            default: {
+                throw new NotImplementedException("Operator " + t + " is not yet implemented");
+            }
+        }
+        return new ScalarFunctionCallExpression(FunctionUtils.getFunctionInfo(fid));
+    }
+
+    private static boolean hasOnlyChild(ILogicalOperator parent, Mutable<ILogicalOperator> childCandidate) {
+        List<Mutable<ILogicalOperator>> inp = parent.getInputs();
+        if (inp == null || inp.size() != 1) {
+            return false;
+        }
+        return inp.get(0) == childCandidate;
+    }
+
+    private Pair<ILogicalExpression, Mutable<ILogicalOperator>> aqlExprToAlgExpression(Expression expr,
+            Mutable<ILogicalOperator> topOp) throws AsterixException {
+        switch (expr.getKind()) {
+            case VARIABLE_EXPRESSION: {
+                VariableReferenceExpression ve = new VariableReferenceExpression(context.getVar(((VariableExpr) expr)
+                        .getVar().getId()));
+                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(ve, topOp);
+            }
+            case LITERAL_EXPRESSION: {
+                LiteralExpr val = (LiteralExpr) expr;
+                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new ConstantExpression(
+                        new AsterixConstantValue(ConstantHelper.objectFromLiteral(val.getValue()))), topOp);
+            }
+            default: {
+                // Mutable<ILogicalOperator> src = new
+                // Mutable<ILogicalOperator>();
+                // Mutable<ILogicalOperator> src = topOp;
+                if (expressionNeedsNoNesting(expr)) {
+                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, topOp);
+                    ILogicalExpression exp = ((AssignOperator) p.first).getExpressions().get(0).getValue();
+                    return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(exp, p.first.getInputs().get(0));
+                } else {
+                    Mutable<ILogicalOperator> src = new MutableObject<ILogicalOperator>();
+
+                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, src);
+
+                    if (((AbstractLogicalOperator) p.first).getOperatorTag() == LogicalOperatorTag.SUBPLAN) {
+                        src.setValue(topOp.getValue());
+                        Mutable<ILogicalOperator> top2 = new MutableObject<ILogicalOperator>(p.first);
+                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
+                                p.second), top2);
+                    } else {
+                        SubplanOperator s = new SubplanOperator();
+                        s.getInputs().add(topOp);
+                        src.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
+                        Mutable<ILogicalOperator> planRoot = new MutableObject<ILogicalOperator>(p.first);
+                        s.setRootOp(planRoot);
+                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new VariableReferenceExpression(
+                                p.second), new MutableObject<ILogicalOperator>(s));
+                    }
+                }
+            }
+        }
+
+    }
+
+    private Pair<ILogicalOperator, LogicalVariable> produceFlwrResult(boolean noForClause, boolean isTop,
+            Mutable<ILogicalOperator> resOpRef, LogicalVariable resVar) {
+        if (isTop) {
+            ProjectOperator pr = new ProjectOperator(resVar);
+            pr.getInputs().add(resOpRef);
+            return new Pair<ILogicalOperator, LogicalVariable>(pr, resVar);
+
+        } else if (noForClause) {
+            return new Pair<ILogicalOperator, LogicalVariable>(resOpRef.getValue(), resVar);
+        } else {
+            return aggListify(resVar, resOpRef, false);
+        }
+    }
+
+    private Pair<ILogicalOperator, LogicalVariable> aggListify(LogicalVariable var, Mutable<ILogicalOperator> opRef,
+            boolean bProject) {
+        AggregateFunctionCallExpression funAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
+                AsterixBuiltinFunctions.LISTIFY, new ArrayList<Mutable<ILogicalExpression>>());
+        funAgg.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
+        LogicalVariable varListified = context.newVar();
+        AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(varListified),
+                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(funAgg)));
+        agg.getInputs().add(opRef);
+        ILogicalOperator res;
+        if (bProject) {
+            ProjectOperator pr = new ProjectOperator(varListified);
+            pr.getInputs().add(new MutableObject<ILogicalOperator>(agg));
+            res = pr;
+        } else {
+            res = agg;
+        }
+        return new Pair<ILogicalOperator, LogicalVariable>(res, varListified);
+    }
+
+    private Pair<ILogicalOperator, LogicalVariable> visitAndOrOperator(OperatorExpr op,
+            Mutable<ILogicalOperator> tupSource) throws AsterixException {
+        ArrayList<OperatorType> ops = op.getOpList();
+        int nOps = ops.size();
+
+        ArrayList<Expression> exprs = op.getExprList();
+
+        Mutable<ILogicalOperator> topOp = tupSource;
+
+        OperatorType opLogical = ops.get(0);
+        AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(opLogical);
+
+        for (int i = 0; i <= nOps; i++) {
+            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = aqlExprToAlgExpression(exprs.get(i), topOp);
+            topOp = p.second;
+            // now look at the operator
+            if (i < nOps) {
+                if (ops.get(i) != opLogical) {
+                    throw new TranslationException("Unexpected operator " + ops.get(i)
+                            + " in an OperatorExpr starting with " + opLogical);
+                }
+            }
+            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
+        }
+
+        LogicalVariable assignedVar = context.newVar();
+        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(f));
+        a.getInputs().add(topOp);
+
+        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
+
+    }
+
+    private static boolean expressionNeedsNoNesting(Expression expr) {
+        Kind k = expr.getKind();
+        return k == Kind.LITERAL_EXPRESSION || k == Kind.LIST_CONSTRUCTOR_EXPRESSION
+                || k == Kind.RECORD_CONSTRUCTOR_EXPRESSION || k == Kind.VARIABLE_EXPRESSION
+                || k == Kind.CALL_EXPRESSION || k == Kind.OP_EXPRESSION || k == Kind.FIELD_ACCESSOR_EXPRESSION
+                || k == Kind.INDEX_ACCESSOR_EXPRESSION || k == Kind.UNARY_EXPRESSION || k == Kind.UNION_EXPRESSION;
+    }
+
+    private <T> ArrayList<T> mkSingletonArrayList(T item) {
+        ArrayList<T> array = new ArrayList<T>(1);
+        array.add(item);
+        return array;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitTypeDecl(TypeDecl td, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitRecordTypeDefiniton(RecordTypeDefinition tre,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitTypeReferenceExpression(TypeReferenceExpression tre,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitNodegroupDecl(NodegroupDecl ngd, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitLoadStatement(LoadStatement stmtLoad,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDropStatement(DropStatement del, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDisconnectFeedStatement(DisconnectFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateIndexStatement(CreateIndexStatement cis,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitOrderedListTypeDefiniton(OrderedListTypeDefinition olte,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUnorderedListTypeDefiniton(UnorderedListTypeDefinition ulte,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    private ILogicalExpression makeUnnestExpression(ILogicalExpression expr) {
+        switch (expr.getExpressionTag()) {
+            case VARIABLE: {
+                return new UnnestingFunctionCallExpression(
+                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
+                        new MutableObject<ILogicalExpression>(expr));
+            }
+            case FUNCTION_CALL: {
+                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+                if (fce.getKind() == FunctionKind.UNNEST) {
+                    return expr;
+                } else {
+                    return new UnnestingFunctionCallExpression(
+                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
+                            new MutableObject<ILogicalExpression>(expr));
+                }
+            }
+            default: {
+                return expr;
+            }
+        }
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitInsertStatement(InsertStatement insert,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDeleteStatement(DeleteStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUpdateStatement(UpdateStatement update,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitUpdateClause(UpdateClause del, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDecl(DataverseDecl dv, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDatasetDecl(DatasetDecl dd, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitSetStatement(SetStatement ss, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitWriteStatement(WriteStatement ws, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateDataverseStatement(CreateDataverseStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitIndexDropStatement(IndexDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitNodeGroupDropStatement(NodeGroupDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDataverseDropStatement(DataverseDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitTypeDropStatement(TypeDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visit(CreateFunctionStatement cfs, Mutable<ILogicalOperator> arg)
+            throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitFunctionDropStatement(FunctionDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+    
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreatePrimaryFeedStatement(CreatePrimaryFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateSecondaryFeedStatement(CreateSecondaryFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+ 
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitConnectFeedStatement(ConnectFeedStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedStatement(FeedDropStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCompactStatement(CompactStatement del,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        // TODO Auto-generated method stub
+        return null;
+    }
+    
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitCreateFeedPolicyStatement(CreateFeedPolicyStatement cfps,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        return null;
+    }
+
+    @Override
+    public Pair<ILogicalOperator, LogicalVariable> visitDropFeedPolicyStatement(FeedPolicyDropStatement dfs,
+            Mutable<ILogicalOperator> arg) throws AsterixException {
+        return null;
+    }
+}
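
For orientation, two of the translation steps in the file above lend themselves to small stand-alone illustrations. First, the operator-expression handling chains an n-ary comparison left-deep: the accumulated expression becomes the left argument of the next comparison, so $a < $b <= $c ends up as le(lt($a, $b), $c). A minimal Java sketch of that chaining, using plain strings in place of Algebricks expression objects (all class and method names here are illustrative, not AsterixDB APIs):

import java.util.List;

public class ComparisonChainSketch {
    // Builds the left-deep nesting produced by the chaining loop:
    // exprs = [$a, $b, $c], ops = [lt, le]  ==>  le(lt($a, $b), $c)
    static String chain(List<String> exprs, List<String> ops) {
        String current = exprs.get(0);
        for (int i = 1; i < exprs.size(); i++) {
            current = ops.get(i - 1) + "(" + current + ", " + exprs.get(i) + ")";
        }
        return current;
    }

    public static void main(String[] args) {
        System.out.println(chain(List.of("$a", "$b", "$c"), List.of("lt", "le")));
    }
}

Second, visitQuantifiedExpression realizes SOME as a select over the satisfying tuples followed by a non-empty-stream aggregate, and EVERY as a select over the violating tuples followed by an empty-stream aggregate. A plain-Java analogue of that equivalence, with streams standing in for the nested plan (again a sketch with hypothetical helper names, not the actual runtime):

import java.util.List;
import java.util.function.Predicate;

public class QuantifierSketch {
    // SOME: keep the items that satisfy the predicate and test for a non-empty stream.
    static <T> boolean some(List<T> items, Predicate<T> p) {
        return items.stream().filter(p).findAny().isPresent();
    }

    // EVERY: keep the items that violate the predicate and test for an empty stream.
    static <T> boolean every(List<T> items, Predicate<T> p) {
        return !items.stream().filter(p.negate()).findAny().isPresent();
    }

    public static void main(String[] args) {
        List<Integer> xs = List.of(1, 2, 3);
        System.out.println(some(xs, x -> x > 2));  // true
        System.out.println(every(xs, x -> x > 0)); // true
    }
}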


[46/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
deleted file mode 100644
index 782f7ed..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceInstantLockSearchCallbackRule.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.algebra.operators.CommitOperator;
-import edu.uci.ics.asterix.algebra.operators.physical.BTreeSearchPOperator;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataImplConfig;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.DataSourceScanPOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IntroduceInstantLockSearchCallbackRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    private void extractDataSourcesInfo(AbstractLogicalOperator op,
-            Map<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>> dataSourcesMap) {
-
-        for (int i = 0; i < op.getInputs().size(); ++i) {
-            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
-
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
-                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
-                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                    FunctionIdentifier fid = f.getFunctionIdentifier();
-                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-                        throw new IllegalStateException();
-                    }
-                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                    jobGenParams.readFromFuncArgs(f.getArguments());
-                    boolean isPrimaryIndex = jobGenParams.isPrimaryIndex();
-                    String indexName = jobGenParams.getIndexName();
-                    if (isPrimaryIndex) {
-                        if (dataSourcesMap.containsKey(indexName)) {
-                            ++(dataSourcesMap.get(indexName).first);
-                        } else {
-                            dataSourcesMap.put(indexName, new Triple<Integer, LogicalOperatorTag, IPhysicalOperator>(1,
-                                    LogicalOperatorTag.UNNEST_MAP, unnestMapOp.getPhysicalOperator()));
-                        }
-                    }
-                }
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
-                String datasourceName = ((AqlDataSource) dataSourceScanOp.getDataSource()).getDatasourceName();
-                if (dataSourcesMap.containsKey(datasourceName)) {
-                    ++(dataSourcesMap.get(datasourceName).first);
-                } else {
-                    dataSourcesMap.put(datasourceName, new Triple<Integer, LogicalOperatorTag, IPhysicalOperator>(1,
-                            LogicalOperatorTag.DATASOURCESCAN, dataSourceScanOp.getPhysicalOperator()));
-                }
-            }
-            extractDataSourcesInfo(descendantOp, dataSourcesMap);
-        }
-
-    }
-
-    private boolean checkIfRuleIsApplicable(AbstractLogicalOperator op) {
-        if (op.getPhysicalOperator() == null) {
-            return false;
-        }
-        if (op.getOperatorTag() == LogicalOperatorTag.EXTENSION_OPERATOR) {
-            ExtensionOperator extensionOp = (ExtensionOperator) op;
-            if (extensionOp.getDelegate() instanceof CommitOperator) {
-                return true;
-            }
-        }
-        if (op.getOperatorTag() == LogicalOperatorTag.DISTRIBUTE_RESULT
-                || op.getOperatorTag() == LogicalOperatorTag.WRITE_RESULT) {
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-
-        if (!checkIfRuleIsApplicable(op)) {
-            return false;
-        }
-        Map<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>> dataSourcesMap = new HashMap<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>>();
-        extractDataSourcesInfo(op, dataSourcesMap);
-
-        boolean introducedInstantLock = false;
-
-        Iterator<Map.Entry<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>>> it = dataSourcesMap
-                .entrySet().iterator();
-        while (it.hasNext()) {
-            Entry<String, Triple<Integer, LogicalOperatorTag, IPhysicalOperator>> entry = it.next();
-            Triple<Integer, LogicalOperatorTag, IPhysicalOperator> triple = entry.getValue();
-            if (triple.first == 1) {
-                AqlMetadataImplConfig aqlMetadataImplConfig = new AqlMetadataImplConfig(true);
-                if (triple.second == LogicalOperatorTag.UNNEST_MAP) {
-                    BTreeSearchPOperator pOperator = (BTreeSearchPOperator) triple.third;
-                    pOperator.setImplConfig(aqlMetadataImplConfig);
-                    introducedInstantLock = true;
-                } else {
-                    DataSourceScanPOperator pOperator = (DataSourceScanPOperator) triple.third;
-                    pOperator.setImplConfig(aqlMetadataImplConfig);
-                    introducedInstantLock = true;
-                }
-            }
-
-        }
-        return introducedInstantLock;
-    }
-}
\ No newline at end of file
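
The rule removed above walks the plan under a commit or result operator, tallies how many times each primary data source is accessed, and switches the search callback to an instant lock only for sources touched exactly once. A compact stand-alone sketch of that counting pattern (hypothetical classes, not the Algebricks operator model):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SingleAccessSketch {
    static class Op {
        final String dataSource;                  // null for operators that read no data source
        final List<Op> inputs = new ArrayList<>();
        boolean instantLock;                      // stands in for AqlMetadataImplConfig(true)
        Op(String dataSource) { this.dataSource = dataSource; }
    }

    // Collect every access to each data source in the subtree rooted at op.
    static void collect(Op op, Map<String, List<Op>> accesses) {
        if (op.dataSource != null) {
            accesses.computeIfAbsent(op.dataSource, k -> new ArrayList<>()).add(op);
        }
        for (Op in : op.inputs) {
            collect(in, accesses);
        }
    }

    public static void main(String[] args) {
        Op scanA = new Op("A"), scanB1 = new Op("B"), scanB2 = new Op("B");
        Op root = new Op(null);
        root.inputs.add(scanA);
        root.inputs.add(scanB1);
        root.inputs.add(scanB2);

        Map<String, List<Op>> accesses = new HashMap<>();
        collect(root, accesses);
        // Only data sources accessed exactly once are marked for an instant-lock search here.
        for (List<Op> ops : accesses.values()) {
            if (ops.size() == 1) {
                ops.get(0).instantLock = true;
            }
        }
        System.out.println("A: " + scanA.instantLock + ", B: " + scanB1.instantLock); // A: true, B: false
    }
}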

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
deleted file mode 100644
index acd3b13..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.MaterializePOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IntroduceMaterializationForInsertWithSelfScanRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
-            return false;
-        }
-
-        InsertDeleteOperator insertOp = (InsertDeleteOperator) op;
-        boolean sameDataset = checkIfInsertAndScanDatasetsSame(op, ((DatasetDataSource) insertOp.getDataSource())
-                .getDataset().getDatasetName());
-
-        if (sameDataset) {
-            MaterializeOperator materializeOperator = new MaterializeOperator();
-            MaterializePOperator materializePOperator = new MaterializePOperator(true);
-            materializeOperator.setPhysicalOperator(materializePOperator);
-
-            materializeOperator.getInputs().add(
-                    new MutableObject<ILogicalOperator>(insertOp.getInputs().get(0).getValue()));
-            context.computeAndSetTypeEnvironmentForOperator(materializeOperator);
-
-            insertOp.getInputs().clear();
-            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(materializeOperator));
-            context.computeAndSetTypeEnvironmentForOperator(insertOp);
-            return true;
-        } else {
-            return false;
-        }
-
-    }
-
-    private boolean checkIfInsertAndScanDatasetsSame(AbstractLogicalOperator op, String insertDatasetName) {
-        boolean sameDataset = false;
-        for (int i = 0; i < op.getInputs().size(); ++i) {
-            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
-
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
-                UnnestMapOperator unnestMapOp = (UnnestMapOperator) descendantOp;
-                ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
-                if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                    FunctionIdentifier fid = f.getFunctionIdentifier();
-                    if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-                        throw new IllegalStateException();
-                    }
-                    AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                    jobGenParams.readFromFuncArgs(f.getArguments());
-                    boolean isPrimaryIndex = jobGenParams.isPrimaryIndex();
-                    String indexName = jobGenParams.getIndexName();
-                    if (isPrimaryIndex && indexName.compareTo(insertDatasetName) == 0) {
-                        return true;
-                    }
-                }
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
-                DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
-                AqlDataSource ds = (AqlDataSource) dataSourceScanOp.getDataSource();
-                if (ds.getDatasourceType() != AqlDataSourceType.FEED
-                        && ds.getDatasourceType() != AqlDataSourceType.LOADABLE) {
-                    if (((DatasetDataSource) ds).getDataset().getDatasetName().compareTo(insertDatasetName) == 0) {
-                        return true;
-                    }
-                }
-            }
-            sameDataset = checkIfInsertAndScanDatasetsSame(descendantOp, insertDatasetName);
-            if (sameDataset) {
-                break;
-            }
-        }
-        return sameDataset;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
deleted file mode 100644
index f4349c3..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import edu.uci.ics.asterix.metadata.declared.FeedDataSource;
-import edu.uci.ics.asterix.metadata.entities.Feed;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.RandomPartitionPOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.INodeDomain;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IntroduceRandomPartitioningFeedComputationRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        ILogicalOperator op = opRef.getValue();
-        if (!op.getOperatorTag().equals(LogicalOperatorTag.ASSIGN)) {
-            return false;
-        }
-
-        ILogicalOperator opChild = op.getInputs().get(0).getValue();
-        if (!opChild.getOperatorTag().equals(LogicalOperatorTag.DATASOURCESCAN)) {
-            return false;
-        }
-
-        DataSourceScanOperator scanOp = (DataSourceScanOperator) opChild;
-        AqlDataSource dataSource = (AqlDataSource) scanOp.getDataSource();
-        if (!dataSource.getDatasourceType().equals(AqlDataSourceType.FEED)) {
-            return false;
-        }
-
-        final FeedDataSource feedDataSource = (FeedDataSource) dataSource;
-        Feed feed = feedDataSource.getFeed();
-        if (feed.getAppliedFunction() == null) {
-            return false;
-        }
-
-        ExchangeOperator exchangeOp = new ExchangeOperator();
-        INodeDomain domain = new INodeDomain() {
-            @Override
-            public boolean sameAs(INodeDomain domain) {
-                return domain == this;
-            }
-
-            @Override
-            public Integer cardinality() {
-                return feedDataSource.getComputeCardinality();
-            }
-        };
-
-        exchangeOp.setPhysicalOperator(new RandomPartitionPOperator(domain));
-        op.getInputs().get(0).setValue(exchangeOp);
-        exchangeOp.getInputs().add(new MutableObject<ILogicalOperator>(scanOp));
-        ExecutionMode em = ((AbstractLogicalOperator) scanOp).getExecutionMode();
-        exchangeOp.setExecutionMode(em);
-        exchangeOp.computeDeliveredPhysicalProperties(context);
-        context.computeAndSetTypeEnvironmentForOperator(exchangeOp);
-
-        AssignOperator assignOp = (AssignOperator) opRef.getValue();
-        AssignPOperator assignPhyOp = (AssignPOperator) assignOp.getPhysicalOperator();
-        assignPhyOp.setCardinalityConstraint(domain.cardinality());
-
-        return true;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
deleted file mode 100644
index d44d762..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.algebra.operators.CommitOperator;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This rule will search for project and assign operators in an insert/delete/update plan and
- * pass a hint to all of them. This hint is used by the project and assign operators so that frames are pushed to
- * the next operator without waiting until they get full. The purpose of this is to
- * reduce the time of holding exclusive locks on the keys that have been inserted. Also to allow feeds batching
- * to work correctly.
- *
- * @author salsubaiee
- */
-public class IntroduceRapidFrameFlushProjectAssignRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    private boolean checkIfRuleIsApplicable(AbstractLogicalOperator op) {
-        if (op.getOperatorTag() != LogicalOperatorTag.EXTENSION_OPERATOR) {
-            return false;
-        }
-        ExtensionOperator extensionOp = (ExtensionOperator) op;
-        if (!(extensionOp.getDelegate() instanceof CommitOperator)) {
-            return false;
-        }
-
-        for (int i = 0; i < op.getInputs().size(); ++i) {
-            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
-
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT
-                    || descendantOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                if (descendantOp.getPhysicalOperator() == null) {
-                    return false;
-                }
-            }
-            checkIfRuleIsApplicable(descendantOp);
-        }
-        return true;
-    }
-
-    private boolean changeRule(AbstractLogicalOperator op) {
-        boolean planModified = false;
-        for (int i = 0; i < op.getInputs().size(); ++i) {
-            AbstractLogicalOperator descendantOp = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
-
-            if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
-                ProjectOperator projectOp = (ProjectOperator) descendantOp;
-                StreamProjectPOperator physicalOp = (StreamProjectPOperator) projectOp.getPhysicalOperator();
-                physicalOp.setRapidFrameFlush(true);
-                planModified = true;
-            } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                AssignOperator assignOp = (AssignOperator) descendantOp;
-                AssignPOperator physicalOp = (AssignPOperator) assignOp.getPhysicalOperator();
-                physicalOp.setRapidFrameFlush(true);
-                planModified = true;
-            }
-            planModified |= changeRule(descendantOp);
-        }
-        return planModified;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (!checkIfRuleIsApplicable(op)) {
-            return false;
-        }
-        return changeRule(op);
-    }
-}
\ No newline at end of file
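
For reference, the descendant walk that the removed rule performs can be sketched as a
standalone helper, written so that modifications made deeper in the plan are still reported
back to the caller. This is a minimal sketch, not code from this commit; the class and method
names are invented, and it assumes only the Algebricks types already imported by the deleted file.

import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AssignPOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;

public class RapidFrameFlushSketch {

    /** Recursively enables rapid frame flush below op; returns true if any operator was changed. */
    public static boolean enableRapidFrameFlush(AbstractLogicalOperator op) {
        boolean planModified = false;
        for (int i = 0; i < op.getInputs().size(); ++i) {
            AbstractLogicalOperator child = (AbstractLogicalOperator) op.getInputs().get(i).getValue();
            if (child.getOperatorTag() == LogicalOperatorTag.PROJECT && child.getPhysicalOperator() != null) {
                ((StreamProjectPOperator) child.getPhysicalOperator()).setRapidFrameFlush(true);
                planModified = true;
            } else if (child.getOperatorTag() == LogicalOperatorTag.ASSIGN && child.getPhysicalOperator() != null) {
                ((AssignPOperator) child.getPhysicalOperator()).setRapidFrameFlush(true);
                planModified = true;
            }
            // keep the result of the recursive call instead of discarding it
            planModified |= enableRapidFrameFlush(child);
        }
        return planModified;
    }
}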

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
deleted file mode 100644
index 42b60d5..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ /dev/null
@@ -1,598 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Stack;
-
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlIndex;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AOrderedList;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
-        if (op0.getOperatorTag() != LogicalOperatorTag.SINK) {
-            return false;
-        }
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) op0.getInputs().get(0).getValue();
-        if (op1.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE) {
-            return false;
-        }
-
-        FunctionIdentifier fid = null;
-        /** find the record variable */
-        InsertDeleteOperator insertOp = (InsertDeleteOperator) op1;
-        ILogicalExpression recordExpr = insertOp.getPayloadExpression().getValue();
-        List<LogicalVariable> recordVar = new ArrayList<LogicalVariable>();
-        /** assume the payload is always a single variable expression */
-        recordExpr.getUsedVariables(recordVar);
-
-        /**
-         * op2 is the assign operator which extracts the primary keys from the record
-         * variable
-         */
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-
-        if (recordVar.size() == 0) {
-            /**
-             * For the case where the primary-key assignment expressions are constant
-             * expressions, find the assign op that creates the record to be
-             * inserted/deleted.
-             */
-            while (fid != AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR) {
-                if (op2.getInputs().size() == 0) {
-                    return false;
-                }
-                op2 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
-                if (op2.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-                    continue;
-                }
-                AssignOperator assignOp = (AssignOperator) op2;
-                ILogicalExpression assignExpr = assignOp.getExpressions().get(0).getValue();
-                if (assignExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                    ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) assignOp.getExpressions()
-                            .get(0).getValue();
-                    fid = funcExpr.getFunctionIdentifier();
-                }
-            }
-            AssignOperator assignOp2 = (AssignOperator) op2;
-            recordVar.addAll(assignOp2.getVariables());
-        }
-        AqlDataSource datasetSource = (AqlDataSource) insertOp.getDataSource();
-        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-        String dataverseName = datasetSource.getId().getDataverseName();
-        String datasetName = datasetSource.getId().getDatasourceName();
-        Dataset dataset = mp.findDataset(dataverseName, datasetName);
-        if (dataset == null) {
-            throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
-        }
-        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-            return false;
-        }
-
-        // Create operators for secondary index insert/delete.
-        String itemTypeName = dataset.getItemTypeName();
-        IAType itemType = mp.findType(dataset.getDataverseName(), itemTypeName);
-        if (itemType.getTypeTag() != ATypeTag.RECORD) {
-            throw new AlgebricksException("Only record types can be indexed.");
-        }
-        ARecordType recType = (ARecordType) itemType;
-        List<Index> indexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
-        ILogicalOperator currentTop = op1;
-        boolean hasSecondaryIndex = false;
-
-        // Sort the indexes so that n-gram and keyword indexes are handled in the later stages
-        // of the index update, since a TokenizeOperator needs to be involved for them.
-        Collections.sort(indexes, new Comparator<Index>() {
-            @Override
-            public int compare(Index o1, Index o2) {
-                return o1.getIndexType().ordinal() - o2.getIndexType().ordinal();
-            }
-
-        });
-
-        // Count the secondary indexes so we know whether more than one index needs to be updated
-        int secondaryIndexTotalCnt = 0;
-        for (Index index : indexes) {
-            if (index.isSecondaryIndex())
-                secondaryIndexTotalCnt++;
-        }
-
-        // Initialize inputs to the SINK operator
-        if (secondaryIndexTotalCnt > 0) {
-            op0.getInputs().clear();
-        }
-
-        // Replicate Operator is applied only when doing the bulk-load.
-        AbstractLogicalOperator replicateOp = null;
-
-        if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload()) {
-            // Split the logical plan into "each secondary index update branch"
-            // to replicate each <PK,RECORD> pair.
-            replicateOp = new ReplicateOperator(secondaryIndexTotalCnt);
-            replicateOp.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-            replicateOp.setExecutionMode(ExecutionMode.PARTITIONED);
-            context.computeAndSetTypeEnvironmentForOperator(replicateOp);
-            currentTop = replicateOp;
-        }
-
-        // Prepare filtering field information
-        List<String> additionalFilteringField = ((InternalDatasetDetails) dataset.getDatasetDetails()).getFilterField();
-        List<LogicalVariable> additionalFilteringVars = null;
-        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
-        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
-        AssignOperator additionalFilteringAssign = null;
-
-        if (additionalFilteringField != null) {
-            additionalFilteringVars = new ArrayList<LogicalVariable>();
-            additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            prepareVarAndExpression(additionalFilteringField, recType.getFieldNames(), recordVar.get(0),
-                    additionalFilteringAssignExpressions, additionalFilteringVars, context);
-            additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
-                    additionalFilteringAssignExpressions);
-            for (LogicalVariable var : additionalFilteringVars) {
-                additionalFilteringExpressions.add(new MutableObject<ILogicalExpression>(
-                        new VariableReferenceExpression(var)));
-            }
-        }
-
-        // Iterate over each secondary index and apply the index update operations.
-        for (Index index : indexes) {
-            List<LogicalVariable> projectVars = new ArrayList<LogicalVariable>();
-            VariableUtilities.getUsedVariables(op1, projectVars);
-            if (!index.isSecondaryIndex()) {
-                continue;
-            }
-            LogicalVariable enforcedRecordVar = recordVar.get(0);
-            hasSecondaryIndex = true;
-            //if the index is enforcing field types
-            if (index.isEnforcingKeyFileds()) {
-                try {
-                    DatasetDataSource ds = (DatasetDataSource) (insertOp.getDataSource());
-                    ARecordType insertRecType = (ARecordType) ds.getSchemaTypes()[ds.getSchemaTypes().length - 1];
-                    LogicalVariable castVar = context.newVar();
-                    ARecordType enforcedType = createEnforcedType(insertRecType, index);
-                    //introduce casting to enforced type
-                    AbstractFunctionCallExpression castFunc = new ScalarFunctionCallExpression(
-                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CAST_RECORD));
-
-                    castFunc.getArguments().add(
-                            new MutableObject<ILogicalExpression>(insertOp.getPayloadExpression().getValue()));
-                    TypeComputerUtilities.setRequiredAndInputTypes(castFunc, enforcedType, insertRecType);
-                    AssignOperator newAssignOperator = new AssignOperator(castVar,
-                            new MutableObject<ILogicalExpression>(castFunc));
-                    newAssignOperator.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-                    currentTop = newAssignOperator;
-                    //project out casted record
-                    projectVars.add(castVar);
-                    enforcedRecordVar = castVar;
-                    context.computeAndSetTypeEnvironmentForOperator(newAssignOperator);
-                    context.computeAndSetTypeEnvironmentForOperator(currentTop);
-                    recType = enforcedType;
-                } catch (AsterixException e) {
-                    throw new AlgebricksException(e);
-                }
-            }
-
-            List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
-            List<IAType> secondaryKeyTypes = index.getKeyFieldTypes();
-            List<LogicalVariable> secondaryKeyVars = new ArrayList<LogicalVariable>();
-            List<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();
-            List<Mutable<ILogicalExpression>> secondaryExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-
-            for (List<String> secondaryKey : secondaryKeyFields) {
-                prepareVarAndExpression(secondaryKey, recType.getFieldNames(), enforcedRecordVar, expressions,
-                        secondaryKeyVars, context);
-            }
-
-            AssignOperator assign = new AssignOperator(secondaryKeyVars, expressions);
-            ProjectOperator project = new ProjectOperator(projectVars);
-
-            if (additionalFilteringAssign != null) {
-                additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(project));
-                assign.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
-            } else {
-                assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
-            }
-
-            // Only apply replicate operator when doing bulk-load
-            if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload())
-                project.getInputs().add(new MutableObject<ILogicalOperator>(replicateOp));
-            else
-                project.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-
-            context.computeAndSetTypeEnvironmentForOperator(project);
-
-            if (additionalFilteringAssign != null) {
-                context.computeAndSetTypeEnvironmentForOperator(additionalFilteringAssign);
-            }
-
-            context.computeAndSetTypeEnvironmentForOperator(assign);
-            currentTop = assign;
-
-            // BTree, Keyword, or n-gram index case
-            if (index.getIndexType() == IndexType.BTREE
-                    || index.getIndexType() == IndexType.SINGLE_PARTITION_WORD_INVIX
-                    || index.getIndexType() == IndexType.SINGLE_PARTITION_NGRAM_INVIX
-                    || index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
-                    || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
-                for (LogicalVariable secondaryKeyVar : secondaryKeyVars) {
-                    secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                            secondaryKeyVar)));
-                }
-                Mutable<ILogicalExpression> filterExpression = createFilterExpression(secondaryKeyVars,
-                        context.getOutputTypeEnvironment(currentTop), false);
-                AqlIndex dataSourceIndex = new AqlIndex(index, dataverseName, datasetName, mp);
-
-                // Introduce the TokenizeOperator only when doing bulk-load,
-                // and index type is keyword or n-gram.
-                if (index.getIndexType() != IndexType.BTREE && insertOp.isBulkload()) {
-
-                    // Check whether the index is length-partitioned or not.
-                    // If it is, [input variables to TokenizeOperator,
-                    // token, number of tokens] triples will be generated and
-                    // fed into the IndexInsertDeleteOperator.
-                    // If not, [input variables, token] pairs will be generated
-                    // and fed into the IndexInsertDeleteOperator.
-                    // Input variables are passed along since TokenizeOperator is not
-                    // a filtering operator.
-                    boolean isPartitioned = false;
-                    if (index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
-                            || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)
-                        isPartitioned = true;
-
-                    // Create a new logical variable - token
-                    List<LogicalVariable> tokenizeKeyVars = new ArrayList<LogicalVariable>();
-                    List<Mutable<ILogicalExpression>> tokenizeKeyExprs = new ArrayList<Mutable<ILogicalExpression>>();
-                    LogicalVariable tokenVar = context.newVar();
-                    tokenizeKeyVars.add(tokenVar);
-                    tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(
-                            new VariableReferenceExpression(tokenVar)));
-
-                    // Check the field type of the secondary key.
-                    IAType secondaryKeyType = null;
-                    Pair<IAType, Boolean> keyPairType = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0),
-                            recType);
-                    secondaryKeyType = keyPairType.first;
-
-                    List<Object> varTypes = new ArrayList<Object>();
-                    varTypes.add(NonTaggedFormatUtil.getTokenType(secondaryKeyType));
-
-                    // If the index is length-partitioned, then create an
-                    // additional variable for the number of tokens.
-                    // We use a special type for the length-partitioned index:
-                    // a short that does not carry type info.
-                    if (isPartitioned) {
-                        LogicalVariable lengthVar = context.newVar();
-                        tokenizeKeyVars.add(lengthVar);
-                        tokenizeKeyExprs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                                lengthVar)));
-                        varTypes.add(BuiltinType.SHORTWITHOUTTYPEINFO);
-                    }
-
-                    // TokenizeOperator to tokenize [SK, PK] pairs
-                    TokenizeOperator tokenUpdate = new TokenizeOperator(dataSourceIndex,
-                            insertOp.getPrimaryKeyExpressions(), secondaryExpressions, tokenizeKeyVars,
-                            filterExpression, insertOp.getOperation(), insertOp.isBulkload(), isPartitioned, varTypes);
-                    tokenUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-                    context.computeAndSetTypeEnvironmentForOperator(tokenUpdate);
-
-                    IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
-                            insertOp.getPrimaryKeyExpressions(), tokenizeKeyExprs, filterExpression,
-                            insertOp.getOperation(), insertOp.isBulkload());
-                    indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-                    indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(tokenUpdate));
-
-                    context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
-
-                    currentTop = indexUpdate;
-                    op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-
-                } else {
-                    // When TokenizeOperator is not needed
-                    IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
-                            insertOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression,
-                            insertOp.getOperation(), insertOp.isBulkload());
-                    indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-                    indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-
-                    currentTop = indexUpdate;
-                    context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
-
-                    if (insertOp.isBulkload())
-                        op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-
-                }
-
-            } else if (index.getIndexType() == IndexType.RTREE) {
-                Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
-                        secondaryKeyFields.get(0), recType);
-                IAType spatialType = keyPairType.first;
-                int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
-                int numKeys = dimension * 2;
-                List<LogicalVariable> keyVarList = new ArrayList<LogicalVariable>();
-                List<Mutable<ILogicalExpression>> keyExprList = new ArrayList<Mutable<ILogicalExpression>>();
-                for (int i = 0; i < numKeys; i++) {
-                    LogicalVariable keyVar = context.newVar();
-                    keyVarList.add(keyVar);
-                    AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
-                            FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.CREATE_MBR));
-                    createMBR.getArguments().add(
-                            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVars
-                                    .get(0))));
-                    createMBR.getArguments().add(
-                            new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                                    new AInt32(dimension)))));
-                    createMBR.getArguments().add(
-                            new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                                    new AInt32(i)))));
-                    keyExprList.add(new MutableObject<ILogicalExpression>(createMBR));
-                }
-                for (LogicalVariable secondaryKeyVar : keyVarList) {
-                    secondaryExpressions.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                            secondaryKeyVar)));
-                }
-                AssignOperator assignCoordinates = new AssignOperator(keyVarList, keyExprList);
-                assignCoordinates.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-                context.computeAndSetTypeEnvironmentForOperator(assignCoordinates);
-                // We must enforce the filter if the originating spatial type is
-                // nullable.
-                boolean forceFilter = keyPairType.second;
-                Mutable<ILogicalExpression> filterExpression = createFilterExpression(keyVarList,
-                        context.getOutputTypeEnvironment(assignCoordinates), forceFilter);
-                AqlIndex dataSourceIndex = new AqlIndex(index, dataverseName, datasetName, mp);
-                IndexInsertDeleteOperator indexUpdate = new IndexInsertDeleteOperator(dataSourceIndex,
-                        insertOp.getPrimaryKeyExpressions(), secondaryExpressions, filterExpression,
-                        insertOp.getOperation(), insertOp.isBulkload());
-                indexUpdate.setAdditionalFilteringExpressions(additionalFilteringExpressions);
-                indexUpdate.getInputs().add(new MutableObject<ILogicalOperator>(assignCoordinates));
-                currentTop = indexUpdate;
-                context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
-
-                if (insertOp.isBulkload())
-                    op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-
-            }
-
-        }
-        if (!hasSecondaryIndex) {
-            return false;
-        }
-
-        if (!insertOp.isBulkload()) {
-            op0.getInputs().clear();
-            op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
-        }
-        return true;
-    }
-
-    public static ARecordType createEnforcedType(ARecordType initialType, Index index) throws AsterixException,
-            AlgebricksException {
-        ARecordType enforcedType = initialType;
-        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
-            try {
-                Stack<Pair<ARecordType, String>> nestedTypeStack = new Stack<Pair<ARecordType, String>>();
-                List<String> splits = index.getKeyFieldNames().get(i);
-                ARecordType nestedFieldType = enforcedType;
-                boolean openRecords = false;
-                String bridgeName = nestedFieldType.getTypeName();
-                int j;
-                //Build the stack for the enforced type
-                for (j = 1; j < splits.size(); j++) {
-                    nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
-                    bridgeName = nestedFieldType.getTypeName();
-                    nestedFieldType = (ARecordType) enforcedType.getSubFieldType(splits.subList(0, j));
-                    if (nestedFieldType == null) {
-                        openRecords = true;
-                        break;
-                    }
-                }
-                if (openRecords) {
-                    //create the smallest record
-                    enforcedType = new ARecordType(splits.get(splits.size() - 2), new String[] { splits.get(splits
-                            .size() - 1) }, new IAType[] { AUnionType.createNullableType(index.getKeyFieldTypes()
-                            .get(i)) }, true);
-                    //create the open part of the nested field
-                    for (int k = splits.size() - 3; k > (j - 2); k--) {
-                        enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
-                                new IAType[] { AUnionType.createNullableType(enforcedType) }, true);
-                    }
-                    //Bridge the gap
-                    Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
-                    ARecordType parent = gapPair.first;
-
-                    IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
-                            new IAType[] { AUnionType.createNullableType(enforcedType) });
-                    enforcedType = new ARecordType(bridgeName, ArrayUtils.addAll(parent.getFieldNames(),
-                            enforcedType.getTypeName()), parentFieldTypes, true);
-
-                } else {
-                    //Schema is closed all the way to the field
-                    //enforced fields are either null or strongly typed
-                    enforcedType = new ARecordType(nestedFieldType.getTypeName(), ArrayUtils.addAll(
-                            nestedFieldType.getFieldNames(), splits.get(splits.size() - 1)), ArrayUtils.addAll(
-                            nestedFieldType.getFieldTypes(),
-                            AUnionType.createNullableType(index.getKeyFieldTypes().get(i))), nestedFieldType.isOpen());
-                }
-
-                //Create the enforced type for the nested fields in the schema, from the ground up
-                if (nestedTypeStack.size() > 0) {
-                    while (!nestedTypeStack.isEmpty()) {
-                        Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
-                        ARecordType nestedRecType = nestedTypePair.first;
-                        IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
-                        nestedRecTypeFieldTypes[nestedRecType.findFieldPosition(nestedTypePair.second)] = enforcedType;
-                        enforcedType = new ARecordType(nestedRecType.getTypeName(), nestedRecType.getFieldNames(),
-                                nestedRecTypeFieldTypes, nestedRecType.isOpen());
-                    }
-                }
-
-            } catch (AsterixException e) {
-                throw new AlgebricksException("Cannot enforce typed fields "
-                        + StringUtils.join(index.getKeyFieldNames()), e);
-            } catch (IOException e) {
-                throw new AsterixException(e);
-            }
-        }
-        return enforcedType;
-    }
-
-    @SuppressWarnings("unchecked")
-    private void prepareVarAndExpression(List<String> field, String[] fieldNames, LogicalVariable recordVar,
-            List<Mutable<ILogicalExpression>> expressions, List<LogicalVariable> vars, IOptimizationContext context)
-            throws AlgebricksException {
-        Mutable<ILogicalExpression> varRef = new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
-                recordVar));
-        int pos = -1;
-        if (field.size() == 1) {
-            for (int j = 0; j < fieldNames.length; j++) {
-                if (fieldNames[j].equals(field.get(0))) {
-                    pos = j;
-                    break;
-                }
-            }
-        }
-        if (pos == -1) {
-            AbstractFunctionCallExpression func;
-            if (field.size() > 1) {
-                AOrderedList fieldList = new AOrderedList(new AOrderedListType(BuiltinType.ASTRING, null));
-                for (int i = 0; i < field.size(); i++) {
-                    fieldList.add(new AString(field.get(i)));
-                }
-                Mutable<ILogicalExpression> fieldRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
-                        new AsterixConstantValue(fieldList)));
-                //Create an expression for the nested case
-                func = new ScalarFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED), varRef, fieldRef);
-            } else {
-                Mutable<ILogicalExpression> fieldRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
-                        new AsterixConstantValue(new AString(field.get(0)))));
-                //Create an expression for the open field case (By name)
-                func = new ScalarFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME), varRef, fieldRef);
-            }
-            expressions.add(new MutableObject<ILogicalExpression>(func));
-            LogicalVariable newVar = context.newVar();
-            vars.add(newVar);
-        } else {
-            // Assumes the indexed field is in the closed portion of the type.
-            Mutable<ILogicalExpression> indexRef = new MutableObject<ILogicalExpression>(new ConstantExpression(
-                    new AsterixConstantValue(new AInt32(pos))));
-            AbstractFunctionCallExpression func = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX), varRef, indexRef);
-            expressions.add(new MutableObject<ILogicalExpression>(func));
-            LogicalVariable newVar = context.newVar();
-            vars.add(newVar);
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    private Mutable<ILogicalExpression> createFilterExpression(List<LogicalVariable> secondaryKeyVars,
-            IVariableTypeEnvironment typeEnv, boolean forceFilter) throws AlgebricksException {
-        List<Mutable<ILogicalExpression>> filterExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-        // Add 'is not null' to all nullable secondary index keys as a filtering
-        // condition.
-        for (LogicalVariable secondaryKeyVar : secondaryKeyVars) {
-            IAType secondaryKeyType = (IAType) typeEnv.getVarType(secondaryKeyVar);
-            if (!NonTaggedFormatUtil.isOptional(secondaryKeyType) && !forceFilter) {
-                continue;
-            }
-            ScalarFunctionCallExpression isNullFuncExpr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.IS_NULL),
-                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
-            ScalarFunctionCallExpression notFuncExpr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT), new MutableObject<ILogicalExpression>(
-                            isNullFuncExpr));
-            filterExpressions.add(new MutableObject<ILogicalExpression>(notFuncExpr));
-        }
-        // No nullable secondary keys.
-        if (filterExpressions.isEmpty()) {
-            return null;
-        }
-        Mutable<ILogicalExpression> filterExpression = null;
-        if (filterExpressions.size() > 1) {
-            // Create a conjunctive condition.
-            filterExpression = new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.AND), filterExpressions));
-        } else {
-            filterExpression = filterExpressions.get(0);
-        }
-        return filterExpression;
-    }
-}
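
The trickiest part of the rule removed above is createEnforcedType. For the simple, non-nested
case its effect can be sketched as follows: the index key is appended to the record type as a
nullable field, mirroring the ArrayUtils/AUnionType pattern in the deleted code. This is a
sketch, not code from this commit; the type, field, and class names are invented, and only the
APIs already used by the deleted file are assumed.

import org.apache.commons.lang3.ArrayUtils;

import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;

public class EnforcedTypeSketch {

    /** Appends fieldName as a nullable field of fieldType to baseType (top-level, non-nested case). */
    public static ARecordType enforceTopLevelField(ARecordType baseType, String fieldName, IAType fieldType)
            throws Exception {
        // enforced fields are either null or strongly typed, hence the nullable union
        return new ARecordType(baseType.getTypeName(),
                ArrayUtils.addAll(baseType.getFieldNames(), fieldName),
                ArrayUtils.addAll(baseType.getFieldTypes(), AUnionType.createNullableType(fieldType)),
                baseType.isOpen());
    }

    public static void main(String[] args) throws Exception {
        // an open record type with a single closed field "id"
        ARecordType person = new ARecordType("PersonType", new String[] { "id" },
                new IAType[] { BuiltinType.ASTRING }, true);
        // enforce a secondary-index key "nickname" of type string
        ARecordType enforced = enforceTopLevelField(person, "nickname", BuiltinType.ASTRING);
        System.out.println(enforced);
    }
}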

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
deleted file mode 100644
index 6419fc1..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.optimizer.rules.typecast.StaticTypeCastUtil;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Statically casts a constant from its type to a specified required type, in a
- * recursive way. It enables: 1. bag-based fields in a record, 2. bidirectional
- * casting between an open field and a matched closed field, and 3. filling in
- * null fields when necessary. It should be fired before the constant folding rule.
- * This rule is not responsible for type casting between primitive types.
- * Here is an example: a record { "hobby": {{"music", "coding"}}, "id": "001",
- * "name": "Person Three"} which conforms to the closed type ( id: string, name:
- * string, hobby: {{string}}? ) can be cast to an open type (id: string ), or
- * vice versa.
- * Implementation-wise: first, we match the record's type against its target dataset
- * type to see whether it is "cast-able"; second, if the types are cast-able, we
- * embed the required type into the original producer expression. If the types
- * are not cast-able, we throw a compile-time exception.
- * Then, at runtime (not in this rule), the corresponding record/list
- * constructors know what to do by checking the required output type.
- * TODO: right now the record/list construction of the cast result is not done in the
- * ConstantFoldingRule and has to happen at runtime, because the
- * ConstantFoldingRule uses ARecordSerializerDeserializer, which seems to have
- * some problems.
- */
-public class IntroduceStaticTypeCastForInsertRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        /**
-         * pattern match: sink/insert/assign record type is propagated from
-         * insert data source to the record-constructor expression
-         */
-        if (context.checkIfInDontApplySet(this, opRef.getValue()))
-            return false;
-        context.addToDontApplySet(this, opRef.getValue());
-
-        AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
-        List<LogicalVariable> producedVariables = new ArrayList<LogicalVariable>();
-        LogicalVariable oldRecordVariable;
-
-        if (op1.getOperatorTag() != LogicalOperatorTag.SINK)
-            return false;
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE)
-            return false;
-        InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
-        if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
-            return false;
-        /**
-         * get required record type
-         */
-        InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) op2;
-        AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
-        IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
-        IAType requiredRecordType = schemaTypes[schemaTypes.length - 1];
-
-        List<LogicalVariable> usedVariables = new ArrayList<LogicalVariable>();
-        insertDeleteOperator.getPayloadExpression().getValue().getUsedVariables(usedVariables);
-
-        // the used variables should contain the record that will be inserted,
-        // but in many cases this does not fail even if the used-variable set is
-        // empty
-        if (usedVariables.size() == 0)
-            return false;
-
-        oldRecordVariable = usedVariables.get(0);
-        LogicalVariable inputRecordVar = usedVariables.get(0);
-        IVariableTypeEnvironment env = insertDeleteOperator.computeOutputTypeEnvironment(context);
-        IAType inputRecordType = (IAType) env.getVarType(inputRecordVar);
-
-        AbstractLogicalOperator currentOperator = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
-        /**
-         * find the assign operator for the "input record" to the insert_delete
-         * operator
-         */
-        do {
-            context.addToDontApplySet(this, currentOperator);
-            if (currentOperator.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-                AssignOperator assignOp = (AssignOperator) currentOperator;
-                producedVariables.clear();
-                VariableUtilities.getProducedVariables(currentOperator, producedVariables);
-                int position = producedVariables.indexOf(oldRecordVariable);
-
-                /**
-                 * set the top-down propagated type
-                 */
-                if (position >= 0) {
-                    AssignOperator originalAssign = (AssignOperator) currentOperator;
-                    List<Mutable<ILogicalExpression>> expressionRefs = originalAssign.getExpressions();
-                    ILogicalExpression expr = expressionRefs.get(position).getValue();
-                    if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-                        // the expression has already been rewritten; do not
-                        // fail, just return false
-                        if (TypeComputerUtilities.getRequiredType(funcExpr) != null) {
-                            context.computeAndSetTypeEnvironmentForOperator(assignOp);
-                            return false;
-                        }
-                        IVariableTypeEnvironment assignEnv = assignOp.computeOutputTypeEnvironment(context);
-                        StaticTypeCastUtil.rewriteFuncExpr(funcExpr, requiredRecordType, inputRecordType, assignEnv);
-                    }
-                    context.computeAndSetTypeEnvironmentForOperator(originalAssign);
-                }
-            }
-            if (currentOperator.getInputs().size() > 0)
-                currentOperator = (AbstractLogicalOperator) currentOperator.getInputs().get(0).getValue();
-            else
-                break;
-        } while (currentOperator != null);
-        return true;
-    }
-
-}
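
For the "embed the required type into the original producer expression" step described in the
Javadoc above, a minimal sketch might look like the following. It is not code from this commit;
the class and method names are invented, and only the TypeComputerUtilities calls already used
by the deleted rules are assumed.

import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;

public class RequiredTypeSketch {

    /** Attaches the dataset's record type to the producer expression; returns false if already attached. */
    public static boolean attachRequiredType(AbstractFunctionCallExpression recordConstructor,
            ARecordType requiredRecordType, ARecordType inputRecordType) {
        if (TypeComputerUtilities.getRequiredType(recordConstructor) != null) {
            // the expression was already rewritten by an earlier firing of the rule
            return false;
        }
        // the runtime record/list constructors read this required type back to decide how to cast
        TypeComputerUtilities.setRequiredAndInputTypes(recordConstructor, requiredRecordType, inputRecordType);
        return true;
    }
}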

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
deleted file mode 100644
index 3984fe8..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class IntroduceTransactionCommitByAssignOpRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-        SelectOperator selectOperator = (SelectOperator) op;
-
-        Mutable<ILogicalOperator> childOfSelect = selectOperator.getInputs().get(0);
-
-        //[Direction] SelectOp(cond1)<--ChildOps... ==> SelectOp(booleanValue of cond1)<--NewAssignOp(cond1)<--ChildOps...
-        //#. Create an assign-operator with a new local variable and the condition of the select-operator.
-        //#. Set the input (child operator) of the new assign-operator to the input (child operator) of the select-operator.
-        //   (Later, the newly created assign-operator will apply the condition on behalf of the select-operator,
-        //    and set the variable of the assign-operator to a boolean value according to the condition evaluation.)
-        //#. Give the select-operator the result boolean value created by the newly created child assign-operator.
-
-        //create an assignOp with a variable and the condition of the select-operator.
-        LogicalVariable v = context.newVar();
-        AssignOperator assignOperator = new AssignOperator(v, new MutableObject<ILogicalExpression>(selectOperator
-                .getCondition().getValue()));
-
-        //set the input of the new assign-operator to the input of the select-operator.
-        assignOperator.getInputs().add(childOfSelect);
-
-        //set the result value of the assign-operator to the condition of the select-operator
-        selectOperator.getCondition().setValue(new VariableReferenceExpression(v));
-        selectOperator.getInputs().set(0, new MutableObject<ILogicalOperator>(assignOperator));
-
-        context.computeAndSetTypeEnvironmentForOperator(assignOperator);
-
-        //Once this rule is fired, don't apply again.
-        context.addToDontApplySet(this, selectOperator);
-        return true;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnionRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnionRule.java
deleted file mode 100644
index 050a2cd..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnionRule.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * @author kereno, ecarm002, ildar.absalyamov
- *         Generates a union operator and substitutes it for "assign <- [function-call: asterix:union]"
- *         Before rule:
- *         ============
- *         assign [var] <- [asterix:union(left_branch, right_branch)]
- *         join (TRUE)
- *         left_branch
- *         right_branch
- *         After rule:
- *         ============
- *         union (left_branch, right_branch, result_var)
- *         left_branch
- *         right_branch
- */
-public class IntroduceUnionRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-
-        if (!opRef.getValue().getOperatorTag().equals(LogicalOperatorTag.ASSIGN)) {
-            return false;
-        }
-
-        AssignOperator assignUnion = (AssignOperator) opRef.getValue();
-
-        if (assignUnion.getExpressions().get(0).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL)
-            return false;
-
-        AbstractFunctionCallExpression u = (AbstractFunctionCallExpression) assignUnion.getExpressions().get(0)
-                .getValue();
-        if (!AsterixBuiltinFunctions.UNION.equals(u.getFunctionIdentifier())) {
-            return false;
-        }
-
-        //Retrieving the logical variables for the union from the two aggregates which are inputs to the join
-        Mutable<ILogicalOperator> join = assignUnion.getInputs().get(0);
-
-        LogicalOperatorTag tag1 = join.getValue().getOperatorTag();
-        if (tag1 != LogicalOperatorTag.INNERJOIN && tag1 != LogicalOperatorTag.LEFTOUTERJOIN) {
-            return false;
-        }
-        AbstractBinaryJoinOperator join1 = (AbstractBinaryJoinOperator) join.getValue();
-        ILogicalExpression cond1 = join1.getCondition().getValue();
-        // don't try to push a product down
-        if (!OperatorPropertiesUtil.isAlwaysTrueCond(cond1)) {
-            return false;
-        }
-
-        List<Mutable<ILogicalOperator>> joinInputs = join.getValue().getInputs();
-
-        Mutable<ILogicalOperator> left_branch = joinInputs.get(0);
-        Mutable<ILogicalOperator> right_branch = joinInputs.get(1);
-
-        List<LogicalVariable> input1Var = new ArrayList<LogicalVariable>();
-        VariableUtilities.getProducedVariables(left_branch.getValue(), input1Var);
-
-        List<LogicalVariable> input2Var = new ArrayList<LogicalVariable>();
-        VariableUtilities.getProducedVariables(right_branch.getValue(), input2Var);
-
-        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(
-                1);
-        Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
-                input1Var.get(0), input2Var.get(0), assignUnion.getVariables().get(0));
-        varMap.add(triple);
-        UnionAllOperator unionOp = new UnionAllOperator(varMap);
-
-        unionOp.getInputs().add(left_branch);
-        unionOp.getInputs().add(right_branch);
-
-        context.computeAndSetTypeEnvironmentForOperator(unionOp);
-
-        opRef.setValue(unionOp);
-
-        return true;
-
-    }
-}
\ No newline at end of file
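
As a side note on the variable mapping built above: each Triple holds the variable produced by
the left branch, the variable produced by the right branch, and the variable that carries the
union result downstream. A minimal sketch of that wiring, with invented names and only the
Algebricks classes already referenced in the deleted file:

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;

public class UnionWiringSketch {

    public static UnionAllOperator buildUnionAll(Mutable<ILogicalOperator> leftBranch, LogicalVariable leftVar,
            Mutable<ILogicalOperator> rightBranch, LogicalVariable rightVar, LogicalVariable outputVar) {
        // (left-branch variable, right-branch variable, output variable visible to downstream operators)
        List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap =
                new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(1);
        varMap.add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(leftVar, rightVar, outputVar));
        UnionAllOperator unionOp = new UnionAllOperator(varMap);
        unionOp.getInputs().add(leftBranch);
        unionOp.getInputs().add(rightBranch);
        return unionOp;
    }
}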

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java
deleted file mode 100644
index 5943911..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceUnnestForCollectionToSequenceRule.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * This rule introduces an unnest operator for the collection-to-sequence function (if the input to the function is a collection).
- *
- * @author yingyib
- */
-public class IntroduceUnnestForCollectionToSequenceRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.ASSIGN) {
-            return false;
-        }
-        AssignOperator assign = (AssignOperator) op;
-        List<Mutable<ILogicalExpression>> exprs = assign.getExpressions();
-        if (exprs.size() != 1) {
-            return false;
-        }
-        ILogicalExpression expr = exprs.get(0).getValue();
-        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression func = (AbstractFunctionCallExpression) expr;
-        if (func.getFunctionIdentifier() != AsterixBuiltinFunctions.COLLECTION_TO_SEQUENCE) {
-            return false;
-        }
-
-        IVariableTypeEnvironment env = assign.computeInputTypeEnvironment(context);
-        ILogicalExpression argExpr = func.getArguments().get(0).getValue();
-        IAType outerExprType = (IAType) env.getType(expr);
-        IAType innerExprType = (IAType) env.getType(argExpr);
-        if (outerExprType.equals(innerExprType)) {
-            /** nothing is changed by the collection-to-sequence function, so remove the collection-to-sequence function call */
-            assign.getExpressions().set(0, new MutableObject<ILogicalExpression>(argExpr));
-            return true;
-        }
-        /** change the assign operator to an unnest operator */
-        LogicalVariable var = assign.getVariables().get(0);
-        @SuppressWarnings("unchecked")
-        UnnestOperator unnest = new UnnestOperator(var, new MutableObject<ILogicalExpression>(
-                new UnnestingFunctionCallExpression(
-                        FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
-                        new MutableObject<ILogicalExpression>(argExpr))));
-        unnest.getInputs().addAll(assign.getInputs());
-        opRef.setValue(unnest);
-        context.computeAndSetTypeEnvironmentForOperator(unnest);
-        return true;
-    }
-}

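A toy sketch of the two outcomes distinguished by the rule above, using plain stand-ins instead of IAType and the assign/unnest operators; it only restates which branch rewritePost takes when the argument is, or is not, a collection.

    class CollectionToSequenceSketch {
        enum InputShape { NON_COLLECTION, COLLECTION }

        // When the input is not a collection, collection-to-sequence is a no-op and the
        // call is simply removed from the assign; when it is a collection, the assign is
        // replaced by an unnest over scan-collection of the same argument.
        static String rewrittenPlan(InputShape shape) {
            if (shape == InputShape.NON_COLLECTION) {
                return "assign $x := arg";
            }
            return "unnest $x <- scan-collection(arg)";
        }

        public static void main(String[] args) {
            System.out.println(rewrittenPlan(InputShape.COLLECTION));
        }
    }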

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
deleted file mode 100644
index 65c9da8..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
+++ /dev/null
@@ -1,300 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.algebra.operators.physical.BTreeSearchPOperator;
-import edu.uci.ics.asterix.algebra.operators.physical.InvertedIndexPOperator;
-import edu.uci.ics.asterix.algebra.operators.physical.RTreeSearchPOperator;
-import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.optimizer.rules.am.AccessMethodJobGenParams;
-import edu.uci.ics.asterix.optimizer.rules.am.BTreeJobGenParams;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.ExternalGroupByPOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.PreclusteredGroupByPOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
-import edu.uci.ics.hyracks.algebricks.rewriter.util.JoinUtils;
-
-public class SetAsterixPhysicalOperatorsRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-
-        computeDefaultPhysicalOp(op, context);
-        context.addToDontApplySet(this, op);
-        return true;
-    }
-
-    private static void setPhysicalOperators(ILogicalPlan plan, IOptimizationContext context)
-            throws AlgebricksException {
-        for (Mutable<ILogicalOperator> root : plan.getRoots()) {
-            computeDefaultPhysicalOp((AbstractLogicalOperator) root.getValue(), context);
-        }
-    }
-
-    private static void computeDefaultPhysicalOp(AbstractLogicalOperator op, IOptimizationContext context)
-            throws AlgebricksException {
-        PhysicalOptimizationConfig physicalOptimizationConfig = context.getPhysicalOptimizationConfig();
-        if (op.getOperatorTag().equals(LogicalOperatorTag.GROUP)) {
-            GroupByOperator gby = (GroupByOperator) op;
-            if (gby.getNestedPlans().size() == 1) {
-                ILogicalPlan p0 = gby.getNestedPlans().get(0);
-                if (p0.getRoots().size() == 1) {
-                    Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
-                    if (((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().equals(
-                            LogicalOperatorTag.AGGREGATE)) {
-                        AggregateOperator aggOp = (AggregateOperator) r0.getValue();
-                        boolean serializable = true;
-                        for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) {
-                            AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) exprRef.getValue();
-                            if (!AsterixBuiltinFunctions.isAggregateFunctionSerializable(expr.getFunctionIdentifier())) {
-                                serializable = false;
-                                break;
-                            }
-                        }
-
-                        if ((gby.getAnnotations().get(OperatorAnnotations.USE_HASH_GROUP_BY) == Boolean.TRUE || gby
-                                .getAnnotations().get(OperatorAnnotations.USE_EXTERNAL_GROUP_BY) == Boolean.TRUE)) {
-                            boolean setToExternalGby = false;
-                            if (serializable) {
-                                // if serializable, use external group-by
-                                // now check whether the serialized version of each aggregation function has a corresponding intermediate aggregate
-                                boolean hasIntermediateAgg = true;
-                                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
-                                        .getMergeAggregationExpressionFactory();
-                                List<LogicalVariable> originalVariables = aggOp.getVariables();
-                                List<Mutable<ILogicalExpression>> aggExprs = aggOp.getExpressions();
-                                int aggNum = aggExprs.size();
-                                for (int i = 0; i < aggNum; i++) {
-                                    AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs
-                                            .get(i).getValue();
-                                    AggregateFunctionCallExpression serialAggExpr = AsterixBuiltinFunctions
-                                            .makeSerializableAggregateFunctionExpression(expr.getFunctionIdentifier(),
-                                                    expr.getArguments());
-                                    if (mergeAggregationExpressionFactory.createMergeAggregation(
-                                            originalVariables.get(i), serialAggExpr, context) == null) {
-                                        hasIntermediateAgg = false;
-                                        break;
-                                    }
-                                }
-
-                                if (hasIntermediateAgg) {
-                                    for (int i = 0; i < aggNum; i++) {
-                                        AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs
-                                                .get(i).getValue();
-                                        AggregateFunctionCallExpression serialAggExpr = AsterixBuiltinFunctions
-                                                .makeSerializableAggregateFunctionExpression(
-                                                        expr.getFunctionIdentifier(), expr.getArguments());
-                                        aggOp.getExpressions().get(i).setValue(serialAggExpr);
-                                    }
-                                    ExternalGroupByPOperator externalGby = new ExternalGroupByPOperator(
-                                            gby.getGroupByList(),
-                                            physicalOptimizationConfig.getMaxFramesExternalGroupBy(),
-                                            physicalOptimizationConfig.getExternalGroupByTableSize());
-                                    generateMergeAggregationExpressions(gby, context);
-                                    op.setPhysicalOperator(externalGby);
-                                    setToExternalGby = true;
-                                }
-                            }
-
-                            if (!setToExternalGby) {
-                                // if not serializable or no intermediate agg, use pre-clustered group-by
-                                List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gbyList = gby.getGroupByList();
-                                List<LogicalVariable> columnList = new ArrayList<LogicalVariable>(gbyList.size());
-                                for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyList) {
-                                    ILogicalExpression expr = p.second.getValue();
-                                    if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                                        VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
-                                        columnList.add(varRef.getVariableReference());
-                                    }
-                                }
-                                op.setPhysicalOperator(new PreclusteredGroupByPOperator(columnList));
-                            }
-                        }
-                    } else if (((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().equals(
-                            LogicalOperatorTag.RUNNINGAGGREGATE)) {
-                        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> gbyList = gby.getGroupByList();
-                        List<LogicalVariable> columnList = new ArrayList<LogicalVariable>(gbyList.size());
-                        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : gbyList) {
-                            ILogicalExpression expr = p.second.getValue();
-                            if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-                                VariableReferenceExpression varRef = (VariableReferenceExpression) expr;
-                                columnList.add(varRef.getVariableReference());
-                            }
-                        }
-                        op.setPhysicalOperator(new PreclusteredGroupByPOperator(columnList));
-                    } else {
-                        throw new AlgebricksException("Unsupported nested operator within a group-by: "
-                                + ((AbstractLogicalOperator) (r0.getValue())).getOperatorTag().name());
-                    }
-                }
-            }
-        }
-        if (op.getPhysicalOperator() == null) {
-            switch (op.getOperatorTag()) {
-                case INNERJOIN: {
-                    JoinUtils.setJoinAlgorithmAndExchangeAlgo((InnerJoinOperator) op, context);
-                    break;
-                }
-                case LEFTOUTERJOIN: {
-                    JoinUtils.setJoinAlgorithmAndExchangeAlgo((LeftOuterJoinOperator) op, context);
-                    break;
-                }
-                case UNNEST_MAP: {
-                    UnnestMapOperator unnestMap = (UnnestMapOperator) op;
-                    ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
-                    if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                        AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-                        FunctionIdentifier fid = f.getFunctionIdentifier();
-                        if (!fid.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-                            throw new IllegalStateException();
-                        }
-                        AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
-                        jobGenParams.readFromFuncArgs(f.getArguments());
-                        AqlMetadataProvider mp = (AqlMetadataProvider) context.getMetadataProvider();
-                        AqlSourceId dataSourceId = new AqlSourceId(jobGenParams.getDataverseName(),
-                                jobGenParams.getDatasetName());
-                        IDataSourceIndex<String, AqlSourceId> dsi = mp.findDataSourceIndex(jobGenParams.getIndexName(),
-                                dataSourceId);
-                        if (dsi == null) {
-                            throw new AlgebricksException("Could not find index " + jobGenParams.getIndexName()
-                                    + " for dataset " + dataSourceId);
-                        }
-                        IndexType indexType = jobGenParams.getIndexType();
-                        boolean requiresBroadcast = jobGenParams.getRequiresBroadcast();
-                        switch (indexType) {
-                            case BTREE: {
-                                BTreeJobGenParams btreeJobGenParams = new BTreeJobGenParams();
-                                btreeJobGenParams.readFromFuncArgs(f.getArguments());
-                                op.setPhysicalOperator(new BTreeSearchPOperator(dsi, requiresBroadcast,
-                                        btreeJobGenParams.isPrimaryIndex(), btreeJobGenParams.isEqCondition(),
-                                        btreeJobGenParams.getLowKeyVarList(), btreeJobGenParams.getHighKeyVarList()));
-                                break;
-                            }
-                            case RTREE: {
-                                op.setPhysicalOperator(new RTreeSearchPOperator(dsi, requiresBroadcast));
-                                break;
-                            }
-                            case SINGLE_PARTITION_WORD_INVIX:
-                            case SINGLE_PARTITION_NGRAM_INVIX: {
-                                op.setPhysicalOperator(new InvertedIndexPOperator(dsi, requiresBroadcast, false));
-                                break;
-                            }
-                            case LENGTH_PARTITIONED_WORD_INVIX:
-                            case LENGTH_PARTITIONED_NGRAM_INVIX: {
-                                op.setPhysicalOperator(new InvertedIndexPOperator(dsi, requiresBroadcast, true));
-                                break;
-                            }
-                            default: {
-                                throw new NotImplementedException(indexType + " indexes are not implemented.");
-                            }
-                        }
-                    }
-                    break;
-                }
-            }
-        }
-        if (op.hasNestedPlans()) {
-            AbstractOperatorWithNestedPlans nested = (AbstractOperatorWithNestedPlans) op;
-            for (ILogicalPlan p : nested.getNestedPlans()) {
-                setPhysicalOperators(p, context);
-            }
-        }
-        for (Mutable<ILogicalOperator> opRef : op.getInputs()) {
-            computeDefaultPhysicalOp((AbstractLogicalOperator) opRef.getValue(), context);
-        }
-    }
-
-    private static void generateMergeAggregationExpressions(GroupByOperator gby, IOptimizationContext context)
-            throws AlgebricksException {
-        if (gby.getNestedPlans().size() != 1) {
-            throw new AlgebricksException(
-                    "External group-by currently works only for one nested plan with one root containing"
-                            + " an aggregate and a nested-tuple-source.");
-        }
-        ILogicalPlan p0 = gby.getNestedPlans().get(0);
-        if (p0.getRoots().size() != 1) {
-            throw new AlgebricksException(
-                    "External group-by currently works only for one nested plan with one root containing"
-                            + " an aggregate and a nested-tuple-source.");
-        }
-        IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
-                .getMergeAggregationExpressionFactory();
-        Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
-        AbstractLogicalOperator r0Logical = (AbstractLogicalOperator) r0.getValue();
-        if (r0Logical.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
-            throw new AlgebricksException("The merge aggregation expression generation should not process a "
-                    + r0Logical.getOperatorTag() + " operator.");
-        }
-        AggregateOperator aggOp = (AggregateOperator) r0.getValue();
-        List<Mutable<ILogicalExpression>> aggFuncRefs = aggOp.getExpressions();
-        List<LogicalVariable> aggProducedVars = aggOp.getVariables();
-        int n = aggOp.getExpressions().size();
-        List<Mutable<ILogicalExpression>> mergeExpressionRefs = new ArrayList<Mutable<ILogicalExpression>>();
-        for (int i = 0; i < n; i++) {
-            ILogicalExpression mergeExpr = mergeAggregationExpressionFactory.createMergeAggregation(
-                    aggProducedVars.get(i), aggFuncRefs.get(i).getValue(), context);
-            if (mergeExpr == null) {
-                throw new AlgebricksException("The aggregation function " + aggFuncRefs.get(i).getValue()
-                        + " does not have a registered intermediate aggregation function.");
-            }
-            mergeExpressionRefs.add(new MutableObject<ILogicalExpression>(mergeExpr));
-        }
-        aggOp.setMergeExpressions(mergeExpressionRefs);
-    }
-
-}
\ No newline at end of file

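The group-by handling above compresses to a three-way decision. The sketch below is a dependency-free restatement of that logic, with boolean stand-ins for what the rule actually derives from AsterixBuiltinFunctions and the IMergeAggregationExpressionFactory; it is an illustration, not the rule itself.

    class GroupByPhysicalOpSketch {
        // Mirrors computeDefaultPhysicalOp for a GROUP operator whose nested plan is
        // rooted at an AGGREGATE: without a hash/external hint the rule leaves the
        // group-by alone; with the hint it prefers external group-by only when every
        // aggregate is serializable and has an intermediate (merge) aggregate.
        static String choose(boolean hashOrExternalHint,
                             boolean allAggregatesSerializable,
                             boolean allSerializedAggregatesHaveIntermediate) {
            if (!hashOrExternalHint) {
                return "unset (left to the default physical-operator assignment)";
            }
            if (allAggregatesSerializable && allSerializedAggregatesHaveIntermediate) {
                return "EXTERNAL_GROUP_BY (aggregates rewritten to their serialized form)";
            }
            return "PRE_CLUSTERED_GROUP_BY";
        }

        public static void main(String[] args) {
            System.out.println(choose(true, true, false)); // falls back to pre-clustered group-by
        }
    }

For a nested plan rooted at a RUNNINGAGGREGATE, the rule above always assigns pre-clustered group-by.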
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
deleted file mode 100644
index 4b9a947..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.AbstractConstVarFunVisitor;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * open-record-constructor() becomes closed-record-constructor() if all the
- * branches below lead to dataset scans for closed record types
- */
-
-public class SetClosedRecordConstructorsRule implements IAlgebraicRewriteRule {
-
-    private SettingClosedRecordVisitor recordVisitor;
-
-    public SetClosedRecordConstructorsRule() {
-        this.recordVisitor = new SettingClosedRecordVisitor();
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext ctx) throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (ctx.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-        ctx.addToDontApplySet(this, op);
-        this.recordVisitor.setOptimizationContext(ctx, op.computeInputTypeEnvironment(ctx));
-        boolean res = op.acceptExpressionTransform(recordVisitor);
-        if (res) {
-            ctx.computeAndSetTypeEnvironmentForOperator(op);
-        }
-        return res;
-    }
-
-    private static class SettingClosedRecordVisitor extends AbstractConstVarFunVisitor<ClosedDataInfo, Void> implements
-            ILogicalExpressionReferenceTransform {
-
-        private IOptimizationContext context;
-        private IVariableTypeEnvironment env;
-
-        public void setOptimizationContext(IOptimizationContext context, IVariableTypeEnvironment env) {
-            this.context = context;
-            this.env = env;
-        }
-
-        @Override
-        public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
-            AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
-            ClosedDataInfo cdi = expr.accept(this, null);
-            if (cdi.expressionChanged) {
-                exprRef.setValue(cdi.expression);
-            }
-            return cdi.expressionChanged;
-        }
-
-        @Override
-        public ClosedDataInfo visitConstantExpression(ConstantExpression expr, Void arg) throws AlgebricksException {
-            return new ClosedDataInfo(false, hasClosedType(expr), expr);
-        }
-
-        @Override
-        public ClosedDataInfo visitFunctionCallExpression(AbstractFunctionCallExpression expr, Void arg)
-                throws AlgebricksException {
-            boolean allClosed = true;
-            boolean changed = false;
-            if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
-                ARecordType reqType = (ARecordType) TypeComputerUtilities.getRequiredType(expr);
-                if (reqType == null || !reqType.isOpen()) {
-                    int n = expr.getArguments().size();
-                    if (n % 2 > 0) {
-                        throw new AlgebricksException(
-                                "Record constructor expected to have an even number of arguments: " + expr);
-                    }
-                    for (int i = 0; i < n / 2; i++) {
-                        ILogicalExpression a0 = expr.getArguments().get(2 * i).getValue();
-                        if (a0.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                            allClosed = false;
-                        }
-                        Mutable<ILogicalExpression> aRef1 = expr.getArguments().get(2 * i + 1);
-                        ILogicalExpression a1 = aRef1.getValue();
-                        ClosedDataInfo cdi = a1.accept(this, arg);
-                        if (!cdi.dataIsClosed) {
-                            allClosed = false;
-                        }
-                        if (cdi.expressionChanged) {
-                            aRef1.setValue(cdi.expression);
-                            changed = true;
-                        }
-                    }
-                    if (allClosed) {
-                        expr.setFunctionInfo(FunctionUtils
-                                .getFunctionInfo(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR));
-                        GlobalConfig.ASTERIX_LOGGER.finest("Switching to CLOSED record constructor in " + expr + ".\n");
-                        changed = true;
-                    }
-                }
-            } else {
-                boolean rewrite = true;
-                if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)
-                        || (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR))) {
-                    IAType reqType = TypeComputerUtilities.getRequiredType(expr);
-                    if (reqType == null) {
-                        rewrite = false;
-                    }
-                }
-                if (rewrite) {
-                    for (Mutable<ILogicalExpression> e : expr.getArguments()) {
-                        ILogicalExpression ale = e.getValue();
-                        ClosedDataInfo cdi = ale.accept(this, arg);
-                        if (!cdi.dataIsClosed) {
-                            allClosed = false;
-                        }
-                        if (cdi.expressionChanged) {
-                            e.setValue(cdi.expression);
-                            changed = true;
-                        }
-                    }
-                }
-            }
-            return new ClosedDataInfo(changed, hasClosedType(expr), expr);
-        }
-
-        @Override
-        public ClosedDataInfo visitVariableReferenceExpression(VariableReferenceExpression expr, Void arg)
-                throws AlgebricksException {
-            Object varType = env.getVarType(expr.getVariableReference());
-            if (varType == null) {
-                throw new AlgebricksException("Could not infer type for variable '" + expr.getVariableReference()
-                        + "'.");
-            }
-            boolean dataIsClosed = isClosedRec((IAType) varType);
-            return new ClosedDataInfo(false, dataIsClosed, expr);
-        }
-
-        private boolean hasClosedType(ILogicalExpression expr) throws AlgebricksException {
-            IAType t = (IAType) context.getExpressionTypeComputer().getType(expr, context.getMetadataProvider(), env);
-            return isClosedRec(t);
-        }
-
-        private static boolean isClosedRec(IAType t) throws AlgebricksException {
-            switch (t.getTypeTag()) {
-                case ANY: {
-                    return false;
-                }
-                case CIRCLE:
-                case INT8:
-                case INT16:
-                case INT32:
-                case INT64:
-                case BINARY:
-                case BITARRAY:
-                case FLOAT:
-                case DOUBLE:
-                case STRING:
-                case LINE:
-                case NULL:
-                case BOOLEAN:
-                case DATETIME:
-                case DATE:
-                case TIME:
-                case UUID:
-                case DURATION:
-                case YEARMONTHDURATION:
-                case DAYTIMEDURATION:
-                case INTERVAL:
-                case POINT:
-                case POINT3D:
-                case POLYGON:
-                case RECTANGLE:
-                case SHORTWITHOUTTYPEINFO: {
-                    return true;
-                }
-                case RECORD: {
-                    return !((ARecordType) t).isOpen();
-                }
-                case UNION: {
-                    AUnionType ut = (AUnionType) t;
-                    for (IAType t2 : ut.getUnionList()) {
-                        if (!isClosedRec(t2)) {
-                            return false;
-                        }
-                    }
-                    return true;
-                }
-                case ORDEREDLIST: {
-                    AOrderedListType ol = (AOrderedListType) t;
-                    return isClosedRec(ol.getItemType());
-                }
-                case UNORDEREDLIST: {
-                    AUnorderedListType ul = (AUnorderedListType) t;
-                    return isClosedRec(ul.getItemType());
-                }
-                default: {
-                    throw new IllegalStateException("Closed type analysis not implemented for type " + t);
-                }
-            }
-        }
-    }
-
-    private static class ClosedDataInfo {
-        boolean expressionChanged;
-        boolean dataIsClosed;
-        ILogicalExpression expression;
-
-        public ClosedDataInfo(boolean expressionChanged, boolean dataIsClosed, ILogicalExpression expression) {
-            this.expressionChanged = expressionChanged;
-            this.dataIsClosed = dataIsClosed;
-            this.expression = expression;
-        }
-    }
-}

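The closed-type analysis in isClosedRec above can be summarized with a toy type model: scalar types are closed, ANY is open, a record is closed exactly when it is declared closed (field types are not inspected at that level), and list and union types are closed only if their item or member types are. The sketch below is that restatement in plain Java, not the commit's IAType hierarchy.

    import java.util.Arrays;

    class ClosedTypeSketch {
        interface ToyType { boolean isClosed(); }

        static ToyType scalar()             { return () -> true;  }  // INT32, STRING, DATE, ...
        static ToyType any()                { return () -> false; }  // open by definition
        static ToyType record(boolean open) { return () -> !open;  } // closed record types stay closed
        static ToyType list(ToyType item)   { return item::isClosed; }
        static ToyType union(ToyType... ms) { return () -> Arrays.stream(ms).allMatch(ToyType::isClosed); }

        public static void main(String[] args) {
            System.out.println(list(record(false)).isClosed()); // true: ordered list of a closed record
            System.out.println(list(any()).isClosed());         // false: a list of ANY keeps the constructor open
        }
    }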
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
deleted file mode 100644
index 2d03d69..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SimilarityCheckRule.java
+++ /dev/null
@@ -1,336 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.IAObject;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.hierachy.ATypeHierarchy;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-/**
- * Looks for a select operator, containing a condition:
- * similarity-function GE/GT/LE/LT constant/variable
- * Rewrites the select condition (and possibly the assign expr) with the equivalent similarity-check function.
- */
-public class SimilarityCheckRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        // Look for select.
-        if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
-            return false;
-        }
-        SelectOperator select = (SelectOperator) op;
-        Mutable<ILogicalExpression> condExpr = select.getCondition();
-
-        // Gather assigns below this select.
-        List<AssignOperator> assigns = new ArrayList<AssignOperator>();
-        AbstractLogicalOperator childOp = (AbstractLogicalOperator) select.getInputs().get(0).getValue();
-        // Skip selects.
-        while (childOp.getOperatorTag() == LogicalOperatorTag.SELECT) {
-            childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
-        }
-        while (childOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
-            assigns.add((AssignOperator) childOp);
-            childOp = (AbstractLogicalOperator) childOp.getInputs().get(0).getValue();
-        }
-        return replaceSelectConditionExprs(condExpr, assigns, context);
-    }
-
-    private boolean replaceSelectConditionExprs(Mutable<ILogicalExpression> expRef, List<AssignOperator> assigns,
-            IOptimizationContext context) throws AlgebricksException {
-        ILogicalExpression expr = expRef.getValue();
-        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            return false;
-        }
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-        // Recursively traverse conjuncts.
-        // TODO: Ignore disjuncts for now, because some replacements may be invalid.
-        // For example, if the result of the similarity function is used somewhere upstream,
-        // then we may still need the true similarity value even if the GE/GT/LE/LT comparison returns false.
-        if (funcIdent == AlgebricksBuiltinFunctions.AND) {
-            boolean found = true;
-            for (int i = 0; i < funcExpr.getArguments().size(); ++i) {
-                found = found && replaceSelectConditionExprs(funcExpr.getArguments().get(i), assigns, context);
-            }
-            return found;
-        }
-
-        // Look for GE/GT/LE/LT.
-        if (funcIdent != AlgebricksBuiltinFunctions.GE && funcIdent != AlgebricksBuiltinFunctions.GT
-                && funcIdent != AlgebricksBuiltinFunctions.LE && funcIdent != AlgebricksBuiltinFunctions.LT) {
-            return false;
-        }
-
-        // One arg should be a function call or a variable, the other a constant.
-        AsterixConstantValue constVal = null;
-        ILogicalExpression nonConstExpr = null;
-        ILogicalExpression arg1 = funcExpr.getArguments().get(0).getValue();
-        ILogicalExpression arg2 = funcExpr.getArguments().get(1).getValue();
-        // Normalized GE/GT/LE/LT as if the constant were on the right-hand side.
-        FunctionIdentifier normFuncIdent = null;
-        // One of the args must be a constant.
-        if (arg1.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-            ConstantExpression constExpr = (ConstantExpression) arg1;
-            constVal = (AsterixConstantValue) constExpr.getValue();
-            nonConstExpr = arg2;
-            // Get func ident as if swapping lhs and rhs.
-            normFuncIdent = getLhsAndRhsSwappedFuncIdent(funcIdent);
-        } else if (arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-            ConstantExpression constExpr = (ConstantExpression) arg2;
-            constVal = (AsterixConstantValue) constExpr.getValue();
-            nonConstExpr = arg1;
-            // Constant is already on rhs, so nothing to be done for normalizedFuncIdent.
-            normFuncIdent = funcIdent;
-        } else {
-            return false;
-        }
-
-        // The other arg is a function call. We can directly replace the select condition with an equivalent similarity check expression.
-        if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            return replaceWithFunctionCallArg(expRef, normFuncIdent, constVal,
-                    (AbstractFunctionCallExpression) nonConstExpr);
-        }
-        // The other arg is a variable. We may have to introduce an assign operator that assigns the result of a similarity-check function to a variable.
-        if (nonConstExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
-            return replaceWithVariableArg(expRef, normFuncIdent, constVal, (VariableReferenceExpression) nonConstExpr,
-                    assigns, context);
-        }
-        return false;
-    }
-
-    private boolean replaceWithVariableArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
-            AsterixConstantValue constVal, VariableReferenceExpression varRefExpr, List<AssignOperator> assigns,
-            IOptimizationContext context) throws AlgebricksException {
-
-        // Find variable in assigns to determine its originating function.
-        LogicalVariable var = varRefExpr.getVariableReference();
-        Mutable<ILogicalExpression> simFuncExprRef = null;
-        ScalarFunctionCallExpression simCheckFuncExpr = null;
-        AssignOperator matchingAssign = null;
-        for (int i = 0; i < assigns.size(); i++) {
-            AssignOperator assign = assigns.get(i);
-            for (int j = 0; j < assign.getVariables().size(); j++) {
-                // Check if variables match.
-                if (var != assign.getVariables().get(j)) {
-                    continue;
-                }
-                // Check if corresponding expr is a function call.
-                if (assign.getExpressions().get(j).getValue().getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-                    continue;
-                }
-                simFuncExprRef = assign.getExpressions().get(j);
-                // Analyze function expression and get equivalent similarity check function.
-                simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal,
-                        (AbstractFunctionCallExpression) simFuncExprRef.getValue());
-                matchingAssign = assign;
-                break;
-            }
-            if (simCheckFuncExpr != null) {
-                break;
-            }
-        }
-
-        // Only non-null if we found that varRefExpr refers to an optimizable similarity function call.
-        if (simCheckFuncExpr != null) {
-            // Create a new assign under matchingAssign which assigns the result of our similarity-check function to a variable.
-            LogicalVariable newVar = context.newVar();
-            AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(
-                    simCheckFuncExpr));
-            // Hook up inputs.
-            newAssign.getInputs()
-                    .add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
-            matchingAssign.getInputs().get(0).setValue(newAssign);
-
-            // Replace select condition with a get-item on newVar.
-            List<Mutable<ILogicalExpression>> selectGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            // First arg is a variable reference expr on newVar.
-            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
-            // Second arg is the item index to be accessed, here 0.
-            selectGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
-                    new AsterixConstantValue(new AInt32(0)))));
-            ILogicalExpression selectGetItemExpr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), selectGetItemArgs);
-            // Replace the old similarity function call with the new getItemExpr.
-            expRef.setValue(selectGetItemExpr);
-
-            // Replace expr corresponding to original variable in the original assign with a get-item on newVar.
-            List<Mutable<ILogicalExpression>> assignGetItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            // First arg is a variable reference expr on newVar.
-            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(newVar)));
-            // Second arg is the item index to be accessed, here 1.
-            assignGetItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
-                    new AsterixConstantValue(new AInt32(1)))));
-            ILogicalExpression assignGetItemExpr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), assignGetItemArgs);
-            // Replace the original assign expr with the get-item expr.
-            simFuncExprRef.setValue(assignGetItemExpr);
-
-            context.computeAndSetTypeEnvironmentForOperator(newAssign);
-            context.computeAndSetTypeEnvironmentForOperator(matchingAssign);
-
-            return true;
-        }
-
-        return false;
-    }
-
-    private boolean replaceWithFunctionCallArg(Mutable<ILogicalExpression> expRef, FunctionIdentifier normFuncIdent,
-            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) throws AlgebricksException {
-        // Analyze func expr to see if it is an optimizable similarity function.
-        ScalarFunctionCallExpression simCheckFuncExpr = getSimilarityCheckExpr(normFuncIdent, constVal, funcExpr);
-
-        // Replace the expr in the select condition.
-        if (simCheckFuncExpr != null) {
-            // Get item 0 from var.
-            List<Mutable<ILogicalExpression>> getItemArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            // First arg is the similarity-check function call.
-            getItemArgs.add(new MutableObject<ILogicalExpression>(simCheckFuncExpr));
-            // Second arg is the item index to be accessed.
-            getItemArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                    new AInt32(0)))));
-            ILogicalExpression getItemExpr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM), getItemArgs);
-            // Replace the old similarity function call with the new getItemExpr.
-            expRef.setValue(getItemExpr);
-            return true;
-        }
-
-        return false;
-    }
-
-    private ScalarFunctionCallExpression getSimilarityCheckExpr(FunctionIdentifier normFuncIdent,
-            AsterixConstantValue constVal, AbstractFunctionCallExpression funcExpr) throws AlgebricksException {
-        // Remember args from original similarity function to add them to the similarity-check function later.
-        ArrayList<Mutable<ILogicalExpression>> similarityArgs = null;
-        ScalarFunctionCallExpression simCheckFuncExpr = null;
-        // Look for jaccard function call, and GE or GT.
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.SIMILARITY_JACCARD) {
-            IAObject jaccThresh;
-            if (normFuncIdent == AlgebricksBuiltinFunctions.GE) {
-                if (constVal.getObject() instanceof AFloat) {
-                    jaccThresh = constVal.getObject();
-                } else {
-                    jaccThresh = new AFloat((float) ((ADouble) constVal.getObject()).getDoubleValue());
-                }
-            } else if (normFuncIdent == AlgebricksBuiltinFunctions.GT) {
-                float threshVal = 0.0f;
-                if (constVal.getObject() instanceof AFloat) {
-                    threshVal = ((AFloat) constVal.getObject()).getFloatValue();
-                } else {
-                    threshVal = (float) ((ADouble) constVal.getObject()).getDoubleValue();
-                }
-                float f = threshVal + Float.MIN_VALUE;
-                if (f > 1.0f)
-                    f = 1.0f;
-                jaccThresh = new AFloat(f);
-            } else {
-                return null;
-            }
-            similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            similarityArgs.addAll(funcExpr.getArguments());
-            similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                    jaccThresh))));
-            simCheckFuncExpr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.SIMILARITY_JACCARD_CHECK), similarityArgs);
-        }
-
-        // Look for edit-distance function call, and LE or LT.
-        if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.EDIT_DISTANCE) {
-            AInt32 aInt = new AInt32(0);
-            try {
-                aInt = (AInt32) ATypeHierarchy.convertNumericTypeObject(constVal.getObject(), ATypeTag.INT32);
-            } catch (AsterixException e) {
-                throw new AlgebricksException(e);
-            }
-
-            AInt32 edThresh;
-            if (normFuncIdent == AlgebricksBuiltinFunctions.LE) {
-                edThresh = aInt;
-            } else if (normFuncIdent == AlgebricksBuiltinFunctions.LT) {
-                int ed = aInt.getIntegerValue() - 1;
-                if (ed < 0)
-                    ed = 0;
-                edThresh = new AInt32(ed);
-            } else {
-                return null;
-            }
-            similarityArgs = new ArrayList<Mutable<ILogicalExpression>>();
-            similarityArgs.addAll(funcExpr.getArguments());
-            similarityArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
-                    edThresh))));
-            simCheckFuncExpr = new ScalarFunctionCallExpression(
-                    FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.EDIT_DISTANCE_CHECK), similarityArgs);
-        }
-        // Preserve all annotations.
-        if (simCheckFuncExpr != null) {
-            simCheckFuncExpr.getAnnotations().putAll(funcExpr.getAnnotations());
-        }
-        return simCheckFuncExpr;
-    }
-
-    private FunctionIdentifier getLhsAndRhsSwappedFuncIdent(FunctionIdentifier oldFuncIdent) {
-        if (oldFuncIdent == AlgebricksBuiltinFunctions.GE) {
-            return AlgebricksBuiltinFunctions.LE;
-        }
-        if (oldFuncIdent == AlgebricksBuiltinFunctions.GT) {
-            return AlgebricksBuiltinFunctions.LT;
-        }
-        if (oldFuncIdent == AlgebricksBuiltinFunctions.LE) {
-            return AlgebricksBuiltinFunctions.GE;
-        }
-        if (oldFuncIdent == AlgebricksBuiltinFunctions.LT) {
-            return AlgebricksBuiltinFunctions.GT;
-        }
-        throw new IllegalStateException();
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-}

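A dependency-free sketch of the threshold arithmetic in getSimilarityCheckExpr above: a strict > on similarity-jaccard becomes a similarity-jaccard-check with the threshold nudged upward and clamped at 1.0, and a strict < on edit-distance becomes an edit-distance-check with the threshold decremented and floored at 0. One observation, stated as a fact about float arithmetic rather than about the rule's intent: adding Float.MIN_VALUE (the smallest subnormal) to a typical threshold such as 0.8f does not change the value; Math.nextUp is printed alongside for comparison.

    class SimilarityThresholdSketch {
        // GT on similarity-jaccard -> GE-style similarity-jaccard-check with a bumped threshold.
        static float jaccardThresholdForGT(float thresh) {
            float f = thresh + Float.MIN_VALUE; // same expression as in the rule above
            return f > 1.0f ? 1.0f : f;
        }

        // LT on edit-distance -> LE-style edit-distance-check with the threshold reduced by one.
        static int editDistanceThresholdForLT(int thresh) {
            int ed = thresh - 1;
            return ed < 0 ? 0 : ed;
        }

        public static void main(String[] args) {
            System.out.println(jaccardThresholdForGT(0.8f));   // 0.8: the subnormal is absorbed by rounding
            System.out.println(Math.nextUp(0.8f));             // the next float above 0.8, for comparison
            System.out.println(editDistanceThresholdForLT(3)); // 2
        }
    }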
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
deleted file mode 100644
index e2f2f4f..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
+++ /dev/null
@@ -1,296 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.AbstractExtractExprRule;
-
-public class SweepIllegalNonfunctionalFunctions extends AbstractExtractExprRule implements IAlgebraicRewriteRule {
-
-    private final IllegalNonfunctionalFunctionSweeperOperatorVisitor visitor;
-
-    public SweepIllegalNonfunctionalFunctions() {
-        visitor = new IllegalNonfunctionalFunctionSweeperOperatorVisitor();
-    }
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        ILogicalOperator op = opRef.getValue();
-        if (context.checkIfInDontApplySet(this, op)) {
-            return false;
-        }
-
-        op.accept(visitor, null);
-        context.computeAndSetTypeEnvironmentForOperator(op);
-        context.addToDontApplySet(this, op);
-        return false;
-    }
-
-    private class IllegalNonfunctionalFunctionSweeperOperatorVisitor implements ILogicalOperatorVisitor<Void, Void> {
-
-        private void sweepExpression(ILogicalExpression expr, ILogicalOperator op) throws AlgebricksException {
-            if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-                if (!expr.isFunctional()) {
-                    AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
-                    throw new AlgebricksException("Found non-functional function " + fce.getFunctionIdentifier()
-                            + " in op " + op);
-                }
-            }
-        }
-
-        @Override
-        public Void visitAggregateOperator(AggregateOperator op, Void arg) throws AlgebricksException {
-            for (Mutable<ILogicalExpression> me : op.getExpressions()) {
-                sweepExpression(me.getValue(), op);
-            }
-            List<Mutable<ILogicalExpression>> mergeExprs = op.getMergeExpressions();
-            if (mergeExprs != null) {
-                for (Mutable<ILogicalExpression> me : mergeExprs) {
-                    sweepExpression(me.getValue(), op);
-                }
-            }
-            return null;
-        }
-
-        @Override
-        public Void visitRunningAggregateOperator(RunningAggregateOperator op, Void arg) throws AlgebricksException {
-            for (Mutable<ILogicalExpression> me : op.getExpressions()) {
-                sweepExpression(me.getValue(), op);
-            }
-            return null;
-        }
-
-        @Override
-        public Void visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitGroupByOperator(GroupByOperator op, Void arg) throws AlgebricksException {
-            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : op.getGroupByList()) {
-                sweepExpression(p.second.getValue(), op);
-            }
-            for (Pair<LogicalVariable, Mutable<ILogicalExpression>> p : op.getDecorList()) {
-                sweepExpression(p.second.getValue(), op);
-            }
-            return null;
-        }
-
-        @Override
-        public Void visitLimitOperator(LimitOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitInnerJoinOperator(InnerJoinOperator op, Void arg) throws AlgebricksException {
-            sweepExpression(op.getCondition().getValue(), op);
-            return null;
-        }
-
-        @Override
-        public Void visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Void arg) throws AlgebricksException {
-            sweepExpression(op.getCondition().getValue(), op);
-            return null;
-        }
-
-        @Override
-        public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitOrderOperator(OrderOperator op, Void arg) throws AlgebricksException {
-            for (Pair<IOrder, Mutable<ILogicalExpression>> p : op.getOrderExpressions()) {
-                sweepExpression(p.second.getValue(), op);
-            }
-            return null;
-        }
-
-        @Override
-        public Void visitAssignOperator(AssignOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitSelectOperator(SelectOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitExtensionOperator(ExtensionOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
-            for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
-                sweepExpression(expr.getValue(), op);
-            }
-            return null;
-        }
-
-        @Override
-        public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitMaterializeOperator(MaterializeOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitScriptOperator(ScriptOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitSinkOperator(SinkOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitUnionOperator(UnionAllOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitUnnestOperator(UnnestOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitUnnestMapOperator(UnnestMapOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitDataScanOperator(DataSourceScanOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitDistinctOperator(DistinctOperator op, Void arg) throws AlgebricksException {
-            for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
-                sweepExpression(expr.getValue(), op);
-            }
-            return null;
-        }
-
-        @Override
-        public Void visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitWriteOperator(WriteOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitWriteResultOperator(WriteResultOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitInsertDeleteOperator(InsertDeleteOperator op, Void tag) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, Void tag) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitTokenizeOperator(TokenizeOperator op, Void tag) throws AlgebricksException {
-            return null;
-        }
-
-        @Override
-        public Void visitExternalDataLookupOperator(ExternalDataLookupOperator op, Void arg) throws AlgebricksException {
-            return null;
-        }
-
-    }
-
-}
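
For context on the deleted visitor above: its only real work happens in sweepExpression, which rejects a non-functional function call (one whose result may change between evaluations) wherever it appears in an aggregate, group-by, join, order, distinct, or split expression. A minimal standalone sketch of that check follows, with a tiny Expr interface standing in for Algebricks' ILogicalExpression; the interface is an assumption made only to keep the example self-contained.

    // NonfunctionalSweep.java -- illustrative sketch, not part of the commit.
    import java.util.List;

    public final class NonfunctionalSweep {

        // Simplified stand-in for ILogicalExpression.
        interface Expr {
            boolean isFunctionCall();
            boolean isFunctional();   // false for non-deterministic or stateful calls
            String name();
        }

        // Mirrors sweepExpression: only function calls are inspected, and a
        // non-functional one in this position is reported as an error.
        static void sweep(Expr expr, String operatorDescription) {
            if (expr.isFunctionCall() && !expr.isFunctional()) {
                throw new IllegalStateException("Found non-functional function "
                        + expr.name() + " in op " + operatorDescription);
            }
        }

        // The visitor applies the same check to every expression an operator carries.
        static void sweepAll(List<Expr> exprs, String operatorDescription) {
            for (Expr expr : exprs) {
                sweep(expr, operatorDescription);
            }
        }
    }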

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java
deleted file mode 100644
index 6defc07..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/UnnestToDataScanRule.java
+++ /dev/null
@@ -1,248 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.optimizer.rules;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
-import edu.uci.ics.asterix.common.feeds.FeedActivity.FeedActivityDetails;
-import edu.uci.ics.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.declared.FeedDataSource;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.metadata.entities.Feed;
-import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
-import edu.uci.ics.asterix.metadata.feeds.BuiltinFeedPolicies;
-import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.optimizer.rules.util.EquivalenceClassUtils;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
-
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
-        if (op.getOperatorTag() != LogicalOperatorTag.UNNEST) {
-            return false;
-        }
-        UnnestOperator unnest = (UnnestOperator) op;
-        ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue();
-
-        if (unnestExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
-            AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) unnestExpr;
-            FunctionIdentifier fid = f.getFunctionIdentifier();
-
-            if (fid.equals(AsterixBuiltinFunctions.DATASET)) {
-                if (unnest.getPositionalVariable() != null) {
-                    // TODO remove this after enabling the support of positional variables in data scan
-                    throw new AlgebricksException("No positional variables are allowed over datasets.");
-                }
-                ILogicalExpression expr = f.getArguments().get(0).getValue();
-                if (expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-                    return false;
-                }
-                ConstantExpression ce = (ConstantExpression) expr;
-                IAlgebricksConstantValue acv = ce.getValue();
-                if (!(acv instanceof AsterixConstantValue)) {
-                    return false;
-                }
-                AsterixConstantValue acv2 = (AsterixConstantValue) acv;
-                if (acv2.getObject().getType().getTypeTag() != ATypeTag.STRING) {
-                    return false;
-                }
-                String datasetArg = ((AString) acv2.getObject()).getStringValue();
-
-                AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-                Pair<String, String> datasetReference = parseDatasetReference(metadataProvider, datasetArg);
-                String dataverseName = datasetReference.first;
-                String datasetName = datasetReference.second;
-                Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-                if (dataset == null) {
-                    throw new AlgebricksException("Could not find dataset " + datasetName + " in dataverse "
-                            + dataverseName);
-                }
-
-                AqlSourceId asid = new AqlSourceId(dataverseName, datasetName);
-
-                ArrayList<LogicalVariable> v = new ArrayList<LogicalVariable>();
-
-                if (dataset.getDatasetType() == DatasetType.INTERNAL) {
-                    int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
-                    for (int i = 0; i < numPrimaryKeys; i++) {
-                        v.add(context.newVar());
-                    }
-                }
-                v.add(unnest.getVariable());
-                AqlDataSource dataSource = metadataProvider.findDataSource(asid);
-                DataSourceScanOperator scan = new DataSourceScanOperator(v, dataSource);
-                List<Mutable<ILogicalOperator>> scanInpList = scan.getInputs();
-                scanInpList.addAll(unnest.getInputs());
-                opRef.setValue(scan);
-                addPrimaryKey(v, context);
-                context.computeAndSetTypeEnvironmentForOperator(scan);
-
-                // Adds equivalence classes --- one equivalent class between a primary key
-                // variable and a record field-access expression.
-                IAType[] schemaTypes = dataSource.getSchemaTypes();
-                ARecordType recordType = (ARecordType) schemaTypes[schemaTypes.length - 1];
-                EquivalenceClassUtils.addEquivalenceClassesForPrimaryIndexAccess(scan, v, recordType, dataset, context);
-                return true;
-            }
-
-            if (fid.equals(AsterixBuiltinFunctions.FEED_COLLECT)) {
-                if (unnest.getPositionalVariable() != null) {
-                    throw new AlgebricksException("No positional variables are allowed over datasets.");
-                }
-
-                String dataverse = getStringArgument(f, 0);
-                String sourceFeedName = getStringArgument(f, 1);
-                String getTargetFeed = getStringArgument(f, 2);
-                String subscriptionLocation = getStringArgument(f, 3);
-                String targetDataset = getStringArgument(f, 4);
-                String outputType = getStringArgument(f, 5);
-
-                AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-
-                AqlSourceId asid = new AqlSourceId(dataverse, getTargetFeed);
-                String policyName = metadataProvider.getConfig().get(FeedActivityDetails.FEED_POLICY_NAME);
-                FeedPolicy policy = metadataProvider.findFeedPolicy(dataverse, policyName);
-                if (policy == null) {
-                    policy = BuiltinFeedPolicies.getFeedPolicy(policyName);
-                    if (policy == null) {
-                        throw new AlgebricksException("Unknown feed policy:" + policyName);
-                    }
-                }
-
-                ArrayList<LogicalVariable> v = new ArrayList<LogicalVariable>();
-                v.add(unnest.getVariable());
-
-                String csLocations = metadataProvider.getConfig().get(FeedActivityDetails.COLLECT_LOCATIONS);
-                DataSourceScanOperator scan = new DataSourceScanOperator(v, createFeedDataSource(asid, targetDataset,
-                        sourceFeedName, subscriptionLocation, metadataProvider, policy, outputType, csLocations));
-
-                List<Mutable<ILogicalOperator>> scanInpList = scan.getInputs();
-                scanInpList.addAll(unnest.getInputs());
-                opRef.setValue(scan);
-                addPrimaryKey(v, context);
-                context.computeAndSetTypeEnvironmentForOperator(scan);
-
-                return true;
-            }
-
-        }
-
-        return false;
-    }
-
-    public void addPrimaryKey(List<LogicalVariable> scanVariables, IOptimizationContext context) {
-        int n = scanVariables.size();
-        List<LogicalVariable> head = new ArrayList<LogicalVariable>(scanVariables.subList(0, n - 1));
-        List<LogicalVariable> tail = new ArrayList<LogicalVariable>(1);
-        tail.add(scanVariables.get(n - 1));
-        FunctionalDependency pk = new FunctionalDependency(head, tail);
-        context.addPrimaryKey(pk);
-    }
-
-    private AqlDataSource createFeedDataSource(AqlSourceId aqlId, String targetDataset, String sourceFeedName,
-            String subscriptionLocation, AqlMetadataProvider metadataProvider, FeedPolicy feedPolicy,
-            String outputType, String locations) throws AlgebricksException {
-        if (!aqlId.getDataverseName().equals(
-                metadataProvider.getDefaultDataverse() == null ? null : metadataProvider.getDefaultDataverse()
-                        .getDataverseName())) {
-            return null;
-        }
-        IAType feedOutputType = metadataProvider.findType(aqlId.getDataverseName(), outputType);
-        Feed sourceFeed = metadataProvider.findFeed(aqlId.getDataverseName(), sourceFeedName);
-
-        FeedDataSource feedDataSource = new FeedDataSource(aqlId, targetDataset, feedOutputType,
-                AqlDataSource.AqlDataSourceType.FEED, sourceFeed.getFeedId(), sourceFeed.getFeedType(),
-                ConnectionLocation.valueOf(subscriptionLocation), locations.split(","));
-        feedDataSource.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy);
-        return feedDataSource;
-    }
-
-    private Pair<String, String> parseDatasetReference(AqlMetadataProvider metadataProvider, String datasetArg)
-            throws AlgebricksException {
-        String[] datasetNameComponents = datasetArg.split("\\.");
-        String dataverseName;
-        String datasetName;
-        if (datasetNameComponents.length == 1) {
-            Dataverse defaultDataverse = metadataProvider.getDefaultDataverse();
-            if (defaultDataverse == null) {
-                throw new AlgebricksException("Unresolved dataset " + datasetArg + " Dataverse not specified.");
-            }
-            dataverseName = defaultDataverse.getDataverseName();
-            datasetName = datasetNameComponents[0];
-        } else {
-            dataverseName = datasetNameComponents[0];
-            datasetName = datasetNameComponents[1];
-        }
-        return new Pair<String, String>(dataverseName, datasetName);
-    }
-
-    private String getStringArgument(AbstractFunctionCallExpression f, int index) {
-
-        ILogicalExpression expr = f.getArguments().get(index).getValue();
-        if (expr.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
-            return null;
-        }
-        ConstantExpression ce = (ConstantExpression) expr;
-        IAlgebricksConstantValue acv = ce.getValue();
-        if (!(acv instanceof AsterixConstantValue)) {
-            return null;
-        }
-        AsterixConstantValue acv2 = (AsterixConstantValue) acv;
-        if (acv2.getObject().getType().getTypeTag() != ATypeTag.STRING) {
-            return null;
-        }
-        String argument = ((AString) acv2.getObject()).getStringValue();
-        return argument;
-    }
-
-}
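
One detail of the deleted rule that is easy to study in isolation is parseDatasetReference: the dataset argument is either "dataverse.dataset" or a bare dataset name resolved against the session's default dataverse. The sketch below mirrors that logic; the AqlMetadataProvider is replaced by a plain default-dataverse string, an assumption made only so the example is self-contained and runnable.

    // DatasetReference.java -- illustrative sketch, not part of the commit.
    public final class DatasetReference {

        final String dataverseName;
        final String datasetName;

        private DatasetReference(String dataverseName, String datasetName) {
            this.dataverseName = dataverseName;
            this.datasetName = datasetName;
        }

        static DatasetReference parse(String datasetArg, String defaultDataverse) {
            String[] components = datasetArg.split("\\.");
            if (components.length == 1) {
                // Unqualified name: fall back to the default dataverse, if any.
                if (defaultDataverse == null) {
                    throw new IllegalArgumentException(
                            "Unresolved dataset " + datasetArg + " Dataverse not specified.");
                }
                return new DatasetReference(defaultDataverse, components[0]);
            }
            // Qualified name: "dataverse.dataset".
            return new DatasetReference(components[0], components[1]);
        }

        public static void main(String[] args) {
            DatasetReference qualified = parse("MyDataverse.MyDataset", null);
            DatasetReference bare = parse("MyDataset", "MyDataverse");
            System.out.println(qualified.dataverseName + " / " + qualified.datasetName);
            System.out.println(bare.dataverseName + " / " + bare.datasetName);
        }
    }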



http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java
deleted file mode 100644
index 1ce5b5f..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/AsterixOperatorAnnotations.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.base;
-
-public interface AsterixOperatorAnnotations {
-    public static final String FIELD_ACCESS = "FIELD_ACCESS";
-    public static final String PUSHED_FIELD_ACCESS = "PUSHED_FIELD_ACCESS";
-    public static final String PUSHED_RUNNABLE_FIELD_ACCESS = "PUSHED_RUNNABLE_FIELD_ACCESS";
-    public static final String FIELD_TYPE = "FIELD_TYPE";
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
deleted file mode 100644
index a6ae0b1..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalExpressionDeepCopyVisitor.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.base;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
-
-public class LogicalExpressionDeepCopyVisitor implements ILogicalExpressionVisitor<ILogicalExpression, Void> {
-    private final Counter counter;
-    private final Map<LogicalVariable, LogicalVariable> inVarMapping;
-    private final Map<LogicalVariable, LogicalVariable> outVarMapping;
-
-    public LogicalExpressionDeepCopyVisitor(Counter counter, Map<LogicalVariable, LogicalVariable> inVarMapping,
-            Map<LogicalVariable, LogicalVariable> variableMapping) {
-        this.counter = counter;
-        this.inVarMapping = inVarMapping;
-        this.outVarMapping = variableMapping;
-    }
-
-    public ILogicalExpression deepCopy(ILogicalExpression expr) throws AlgebricksException {
-        return expr.accept(this, null);
-    }
-
-    private void deepCopyAnnotations(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
-        Map<Object, IExpressionAnnotation> srcAnnotations = src.getAnnotations();
-        Map<Object, IExpressionAnnotation> destAnnotations = dest.getAnnotations();
-        for (Object k : srcAnnotations.keySet()) {
-            IExpressionAnnotation annotation = srcAnnotations.get(k).copy();
-            destAnnotations.put(k, annotation);
-        }
-    }
-
-    private void deepCopyOpaqueParameters(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
-        Object[] srcOpaqueParameters = src.getOpaqueParameters();
-        Object[] newOpaqueParameters = null;
-        if (srcOpaqueParameters != null) {
-            newOpaqueParameters = new Object[srcOpaqueParameters.length];
-            for (int i = 0; i < srcOpaqueParameters.length; i++) {
-                newOpaqueParameters[i] = srcOpaqueParameters[i];
-            }
-        }
-        dest.setOpaqueParameters(newOpaqueParameters);
-    }
-
-    public MutableObject<ILogicalExpression> deepCopyExpressionReference(Mutable<ILogicalExpression> exprRef)
-            throws AlgebricksException {
-        return new MutableObject<ILogicalExpression>(deepCopy(exprRef.getValue()));
-    }
-
-    // TODO return List<...>
-    public ArrayList<Mutable<ILogicalExpression>> deepCopyExpressionReferenceList(List<Mutable<ILogicalExpression>> list)
-            throws AlgebricksException {
-        ArrayList<Mutable<ILogicalExpression>> listCopy = new ArrayList<Mutable<ILogicalExpression>>(list.size());
-        for (Mutable<ILogicalExpression> exprRef : list) {
-            listCopy.add(deepCopyExpressionReference(exprRef));
-        }
-        return listCopy;
-    }
-
-    @Override
-    public ILogicalExpression visitAggregateFunctionCallExpression(AggregateFunctionCallExpression expr, Void arg)
-            throws AlgebricksException {
-        AggregateFunctionCallExpression exprCopy = new AggregateFunctionCallExpression(expr.getFunctionInfo(),
-                expr.isTwoStep(), deepCopyExpressionReferenceList(expr.getArguments()));
-        deepCopyAnnotations(expr, exprCopy);
-        deepCopyOpaqueParameters(expr, exprCopy);
-        return exprCopy;
-    }
-
-    @Override
-    public ILogicalExpression visitConstantExpression(ConstantExpression expr, Void arg) throws AlgebricksException {
-        return new ConstantExpression(expr.getValue());
-    }
-
-    @Override
-    public ILogicalExpression visitScalarFunctionCallExpression(ScalarFunctionCallExpression expr, Void arg)
-            throws AlgebricksException {
-        ScalarFunctionCallExpression exprCopy = new ScalarFunctionCallExpression(expr.getFunctionInfo(),
-                deepCopyExpressionReferenceList(expr.getArguments()));
-        deepCopyAnnotations(expr, exprCopy);
-        deepCopyOpaqueParameters(expr, exprCopy);
-        return exprCopy;
-
-    }
-
-    @Override
-    public ILogicalExpression visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, Void arg)
-            throws AlgebricksException {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalExpression visitUnnestingFunctionCallExpression(UnnestingFunctionCallExpression expr, Void arg)
-            throws AlgebricksException {
-        UnnestingFunctionCallExpression exprCopy = new UnnestingFunctionCallExpression(expr.getFunctionInfo(),
-                deepCopyExpressionReferenceList(expr.getArguments()));
-        deepCopyAnnotations(expr, exprCopy);
-        deepCopyOpaqueParameters(expr, exprCopy);
-        return exprCopy;
-    }
-
-    @Override
-    public ILogicalExpression visitVariableReferenceExpression(VariableReferenceExpression expr, Void arg)
-            throws AlgebricksException {
-        LogicalVariable var = expr.getVariableReference();
-        LogicalVariable givenVarReplacement = inVarMapping.get(var);
-        if (givenVarReplacement != null) {
-            outVarMapping.put(var, givenVarReplacement);
-            return new VariableReferenceExpression(givenVarReplacement);
-        }
-        LogicalVariable varCopy = outVarMapping.get(var);
-        if (varCopy == null) {
-            counter.inc();
-            varCopy = new LogicalVariable(counter.get());
-            outVarMapping.put(var, varCopy);
-        }
-        return new VariableReferenceExpression(varCopy);
-    }
-}
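
The part of the deleted visitor worth calling out is how variables are remapped during the deep copy: a caller-supplied substitution (inVarMapping) takes precedence, otherwise a fresh variable is allocated from the counter the first time an original variable is seen and then reused on every later reference (outVarMapping). The same discipline appears in LogicalOperatorDeepCopyVisitor below. Here is a self-contained sketch with plain int variable ids standing in for LogicalVariable and Counter, an assumption made for brevity.

    // VariableRemapper.java -- illustrative sketch, not part of the commit.
    import java.util.HashMap;
    import java.util.Map;

    public final class VariableRemapper {

        private int counter;                                   // last id handed out
        private final Map<Integer, Integer> inVarMapping;      // caller-supplied replacements
        private final Map<Integer, Integer> outVarMapping = new HashMap<>(); // copies made so far

        public VariableRemapper(int counterStart, Map<Integer, Integer> inVarMapping) {
            this.counter = counterStart;
            this.inVarMapping = inVarMapping;
        }

        public int remap(int var) {
            Integer given = inVarMapping.get(var);
            if (given != null) {
                // A replacement was requested up front; record and use it.
                outVarMapping.put(var, given);
                return given;
            }
            Integer copy = outVarMapping.get(var);
            if (copy == null) {
                // First time this variable is seen in the copied plan: mint a new id.
                copy = ++counter;
                outVarMapping.put(var, copy);
            }
            return copy;
        }

        public static void main(String[] args) {
            Map<Integer, Integer> given = new HashMap<>();
            given.put(3, 99);
            VariableRemapper remapper = new VariableRemapper(10, given);
            System.out.println(remapper.remap(1)); // 11 (fresh copy)
            System.out.println(remapper.remap(1)); // 11 (reused)
            System.out.println(remapper.remap(3)); // 99 (caller-supplied replacement)
        }
    }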

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
deleted file mode 100644
index cedc962..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
+++ /dev/null
@@ -1,469 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.base;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.Counter;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
-
-public class LogicalOperatorDeepCopyVisitor implements ILogicalOperatorVisitor<ILogicalOperator, ILogicalOperator> {
-    private final Counter counter;
-    private final LogicalExpressionDeepCopyVisitor exprDeepCopyVisitor;
-
-    // Key: Variable in the original plan. Value: New variable replacing the original one in the copied plan.
-    private final Map<LogicalVariable, LogicalVariable> outVarMapping = new HashMap<LogicalVariable, LogicalVariable>();
-
-    // Key: Variable in the original plan. Value: Variable with which to replace original variable in the plan copy.
-    private final Map<LogicalVariable, LogicalVariable> inVarMapping;
-
-    public LogicalOperatorDeepCopyVisitor(Counter counter) {
-        this.counter = counter;
-        this.inVarMapping = Collections.emptyMap();
-        exprDeepCopyVisitor = new LogicalExpressionDeepCopyVisitor(counter, inVarMapping, outVarMapping);
-    }
-
-    /**
-     * @param counter
-     *            Starting variable counter.
-     * @param inVarMapping
-     *            Variable mapping keyed by variables in the original plan.
-     *            Those variables are replaced by their corresponding value in the map in the copied plan.
-     */
-    public LogicalOperatorDeepCopyVisitor(Counter counter, Map<LogicalVariable, LogicalVariable> inVarMapping) {
-        this.counter = counter;
-        this.inVarMapping = inVarMapping;
-        exprDeepCopyVisitor = new LogicalExpressionDeepCopyVisitor(counter, inVarMapping, outVarMapping);
-    }
-
-    private void copyAnnotations(ILogicalOperator src, ILogicalOperator dest) {
-        dest.getAnnotations().putAll(src.getAnnotations());
-    }
-
-    public ILogicalOperator deepCopy(ILogicalOperator op, ILogicalOperator arg) throws AlgebricksException {
-        return op.accept(this, arg);
-    }
-
-    private void deepCopyInputs(ILogicalOperator src, ILogicalOperator dest, ILogicalOperator arg)
-            throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> inputs = src.getInputs();
-        List<Mutable<ILogicalOperator>> inputsCopy = dest.getInputs();
-        for (Mutable<ILogicalOperator> input : inputs) {
-            inputsCopy.add(deepCopyOperatorReference(input, arg));
-        }
-    }
-
-    private Mutable<ILogicalOperator> deepCopyOperatorReference(Mutable<ILogicalOperator> opRef, ILogicalOperator arg)
-            throws AlgebricksException {
-        return new MutableObject<ILogicalOperator>(deepCopy(opRef.getValue(), arg));
-    }
-
-    private List<Mutable<ILogicalOperator>> deepCopyOperatorReferenceList(List<Mutable<ILogicalOperator>> list,
-            ILogicalOperator arg) throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> listCopy = new ArrayList<Mutable<ILogicalOperator>>(list.size());
-        for (Mutable<ILogicalOperator> opRef : list) {
-            listCopy.add(deepCopyOperatorReference(opRef, arg));
-        }
-        return listCopy;
-    }
-
-    private IOrder deepCopyOrder(IOrder order) {
-        switch (order.getKind()) {
-            case ASC:
-            case DESC:
-                return order;
-            case FUNCTIONCALL:
-            default:
-                throw new UnsupportedOperationException();
-        }
-    }
-
-    private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderExpressionReferencePairList(
-            List<Pair<IOrder, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
-        ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>(
-                list.size());
-        for (Pair<IOrder, Mutable<ILogicalExpression>> pair : list) {
-            listCopy.add(new Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>(deepCopyOrder(pair.first),
-                    exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
-        }
-        return listCopy;
-    }
-
-    private ILogicalPlan deepCopyPlan(ILogicalPlan plan, ILogicalOperator arg) throws AlgebricksException {
-        List<Mutable<ILogicalOperator>> rootsCopy = deepCopyOperatorReferenceList(plan.getRoots(), arg);
-        ILogicalPlan planCopy = new ALogicalPlanImpl(rootsCopy);
-        return planCopy;
-    }
-
-    private List<ILogicalPlan> deepCopyPlanList(List<ILogicalPlan> list, List<ILogicalPlan> listCopy,
-            ILogicalOperator arg) throws AlgebricksException {
-        for (ILogicalPlan plan : list) {
-            listCopy.add(deepCopyPlan(plan, arg));
-        }
-        return listCopy;
-    }
-
-    private LogicalVariable deepCopyVariable(LogicalVariable var) {
-        if (var == null) {
-            return null;
-        }
-        LogicalVariable givenVarReplacement = inVarMapping.get(var);
-        if (givenVarReplacement != null) {
-            outVarMapping.put(var, givenVarReplacement);
-            return givenVarReplacement;
-        }
-        LogicalVariable varCopy = outVarMapping.get(var);
-        if (varCopy == null) {
-            counter.inc();
-            varCopy = new LogicalVariable(counter.get());
-            outVarMapping.put(var, varCopy);
-        }
-        return varCopy;
-    }
-
-    private List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> deepCopyVariableExpressionReferencePairList(
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
-        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(
-                list.size());
-        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : list) {
-            listCopy.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(deepCopyVariable(pair.first),
-                    exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
-        }
-        return listCopy;
-    }
-
-    // TODO return List<...>
-    private ArrayList<LogicalVariable> deepCopyVariableList(List<LogicalVariable> list) {
-        ArrayList<LogicalVariable> listCopy = new ArrayList<LogicalVariable>(list.size());
-        for (LogicalVariable var : list) {
-            listCopy.add(deepCopyVariable(var));
-        }
-        return listCopy;
-    }
-
-    public void reset() {
-        outVarMapping.clear();
-    }
-
-    public void updatePrimaryKeys(IOptimizationContext context) {
-        for (Map.Entry<LogicalVariable, LogicalVariable> entry : outVarMapping.entrySet()) {
-            List<LogicalVariable> primaryKey = context.findPrimaryKey(entry.getKey());
-            if (primaryKey != null) {
-                List<LogicalVariable> head = new ArrayList<LogicalVariable>();
-                for (LogicalVariable variable : primaryKey) {
-                    head.add(outVarMapping.get(variable));
-                }
-                List<LogicalVariable> tail = new ArrayList<LogicalVariable>(1);
-                tail.add(entry.getValue());
-                context.addPrimaryKey(new FunctionalDependency(head, tail));
-            }
-        }
-    }
-
-    public LogicalVariable varCopy(LogicalVariable var) throws AlgebricksException {
-        return outVarMapping.get(var);
-    }
-
-    @Override
-    public ILogicalOperator visitAggregateOperator(AggregateOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AggregateOperator opCopy = new AggregateOperator(deepCopyVariableList(op.getVariables()),
-                exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitAssignOperator(AssignOperator op, ILogicalOperator arg) throws AlgebricksException {
-        AssignOperator opCopy = new AssignOperator(deepCopyVariableList(op.getVariables()),
-                exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitDataScanOperator(DataSourceScanOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        DataSourceScanOperator opCopy = new DataSourceScanOperator(deepCopyVariableList(op.getVariables()),
-                op.getDataSource());
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitDistinctOperator(DistinctOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, ILogicalOperator arg) {
-        EmptyTupleSourceOperator opCopy = new EmptyTupleSourceOperator();
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitExchangeOperator(ExchangeOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitGroupByOperator(GroupByOperator op, ILogicalOperator arg) throws AlgebricksException {
-        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByListCopy = deepCopyVariableExpressionReferencePairList(op
-                .getGroupByList());
-        List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorListCopy = deepCopyVariableExpressionReferencePairList(op
-                .getDecorList());
-        List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
-
-        GroupByOperator opCopy = new GroupByOperator(groupByListCopy, decorListCopy, nestedPlansCopy);
-        deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        InnerJoinOperator opCopy = new InnerJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op
-                .getCondition()), deepCopyOperatorReference(op.getInputs().get(0), null), deepCopyOperatorReference(op
-                .getInputs().get(1), null));
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitLeftOuterJoinOperator(LeftOuterJoinOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitLimitOperator(LimitOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(arg));
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitOrderOperator(OrderOperator op, ILogicalOperator arg) throws AlgebricksException {
-        OrderOperator opCopy = new OrderOperator(deepCopyOrderExpressionReferencePairList(op.getOrderExpressions()));
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitProjectOperator(ProjectOperator op, ILogicalOperator arg) throws AlgebricksException {
-        ProjectOperator opCopy = new ProjectOperator(deepCopyVariableList(op.getVariables()));
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitMaterializeOperator(MaterializeOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitRunningAggregateOperator(RunningAggregateOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitScriptOperator(ScriptOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitSelectOperator(SelectOperator op, ILogicalOperator arg) throws AlgebricksException {
-        SelectOperator opCopy = new SelectOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
-                op.getRetainNull(), deepCopyVariable(op.getNullPlaceholderVariable()));
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitSubplanOperator(SubplanOperator op, ILogicalOperator arg) throws AlgebricksException {
-        List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
-
-        SubplanOperator opCopy = new SubplanOperator(nestedPlansCopy);
-        deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitUnionOperator(UnionAllOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitUnnestMapOperator(UnnestMapOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        UnnestMapOperator opCopy = new UnnestMapOperator(deepCopyVariableList(op.getVariables()),
-                exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()), op.getVariableTypes(),
-                op.propagatesInput());
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitUnnestOperator(UnnestOperator op, ILogicalOperator arg) throws AlgebricksException {
-        UnnestOperator opCopy = new UnnestOperator(deepCopyVariable(op.getVariable()),
-                exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()),
-                deepCopyVariable(op.getPositionalVariable()), op.getPositionalVariableType(), op.getPositionWriter());
-        deepCopyInputs(op, opCopy, arg);
-        copyAnnotations(op, opCopy);
-        opCopy.setExecutionMode(op.getExecutionMode());
-        return opCopy;
-    }
-
-    @Override
-    public ILogicalOperator visitWriteOperator(WriteOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitDistributeResultOperator(DistributeResultOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitWriteResultOperator(WriteResultOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitInsertDeleteOperator(InsertDeleteOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitTokenizeOperator(TokenizeOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitSinkOperator(SinkOperator op, ILogicalOperator arg) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public ILogicalOperator visitExtensionOperator(ExtensionOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        throw new UnsupportedOperationException();
-    }
-
-    public Map<LogicalVariable, LogicalVariable> getVariableMapping() {
-        return outVarMapping;
-    }
-
-    @Override
-    public ILogicalOperator visitExternalDataLookupOperator(ExternalDataLookupOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        throw new UnsupportedOperationException();
-    }
-}
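Every supported visit method in the deep-copy visitor deleted above follows the same template: build a fresh operator from deep-copied arguments, recursively deep-copy the inputs, carry over the annotations and the execution mode, and return the copy; operators the visitor does not handle simply throw UnsupportedOperationException. The toy sketch below is not part of the commit and uses invented Op, DeepCopyVisitor, and DeepCopyDemo classes purely to show that template in isolation:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Toy operator tree node: a name, an execution mode, annotations, and input operators.
class Op {
    final String name;
    String executionMode = "UNPARTITIONED";
    final Map<String, String> annotations = new HashMap<>();
    final List<Op> inputs = new ArrayList<>();
    Op(String name) { this.name = name; }
}

// Deep copy in the same shape as the visitor methods above:
// construct a fresh node, copy inputs recursively, then carry metadata over.
class DeepCopyVisitor {
    Op visit(Op op) {
        Op copy = new Op(op.name);                 // corresponds to "new XOperator(deepCopy...)"
        for (Op in : op.inputs) {                  // corresponds to deepCopyInputs(op, opCopy, arg)
            copy.inputs.add(visit(in));
        }
        copy.annotations.putAll(op.annotations);   // corresponds to copyAnnotations(op, opCopy)
        copy.executionMode = op.executionMode;     // corresponds to opCopy.setExecutionMode(...)
        return copy;
    }
}

public class DeepCopyDemo {
    public static void main(String[] args) {
        Op scan = new Op("scan");
        Op select = new Op("select");
        select.inputs.add(scan);
        select.annotations.put("hint", "useIndex");
        Op copy = new DeepCopyVisitor().visit(select);
        System.out.println(copy.name + " over " + copy.inputs.get(0).name
                + " with " + copy.annotations);    // prints: select over scan with {hint=useIndex}
    }
}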

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/CommitOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/CommitOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/CommitOperator.java
deleted file mode 100644
index d65c67d..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/CommitOperator.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.algebra.operators;
-
-import java.util.Collection;
-import java.util.List;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractExtensibleLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorExtension;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-
-public class CommitOperator extends AbstractExtensibleLogicalOperator {
-
-    private final List<LogicalVariable> primaryKeyLogicalVars;
-
-    public CommitOperator(List<LogicalVariable> primaryKeyLogicalVars) {
-        this.primaryKeyLogicalVars = primaryKeyLogicalVars;
-    }
-
-    @Override
-    public boolean isMap() {
-        // TODO Auto-generated method stub
-        return false;
-    }
-
-    @Override
-    public IOperatorExtension newInstance() {
-        return new CommitOperator(primaryKeyLogicalVars);
-    }
-
-    @Override
-    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
-        // TODO Auto-generated method stub
-        return false;
-    }
-
-    @Override
-    public String toString() {
-        return "commit";
-    }
-
-    @Override
-    public void getUsedVariables(Collection<LogicalVariable> usedVars) {
-        usedVars.addAll(primaryKeyLogicalVars);
-    }
-
-    @Override
-    public void getProducedVariables(Collection<LogicalVariable> producedVars) {
-        // No produced variables.
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java
deleted file mode 100644
index 5a4b52a..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/BTreeSearchPOperator.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.optimizer.rules.am.BTreeJobGenParams;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.ListSet;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.LocalOrderProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-
-/**
- * Contributes the runtime operator for an unnest-map representing a BTree search.
- */
-public class BTreeSearchPOperator extends IndexSearchPOperator {
-
-    private final List<LogicalVariable> lowKeyVarList;
-    private final List<LogicalVariable> highKeyVarList;
-    private final boolean isPrimaryIndex;
-    private final boolean isEqCondition;
-    private Object implConfig;
-
-    public BTreeSearchPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast,
-            boolean isPrimaryIndex, boolean isEqCondition, List<LogicalVariable> lowKeyVarList,
-            List<LogicalVariable> highKeyVarList) {
-        super(idx, requiresBroadcast);
-        this.isPrimaryIndex = isPrimaryIndex;
-        this.isEqCondition = isEqCondition;
-        this.lowKeyVarList = lowKeyVarList;
-        this.highKeyVarList = highKeyVarList;
-    }
-
-    public void setImplConfig(Object implConfig) {
-        this.implConfig = implConfig;
-    }
-
-    public Object getImplConfig() {
-        return implConfig;
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        return PhysicalOperatorTag.BTREE_SEARCH;
-    }
-
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-        UnnestMapOperator unnestMap = (UnnestMapOperator) op;
-        ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
-        if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            throw new IllegalStateException();
-        }
-        AbstractFunctionCallExpression unnestFuncExpr = (AbstractFunctionCallExpression) unnestExpr;
-        FunctionIdentifier funcIdent = unnestFuncExpr.getFunctionIdentifier();
-        if (!funcIdent.equals(AsterixBuiltinFunctions.INDEX_SEARCH)) {
-            return;
-        }
-        BTreeJobGenParams jobGenParams = new BTreeJobGenParams();
-        jobGenParams.readFromFuncArgs(unnestFuncExpr.getArguments());
-        int[] lowKeyIndexes = getKeyIndexes(jobGenParams.getLowKeyVarList(), inputSchemas);
-        int[] highKeyIndexes = getKeyIndexes(jobGenParams.getHighKeyVarList(), inputSchemas);
-
-        int[] minFilterFieldIndexes = getKeyIndexes(unnestMap.getMinFilterVars(), inputSchemas);
-        int[] maxFilterFieldIndexes = getKeyIndexes(unnestMap.getMaxFilterVars(), inputSchemas);
-
-        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-        Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
-        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
-        List<LogicalVariable> outputVars = unnestMap.getVariables();
-        if (jobGenParams.getRetainInput()) {
-            outputVars = new ArrayList<LogicalVariable>();
-            VariableUtilities.getLiveVariables(unnestMap, outputVars);
-        }
-        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> btreeSearch = metadataProvider.buildBtreeRuntime(
-                builder.getJobSpec(), outputVars, opSchema, typeEnv, context, jobGenParams.getRetainInput(),
-                jobGenParams.getRetainNull(), dataset, jobGenParams.getIndexName(), lowKeyIndexes, highKeyIndexes,
-                jobGenParams.isLowKeyInclusive(), jobGenParams.isHighKeyInclusive(), implConfig, minFilterFieldIndexes,
-                maxFilterFieldIndexes);
-
-        builder.contributeHyracksOperator(unnestMap, btreeSearch.first);
-        builder.contributeAlgebricksPartitionConstraint(btreeSearch.first, btreeSearch.second);
-
-        ILogicalOperator srcExchange = unnestMap.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(srcExchange, 0, unnestMap, 0);
-    }
-
-    @Override
-    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
-            IPhysicalPropertiesVector reqdByParent) {
-        if (requiresBroadcast) {
-            // For primary indexes optimizing an equality condition we can reduce the broadcast requirement to hash partitioning.
-            if (isPrimaryIndex && isEqCondition) {
-                StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
-                ListSet<LogicalVariable> searchKeyVars = new ListSet<LogicalVariable>();
-                searchKeyVars.addAll(lowKeyVarList);
-                searchKeyVars.addAll(highKeyVarList);
-                // Also, add a local sorting property to enforce a sort before the primary-index operator.
-                List<ILocalStructuralProperty> propsLocal = new ArrayList<ILocalStructuralProperty>();
-                List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
-                for (LogicalVariable orderVar : searchKeyVars) {
-                    orderColumns.add(new OrderColumn(orderVar, OrderKind.ASC));
-                }
-                propsLocal.add(new LocalOrderProperty(orderColumns));
-                pv[0] = new StructuralPropertiesVector(new UnorderedPartitionedProperty(searchKeyVars, null),
-                        propsLocal);
-                return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
-            } else {
-                StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
-                pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
-                return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
-            }
-        } else {
-            return super.getRequiredPropertiesForChildren(op, reqdByParent);
-        }
-    }
-}
\ No newline at end of file
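getRequiredPropertiesForChildren in the BTreeSearchPOperator deleted above decides what the operator asks of its child: when a broadcast would normally be required but the search is an equality lookup on the primary index, it relaxes the requirement to hash partitioning on the search-key variables plus a local sort on them; otherwise it keeps the broadcast requirement, and when no broadcast is needed it falls back to the superclass default. The stand-alone sketch below mirrors only that decision logic with invented PartitioningChoice and BTreeRequirementDemo types, not the real Algebricks property classes:

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Toy model of the child-partitioning decision (illustration only).
class PartitioningChoice {
    enum Kind { BROADCAST, HASH_PARTITIONED, INHERITED }
    final Kind kind;
    final Set<String> partitionKeys;   // meaningful only for HASH_PARTITIONED
    final List<String> localSortKeys;  // local order enforced before the primary-index search
    PartitioningChoice(Kind kind, Set<String> keys, List<String> sortKeys) {
        this.kind = kind; this.partitionKeys = keys; this.localSortKeys = sortKeys;
    }
}

public class BTreeRequirementDemo {
    static PartitioningChoice requiredForChild(boolean requiresBroadcast, boolean isPrimaryIndex,
            boolean isEqCondition, List<String> lowKeyVars, List<String> highKeyVars) {
        if (!requiresBroadcast) {
            return new PartitioningChoice(PartitioningChoice.Kind.INHERITED, Set.of(), List.of());
        }
        if (isPrimaryIndex && isEqCondition) {
            // Equality lookup on the primary index: hash-partition on the search keys
            // (low and high keys, de-duplicated) and sort locally on the same keys.
            Set<String> searchKeys = new LinkedHashSet<>(lowKeyVars);
            searchKeys.addAll(highKeyVars);
            return new PartitioningChoice(PartitioningChoice.Kind.HASH_PARTITIONED,
                    searchKeys, new ArrayList<>(searchKeys));
        }
        return new PartitioningChoice(PartitioningChoice.Kind.BROADCAST, Set.of(), List.of());
    }

    public static void main(String[] args) {
        PartitioningChoice c = requiredForChild(true, true, true, List.of("$pk"), List.of("$pk"));
        System.out.println(c.kind + " on " + c.partitionKeys); // prints: HASH_PARTITIONED on [$pk]
    }
}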

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java
deleted file mode 100644
index 6722cbe..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitPOperator.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.common.transactions.JobId;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AbstractPhysicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-
-public class CommitPOperator extends AbstractPhysicalOperator {
-
-    private final List<LogicalVariable> primaryKeyLogicalVars;
-    private final JobId jobId;
-    private final int datasetId;
-
-    public CommitPOperator(JobId jobId, int datasetId, List<LogicalVariable> primaryKeyLogicalVars) {
-        this.jobId = jobId;
-        this.datasetId = datasetId;
-        this.primaryKeyLogicalVars = primaryKeyLogicalVars;
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        return PhysicalOperatorTag.EXTENSION_OPERATOR;
-    }
-
-    @Override
-    public String toString() {
-        return "COMMIT";
-    }
-
-    @Override
-    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
-            IPhysicalPropertiesVector reqdByParent) {
-        return emptyUnaryRequirements();
-    }
-
-    @Override
-    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context)
-            throws AlgebricksException {
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
-    }
-
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-
-        RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
-                context);
-        int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]);
-
-        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-        CommitRuntimeFactory runtime = new CommitRuntimeFactory(jobId, datasetId, primaryKeyFields,
-                metadataProvider.isTemporaryDatasetWriteJob(), metadataProvider.isWriteTransaction());
-        builder.contributeMicroOperator(op, runtime, recDesc);
-        ILogicalOperator src = op.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(src, 0, op, 0);
-    }
-
-    @Override
-    public boolean isMicroOperator() {
-        return true;
-    }
-
-    @Override
-    public boolean expensiveThanMaterialization() {
-        return false;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java
deleted file mode 100644
index 69c8f78..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntime.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.exceptions.ACIDException;
-import edu.uci.ics.asterix.common.transactions.ILogManager;
-import edu.uci.ics.asterix.common.transactions.ITransactionContext;
-import edu.uci.ics.asterix.common.transactions.ITransactionManager;
-import edu.uci.ics.asterix.common.transactions.JobId;
-import edu.uci.ics.asterix.common.transactions.LogRecord;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.bloomfilter.impls.MurmurHash128Bit;
-
-public class CommitRuntime implements IPushRuntime {
-
-    private final static long SEED = 0L;
-
-    private final IHyracksTaskContext hyracksTaskCtx;
-    private final ITransactionManager transactionManager;
-    private final ILogManager logMgr;
-    private final JobId jobId;
-    private final int datasetId;
-    private final int[] primaryKeyFields;
-    private final boolean isTemporaryDatasetWriteJob;
-    private final boolean isWriteTransaction;
-    private final long[] longHashes;
-    private final LogRecord logRecord;
-
-    private ITransactionContext transactionContext;
-    private FrameTupleAccessor frameTupleAccessor;
-    private final FrameTupleReference frameTupleReference;
-
-    public CommitRuntime(IHyracksTaskContext ctx, JobId jobId, int datasetId, int[] primaryKeyFields,
-            boolean isTemporaryDatasetWriteJob, boolean isWriteTransaction) {
-        this.hyracksTaskCtx = ctx;
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.transactionManager = runtimeCtx.getTransactionSubsystem().getTransactionManager();
-        this.logMgr = runtimeCtx.getTransactionSubsystem().getLogManager();
-        this.jobId = jobId;
-        this.datasetId = datasetId;
-        this.primaryKeyFields = primaryKeyFields;
-        this.frameTupleReference = new FrameTupleReference();
-        this.isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob;
-        this.isWriteTransaction = isWriteTransaction;
-        this.longHashes = new long[2];
-        this.logRecord = new LogRecord();
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        try {
-            transactionContext = transactionManager.getTransactionContext(jobId, false);
-            transactionContext.setWriteTxn(isWriteTransaction);
-        } catch (ACIDException e) {
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        int pkHash = 0;
-        frameTupleAccessor.reset(buffer);
-        int nTuple = frameTupleAccessor.getTupleCount();
-        for (int t = 0; t < nTuple; t++) {
-            if (isTemporaryDatasetWriteJob) {
-                /**
-                 * This "if branch" is for writes over temporary datasets.
-                 * A temporary dataset does not require any lock and does not generate any write-ahead
-                 * update and commit log but generates flush log and job commit log.
-                 * However, a temporary dataset still MUST guarantee no-steal policy so that this
-                 * notification call should be delivered to PrimaryIndexOptracker and used correctly in order
-                 * to decrement number of active operation count of PrimaryIndexOptracker.
-                 * By maintaining the count correctly and only allowing flushing when the count is 0, it can
-                 * guarantee the no-steal policy for temporary datasets, too.
-                 */
-                transactionContext.notifyOptracker(false);
-            } else {
-                frameTupleReference.reset(frameTupleAccessor, t);
-                pkHash = computePrimaryKeyHashValue(frameTupleReference, primaryKeyFields);
-                logRecord.formEntityCommitLogRecord(transactionContext, datasetId, pkHash, frameTupleReference,
-                        primaryKeyFields);
-                try {
-                    logMgr.log(logRecord);
-                } catch (ACIDException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-        }
-    }
-
-    private int computePrimaryKeyHashValue(ITupleReference tuple, int[] primaryKeyFields) {
-        MurmurHash128Bit.hash3_x64_128(tuple, primaryKeyFields, SEED, longHashes);
-        return Math.abs((int) longHashes[0]);
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-
-    }
-
-    @Override
-    public void setFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-        throw new IllegalStateException();
-    }
-
-    @Override
-    public void setInputRecordDescriptor(int index, RecordDescriptor recordDescriptor) {
-        this.frameTupleAccessor = new FrameTupleAccessor(recordDescriptor);
-    }
-}
\ No newline at end of file
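CommitRuntime.nextFrame above makes one pass over the tuples of each incoming frame: for a normal dataset it hashes the primary-key fields (MurmurHash128Bit over the key columns, then the absolute value of the low word) and appends an entity-commit log record through the log manager, while for a temporary-dataset write job it only notifies the operation tracker, as the long comment explains. The sketch below mirrors that loop with invented CommitLogWriter and CommitLoopSketch types and a simplified hash; it is an illustration of the per-tuple pattern, not the Hyracks/Asterix API:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Illustrative stand-in for the transaction log manager; the real runtime appends
// LogRecords through ILogManager.
interface CommitLogWriter {
    void appendEntityCommit(int datasetId, int pkHash, Object[] tuple);
}

class CommitLoopSketch {
    private final int datasetId;
    private final int[] primaryKeyFields;
    private final boolean temporaryDatasetWriteJob;
    private final CommitLogWriter log;

    CommitLoopSketch(int datasetId, int[] primaryKeyFields, boolean temporaryDatasetWriteJob,
            CommitLogWriter log) {
        this.datasetId = datasetId;
        this.primaryKeyFields = primaryKeyFields;
        this.temporaryDatasetWriteJob = temporaryDatasetWriteJob;
        this.log = log;
    }

    // Mirrors the shape of CommitRuntime.nextFrame: one pass over the tuples in a frame,
    // hashing the primary key and appending one entity-commit log entry per tuple.
    void nextFrame(List<Object[]> frame) {
        for (Object[] tuple : frame) {
            if (temporaryDatasetWriteJob) {
                // Temporary datasets skip write-ahead logging; only the operation tracker
                // would be notified here.
                continue;
            }
            int pkHash = computePrimaryKeyHash(tuple);
            log.appendEntityCommit(datasetId, pkHash, tuple);
        }
    }

    // Simplified stand-in for MurmurHash128Bit.hash3_x64_128 followed by Math.abs of the
    // low word; any stable non-negative hash of the key fields serves the sketch.
    private int computePrimaryKeyHash(Object[] tuple) {
        Object[] keys = new Object[primaryKeyFields.length];
        for (int i = 0; i < primaryKeyFields.length; i++) {
            keys[i] = tuple[primaryKeyFields[i]];
        }
        return Math.abs(Arrays.hashCode(keys));
    }
}

public class CommitLoopDemo {
    public static void main(String[] args) {
        CommitLoopSketch sketch = new CommitLoopSketch(7, new int[] { 0 }, false,
                (dsId, hash, tuple) -> System.out.println("commit ds=" + dsId + " pkHash=" + hash));
        Object[] tuple = { "key-1", "payload" };
        sketch.nextFrame(Collections.singletonList(tuple));
    }
}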

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java
deleted file mode 100644
index 768cdb6..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/CommitRuntimeFactory.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import edu.uci.ics.asterix.common.transactions.JobId;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntime;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-
-public class CommitRuntimeFactory implements IPushRuntimeFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private final JobId jobId;
-    private final int datasetId;
-    private final int[] primaryKeyFields;
-    private final boolean isTemporaryDatasetWriteJob;
-    private final boolean isWriteTransaction;
-
-    public CommitRuntimeFactory(JobId jobId, int datasetId, int[] primaryKeyFields, boolean isTemporaryDatasetWriteJob,
-            boolean isWriteTransaction) {
-        this.jobId = jobId;
-        this.datasetId = datasetId;
-        this.primaryKeyFields = primaryKeyFields;
-        this.isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob;
-        this.isWriteTransaction = isWriteTransaction;
-    }
-
-    @Override
-    public String toString() {
-        return "commit";
-    }
-
-    @Override
-    public IPushRuntime createPushRuntime(IHyracksTaskContext ctx) throws AlgebricksException {
-        return new CommitRuntime(ctx, jobId, datasetId, primaryKeyFields, isTemporaryDatasetWriteJob,
-                isWriteTransaction);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java
deleted file mode 100644
index dcbc70c..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/ExternalDataLookupPOperator.java
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.external.indexing.dataflow.HDFSLookupAdapterFactory;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.asterix.metadata.declared.DatasetDataSource;
-import edu.uci.ics.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import edu.uci.ics.asterix.metadata.entities.Dataset;
-import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExternalDataLookupOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AbstractScanPOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-
-public class ExternalDataLookupPOperator extends AbstractScanPOperator {
-
-    private final List<LogicalVariable> ridVarList;
-    private AqlSourceId datasetId;
-    private Dataset dataset;
-    private ARecordType recordType;
-    private Index secondaryIndex;
-    private boolean requiresBroadcast;
-    private boolean retainInput;
-    private boolean retainNull;
-
-    public ExternalDataLookupPOperator(AqlSourceId datasetId, Dataset dataset, ARecordType recordType,
-            Index secondaryIndex, List<LogicalVariable> ridVarList, boolean requiresBroadcast, boolean retainInput, boolean retainNull) {
-        this.datasetId = datasetId;
-        this.dataset = dataset;
-        this.recordType = recordType;
-        this.secondaryIndex = secondaryIndex;
-        this.ridVarList = ridVarList;
-        this.requiresBroadcast = requiresBroadcast;
-        this.retainInput = retainInput;
-        this.retainNull = retainNull;
-    }
-
-    public Dataset getDataset() {
-        return dataset;
-    }
-
-    public void setDataset(Dataset dataset) {
-        this.dataset = dataset;
-    }
-
-    public ARecordType getRecordType() {
-        return recordType;
-    }
-
-    public void setRecordType(ARecordType recordType) {
-        this.recordType = recordType;
-    }
-
-    public AqlSourceId getDatasetId() {
-        return datasetId;
-    }
-
-    public void setDatasetId(AqlSourceId datasetId) {
-        this.datasetId = datasetId;
-    }
-
-    @Override
-    public PhysicalOperatorTag getOperatorTag() {
-        return PhysicalOperatorTag.EXTERNAL_LOOKUP;
-    }
-
-    @Override
-    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context)
-            throws AlgebricksException {
-        AqlDataSource ds = new DatasetDataSource(datasetId, datasetId.getDataverseName(), datasetId.getDatasourceName(),
-                recordType, AqlDataSourceType.EXTERNAL_DATASET);
-        IDataSourcePropertiesProvider dspp = ds.getPropertiesProvider();
-        AbstractScanOperator as = (AbstractScanOperator) op;
-        deliveredProperties = dspp.computePropertiesVector(as.getVariables());
-    }
-
-    @Override
-    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
-            IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
-        ExternalDataLookupOperator edabro = (ExternalDataLookupOperator) op;
-        ILogicalExpression expr = edabro.getExpressionRef().getValue();
-        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
-            throw new IllegalStateException();
-        }
-        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
-        FunctionIdentifier funcIdent = funcExpr.getFunctionIdentifier();
-        if (!funcIdent.equals(AsterixBuiltinFunctions.EXTERNAL_LOOKUP)) {
-            return;
-        }
-        int[] ridIndexes = getKeyIndexes(ridVarList, inputSchemas);
-        IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
-        List<LogicalVariable> outputVars = new ArrayList<LogicalVariable>();
-        if (retainInput) {
-            VariableUtilities.getLiveVariables(edabro, outputVars);
-        } else {
-            VariableUtilities.getProducedVariables(edabro, outputVars);
-        }
-
-        AqlMetadataProvider metadataProvider = (AqlMetadataProvider) context.getMetadataProvider();
-        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> externalLoopup = HDFSLookupAdapterFactory
-                .buildExternalDataLookupRuntime(builder.getJobSpec(), dataset, secondaryIndex, ridIndexes, retainInput,
-                        typeEnv, outputVars, opSchema, context, metadataProvider, retainNull);
-        builder.contributeHyracksOperator(edabro, externalLoopup.first);
-        builder.contributeAlgebricksPartitionConstraint(externalLoopup.first, externalLoopup.second);
-        ILogicalOperator srcExchange = edabro.getInputs().get(0).getValue();
-        builder.contributeGraphEdge(srcExchange, 0, edabro, 0);
-    }
-
-    protected int[] getKeyIndexes(List<LogicalVariable> keyVarList, IOperatorSchema[] inputSchemas) {
-        if (keyVarList == null) {
-            return null;
-        }
-        int[] keyIndexes = new int[keyVarList.size()];
-        for (int i = 0; i < keyVarList.size(); i++) {
-            keyIndexes[i] = inputSchemas[0].findVariable(keyVarList.get(i));
-        }
-        return keyIndexes;
-    }
-
-    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
-            IPhysicalPropertiesVector reqdByParent) {
-        if (requiresBroadcast) {
-            StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
-            pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
-            return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
-
-        } else {
-            return super.getRequiredPropertiesForChildren(op, reqdByParent);
-        }
-    }
-
-    @Override
-    public boolean isMicroOperator() {
-        return false;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java
deleted file mode 100644
index 1793407..0000000
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/operators/physical/IndexSearchPOperator.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.algebra.operators.physical;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.metadata.declared.AqlSourceId;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSourcePropertiesProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.AbstractScanPOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.BroadcastPartitioningProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-
-/**
- * Class that embodies the commonalities between access method physical operators.
- */
-public abstract class IndexSearchPOperator extends AbstractScanPOperator {
-
-    protected final IDataSourceIndex<String, AqlSourceId> idx;
-    protected final boolean requiresBroadcast;
-
-    public IndexSearchPOperator(IDataSourceIndex<String, AqlSourceId> idx, boolean requiresBroadcast) {
-        this.idx = idx;
-        this.requiresBroadcast = requiresBroadcast;
-    }
-
-    @Override
-    public boolean isMicroOperator() {
-        return false;
-    }
-
-    @Override
-    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
-        IDataSource<?> ds = idx.getDataSource();
-        IDataSourcePropertiesProvider dspp = ds.getPropertiesProvider();
-        AbstractScanOperator as = (AbstractScanOperator) op;
-        deliveredProperties = dspp.computePropertiesVector(as.getVariables());
-    }
-
-    protected int[] getKeyIndexes(List<LogicalVariable> keyVarList, IOperatorSchema[] inputSchemas) {
-        if (keyVarList == null) {
-            return null;
-        }
-        int[] keyIndexes = new int[keyVarList.size()];
-        for (int i = 0; i < keyVarList.size(); i++) {
-            keyIndexes[i] = inputSchemas[0].findVariable(keyVarList.get(i));
-        }
-        return keyIndexes;
-    }
-
-    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
-            IPhysicalPropertiesVector reqdByParent) {
-        if (requiresBroadcast) {
-            StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
-            pv[0] = new StructuralPropertiesVector(new BroadcastPartitioningProperty(null), null);
-            return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
-        } else {
-            return super.getRequiredPropertiesForChildren(op, reqdByParent);
-        }
-    }
-
-    @Override
-    public boolean expensiveThanMaterialization() {
-        return true;
-    }
-}
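Both ExternalDataLookupPOperator and IndexSearchPOperator above carry the same small helper, getKeyIndexes, which maps each key variable to its column position in the first input schema and returns null when no key list is given. Modeling the schema as a plain list of variable names (purely for illustration; the real code resolves positions through IOperatorSchema.findVariable), the lookup amounts to the following:

import java.util.Arrays;
import java.util.List;

public class KeyIndexDemo {
    // Maps each key variable to its position in the input schema, or returns null when
    // no key list is given, matching the shape of getKeyIndexes above.
    static int[] getKeyIndexes(List<String> keyVars, List<String> inputSchema) {
        if (keyVars == null) {
            return null;
        }
        int[] keyIndexes = new int[keyVars.size()];
        for (int i = 0; i < keyVars.size(); i++) {
            keyIndexes[i] = inputSchema.indexOf(keyVars.get(i)); // -1 if the variable is absent
        }
        return keyIndexes;
    }

    public static void main(String[] args) {
        List<String> schema = Arrays.asList("$id", "$name", "$age");
        int[] indexes = getKeyIndexes(Arrays.asList("$age", "$id"), schema);
        System.out.println(Arrays.toString(indexes)); // prints: [2, 0]
    }
}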