Posted to commits@asterixdb.apache.org by im...@apache.org on 2015/08/25 18:44:00 UTC

[12/51] [partial] incubator-asterixdb git commit: Change folder structure for Java repackage

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
deleted file mode 100644
index 956b447..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.hyracks.bootstrap;
-
-import java.io.File;
-import java.rmi.server.UnicastRemoteObject;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.kohsuke.args4j.CmdLineException;
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
-
-import edu.uci.ics.asterix.api.common.AsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.api.AsterixThreadFactory;
-import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
-import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
-import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
-import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
-import edu.uci.ics.asterix.common.transactions.IRecoveryManager;
-import edu.uci.ics.asterix.common.transactions.IRecoveryManager.SystemState;
-import edu.uci.ics.asterix.event.schema.cluster.Cluster;
-import edu.uci.ics.asterix.event.schema.cluster.Node;
-import edu.uci.ics.asterix.metadata.MetadataManager;
-import edu.uci.ics.asterix.metadata.MetadataNode;
-import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
-import edu.uci.ics.asterix.metadata.api.IMetadataNode;
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataBootstrap;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
-import edu.uci.ics.hyracks.api.application.INCApplicationContext;
-import edu.uci.ics.hyracks.api.application.INCApplicationEntryPoint;
-import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
-import edu.uci.ics.hyracks.api.lifecycle.LifeCycleComponentManager;
-
-public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
-    private static final Logger LOGGER = Logger.getLogger(NCApplicationEntryPoint.class.getName());
-
-    @Option(name = "-metadata-port", usage = "IP port to bind metadata listener (default: random port)", required = false)
-    public int metadataRmiPort = 0;
-
-    @Option(name = "-initial-run", usage = "A flag indicating if it's the first time the NC is started (default: false)", required = false)
-    public boolean initialRun = false;
-
-    private INCApplicationContext ncApplicationContext = null;
-    private IAsterixAppRuntimeContext runtimeContext;
-    private String nodeId;
-    private boolean isMetadataNode = false;
-    private boolean stopInitiated = false;
-    private SystemState systemState = SystemState.NEW_UNIVERSE;
-    private final long NON_SHARP_CHECKPOINT_TARGET_LSN = -1;
-
-    @Override
-    public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
-        CmdLineParser parser = new CmdLineParser(this);
-
-        try {
-            parser.parseArgument(args);
-        } catch (CmdLineException e) {
-            System.err.println(e.getMessage());
-            System.err.println("Usage:");
-            parser.printUsage(System.err);
-            throw e;
-        }
-
-        ncAppCtx.setThreadFactory(new AsterixThreadFactory(ncAppCtx.getLifeCycleComponentManager()));
-        ncApplicationContext = ncAppCtx;
-        nodeId = ncApplicationContext.getNodeId();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting Asterix node controller: " + nodeId);
-        }
-
-        runtimeContext = new AsterixAppRuntimeContext(ncApplicationContext);
-        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                .getMetadataProperties();
-        if (!metadataProperties.getNodeNames().contains(ncApplicationContext.getNodeId())) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Substitute node joining : " + ncApplicationContext.getNodeId());
-            }
-            updateOnNodeJoin();
-        }
-        runtimeContext.initialize();
-        ncApplicationContext.setApplicationObject(runtimeContext);
-
-        if (initialRun) {
-            LOGGER.info("System is being initialized. (first run)");
-            systemState = SystemState.NEW_UNIVERSE;
-        } else {
-            // #. recover if the system is corrupted by checking system state.
-            IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
-            systemState = recoveryMgr.getSystemState();
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("System is in a state: " + systemState);
-            }
-
-            if (systemState != SystemState.NEW_UNIVERSE) {
-                PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
-                        .getLocalResourceRepository();
-                localResourceRepository.initialize(nodeId, null, false, runtimeContext.getResourceIdFactory());
-            }
-            
-            if (systemState == SystemState.CORRUPTED) {
-                recoveryMgr.startRecovery(true);
-            }
-        }
-
-    }
-
-    @Override
-    public void stop() throws Exception {
-        if (!stopInitiated) {
-            runtimeContext.setShuttingdown(true);
-            stopInitiated = true;
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Stopping Asterix node controller: " + nodeId);
-            }
-
-            IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
-            recoveryMgr.checkpoint(true, NON_SHARP_CHECKPOINT_TARGET_LSN);
-
-            if (isMetadataNode) {
-                MetadataBootstrap.stopUniverse();
-            }
-
-            ncApplicationContext.getLifeCycleComponentManager().stopAll(false);
-            runtimeContext.deinitialize();
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Duplicate attempt to stop ignored: " + nodeId);
-            }
-        }
-    }
-
-    @Override
-    public void notifyStartupComplete() throws Exception {
-        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                .getMetadataProperties();
-
-        if (systemState == SystemState.NEW_UNIVERSE) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("System state: " + SystemState.NEW_UNIVERSE);
-                LOGGER.info("Node ID: " + nodeId);
-                LOGGER.info("Stores: " + metadataProperties.getStores());
-                LOGGER.info("Root Metadata Store: " + metadataProperties.getStores().get(nodeId)[0]);
-            }
-
-            PersistentLocalResourceRepository localResourceRepository = (PersistentLocalResourceRepository) runtimeContext
-                    .getLocalResourceRepository();
-            localResourceRepository.initialize(nodeId, metadataProperties.getStores().get(nodeId)[0], true, null);
-        }
-
-        IAsterixStateProxy proxy = null;
-        isMetadataNode = nodeId.equals(metadataProperties.getMetadataNodeName());
-        if (isMetadataNode) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Bootstrapping metadata");
-            }
-            MetadataNode.INSTANCE.initialize(runtimeContext);
-
-            proxy = (IAsterixStateProxy) ncApplicationContext.getDistributedState();
-            if (proxy == null) {
-                throw new IllegalStateException("Metadata node cannot access distributed state");
-            }
-
-            // This is a special case, we just give the metadataNode directly.
-            // This way we can delay the registration of the metadataNode until
-            // it is completely initialized.
-            MetadataManager.INSTANCE = new MetadataManager(proxy, MetadataNode.INSTANCE);
-            MetadataBootstrap.startUniverse(((IAsterixPropertiesProvider) runtimeContext), ncApplicationContext,
-                    systemState == SystemState.NEW_UNIVERSE);
-            MetadataBootstrap.startDDLRecovery();
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Metadata node bound");
-            }
-        }
-
-        ExternalLibraryBootstrap.setUpExternaLibraries(isMetadataNode);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting lifecycle components");
-        }
-
-        Map<String, String> lifecycleMgmtConfiguration = new HashMap<String, String>();
-        String dumpPathKey = LifeCycleComponentManager.Config.DUMP_PATH_KEY;
-        String dumpPath = metadataProperties.getCoredumpPath(nodeId);
-        lifecycleMgmtConfiguration.put(dumpPathKey, dumpPath);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Coredump directory for NC is: " + dumpPath);
-        }
-        ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
-        lccm.configure(lifecycleMgmtConfiguration);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Configured:" + lccm);
-        }
-        ncApplicationContext.setStateDumpHandler(new AsterixStateDumpHandler(ncApplicationContext.getNodeId(), lccm
-                .getDumpPath(), lccm));
-
-        lccm.startAll();
-
-        IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
-        recoveryMgr.checkpoint(true, NON_SHARP_CHECKPOINT_TARGET_LSN);
-
-        if (isMetadataNode) {
-            IMetadataNode stub = null;
-            stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE, metadataRmiPort);
-            proxy.setMetadataNode(stub);
-        }
-
-        // Reclaim storage for temporary datasets.
-        String[] ioDevices = AsterixClusterProperties.INSTANCE.getIODevices(nodeId);
-        String[] nodeStores = metadataProperties.getStores().get(nodeId);
-        int numIoDevices = AsterixClusterProperties.INSTANCE.getNumberOfIODevices(nodeId);
-        for (int j = 0; j < nodeStores.length; j++) {
-            for (int k = 0; k < numIoDevices; k++) {
-                File f = new File(ioDevices[k] + File.separator + nodeStores[j] + File.separator + "temp");
-                f.delete();
-            }
-        }
-
-        // TODO
-        // reclaim storage for orphaned index artifacts in NCs.
-
-    }
-
-    private void updateOnNodeJoin() {
-        AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                .getMetadataProperties();
-        if (!metadataProperties.getNodeNames().contains(nodeId)) {
-            metadataProperties.getNodeNames().add(nodeId);
-            Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
-            String asterixInstanceName = cluster.getInstanceName();
-            AsterixTransactionProperties txnProperties = ((IAsterixPropertiesProvider) runtimeContext)
-                    .getTransactionProperties();
-            Node self = null;
-            for (Node node : cluster.getSubstituteNodes().getNode()) {
-                String ncId = asterixInstanceName + "_" + node.getId();
-                if (ncId.equalsIgnoreCase(nodeId)) {
-                    String storeDir = node.getStore() == null ? cluster.getStore() : node.getStore();
-                    metadataProperties.getStores().put(nodeId, storeDir.split(","));
-
-                    String coredumpPath = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
-                    metadataProperties.getCoredumpPaths().put(nodeId, coredumpPath);
-
-                    String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
-                    txnProperties.getLogDirectories().put(nodeId, txnLogDir);
-
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Store set to : " + storeDir);
-                        LOGGER.info("Coredump dir set to : " + coredumpPath);
-                        LOGGER.info("Transaction log dir set to :" + txnLogDir);
-                    }
-                    self = node;
-                    break;
-                }
-            }
-            if (self != null) {
-                cluster.getSubstituteNodes().getNode().remove(self);
-                cluster.getNode().add(self);
-            } else {
-                throw new IllegalStateException("Unknown node joining the cluster");
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
deleted file mode 100644
index c27f859..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultReader.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.result;
-
-import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
-import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.comm.IFrame;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
-import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobId;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
-
-public class ResultReader {
-    private final IHyracksDataset hyracksDataset;
-
-    private IHyracksDatasetReader reader;
-
-    private IFrameTupleAccessor frameTupleAccessor;
-
-    // Number of parallel result reader buffers
-    public static final int NUM_READERS = 1;
-
-    public static final int FRAME_SIZE = AsterixAppContextInfo.getInstance().getCompilerProperties().getFrameSize();
-
-    public ResultReader(IHyracksClientConnection hcc, IHyracksDataset hdc) throws Exception {
-        hyracksDataset = hdc;
-    }
-
-    public void open(JobId jobId, ResultSetId resultSetId) throws HyracksDataException {
-        reader = hyracksDataset.createReader(jobId, resultSetId);
-        frameTupleAccessor = new ResultFrameTupleAccessor();
-    }
-
-    public Status getStatus() {
-        return reader.getResultStatus();
-    }
-
-    public int read(IFrame frame) throws HyracksDataException {
-        return reader.read(frame);
-    }
-
-    public IFrameTupleAccessor getFrameTupleAccessor() {
-        return frameTupleAccessor;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java b/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
deleted file mode 100644
index 3a4fd5f..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/result/ResultUtils.java
+++ /dev/null
@@ -1,343 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.result;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.nio.charset.Charset;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.apache.http.ParseException;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import edu.uci.ics.asterix.api.common.SessionConfig;
-import edu.uci.ics.asterix.api.common.SessionConfig.OutputFormat;
-import edu.uci.ics.asterix.api.http.servlet.APIServlet;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.api.comm.IFrame;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.comm.VSizeFrame;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.control.nc.resources.memory.FrameManager;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
-
-public class ResultUtils {
-    private static final Charset UTF_8 = Charset.forName("UTF-8");
-
-    static Map<Character, String> HTML_ENTITIES = new HashMap<Character, String>();
-
-    static {
-        HTML_ENTITIES.put('"', "&quot;");
-        HTML_ENTITIES.put('&', "&amp;");
-        HTML_ENTITIES.put('<', "&lt;");
-        HTML_ENTITIES.put('>', "&gt;");
-    }
-
-    public static String escapeHTML(String s) {
-        for (Character c : HTML_ENTITIES.keySet()) {
-            if (s.indexOf(c) >= 0) {
-                s = s.replace(c.toString(), HTML_ENTITIES.get(c));
-            }
-        }
-        return s;
-    }
-
-    public static void displayCSVHeader(ARecordType recordType, SessionConfig conf) {
-        // If HTML-ifying, we have to output this here before the header -
-        // pretty ugly
-        if (conf.is(SessionConfig.FORMAT_HTML)) {
-            conf.out().println("<h4>Results:</h4>");
-            conf.out().println("<pre>");
-        }
-
-        String[] fieldNames = recordType.getFieldNames();
-        boolean notfirst = false;
-        for (String name : fieldNames) {
-            if (notfirst) {
-                conf.out().print(',');
-            }
-            notfirst = true;
-            conf.out().print('"');
-            conf.out().print(name.replace("\"", "\"\""));
-            conf.out().print('"');
-        }
-        conf.out().print("\r\n");
-    }
-
-    public static FrameManager resultDisplayFrameMgr = new FrameManager(ResultReader.FRAME_SIZE);
-
-    public static void displayResults(ResultReader resultReader, SessionConfig conf)
-            throws HyracksDataException {
-        IFrameTupleAccessor fta = resultReader.getFrameTupleAccessor();
-
-        IFrame frame = new VSizeFrame(resultDisplayFrameMgr);
-        int bytesRead = resultReader.read(frame);
-        ByteBufferInputStream bbis = new ByteBufferInputStream();
-
-        // Whether we need to separate top-level ADM instances with commas
-        boolean need_commas = true;
-        // Whether this is the first instance being output
-        boolean notfirst = false;
-
-        // If we're outputting CSV with a header, the HTML header was already
-        // output by displayCSVHeader(), so skip it here
-        if (conf.is(SessionConfig.FORMAT_HTML) &&
-            ! (conf.fmt() == OutputFormat.CSV && conf.is(SessionConfig.FORMAT_CSV_HEADER))) {
-            conf.out().println("<h4>Results:</h4>");
-            conf.out().println("<pre>");
-        }
-
-        switch (conf.fmt()) {
-            case CSV:
-                need_commas = false;
-                break;
-            case JSON:
-            case ADM:
-                // Conveniently, JSON and ADM have the same syntax for an
-                // "ordered list", and our representation of the result of a
-                // statement is an ordered list of instances.
-                conf.out().print("[ ");
-                break;
-        }
-
-        if (bytesRead > 0) {
-            do {
-                try {
-                    fta.reset(frame.getBuffer());
-                    int last = fta.getTupleCount();
-                    String result;
-                    for (int tIndex = 0; tIndex < last; tIndex++) {
-                        int start = fta.getTupleStartOffset(tIndex);
-                        int length = fta.getTupleEndOffset(tIndex) - start;
-                        bbis.setByteBuffer(frame.getBuffer(), start);
-                        byte[] recordBytes = new byte[length];
-                        int numread = bbis.read(recordBytes, 0, length);
-                        if (conf.fmt() == OutputFormat.CSV) {
-                            if ( (numread > 0) && (recordBytes[numread-1] == '\n') ) {
-                                numread--;
-                            }
-                        }
-                        result = new String(recordBytes, 0, numread, UTF_8);
-                        if (need_commas && notfirst) {
-                            conf.out().print(", ");
-                        }
-                        notfirst = true;
-                        conf.out().print(result);
-                        if (conf.fmt() == OutputFormat.CSV) {
-                            conf.out().print("\r\n");
-                        }
-                    }
-                    frame.getBuffer().clear();
-                } finally {
-                    try {
-                        bbis.close();
-                    } catch (IOException e) {
-                        throw new HyracksDataException(e);
-                    }
-                }
-            } while (resultReader.read(frame) > 0);
-        }
-
-        conf.out().flush();
-
-        switch (conf.fmt()) {
-            case JSON:
-            case ADM:
-                conf.out().println(" ]");
-                break;
-            case CSV:
-                // Nothing to do
-                break;
-        }
-
-        if (conf.is(SessionConfig.FORMAT_HTML)) {
-            conf.out().println("</pre>");
-        }
-    }
-
-    public static JSONObject getErrorResponse(int errorCode, String errorMessage, String errorSummary,
-            String errorStackTrace) {
-        JSONObject errorResp = new JSONObject();
-        JSONArray errorArray = new JSONArray();
-        errorArray.put(errorCode);
-        errorArray.put(errorMessage);
-        try {
-            errorResp.put("error-code", errorArray);
-            if (!errorSummary.equals(""))
-                errorResp.put("summary", errorSummary);
-            if (!errorStackTrace.equals(""))
-                errorResp.put("stacktrace", errorStackTrace);
-        } catch (JSONException e) {
-            // TODO(madhusudancs): Figure out what to do when JSONException occurs while building the results.
-        }
-        return errorResp;
-    }
-
-    public static void webUIErrorHandler(PrintWriter out, Exception e) {
-        String errorTemplate = readTemplateFile("/webui/errortemplate.html", "%s\n%s\n%s");
-
-        String errorOutput = String.format(errorTemplate, escapeHTML(extractErrorMessage(e)),
-                escapeHTML(extractErrorSummary(e)), escapeHTML(extractFullStackTrace(e)));
-        out.println(errorOutput);
-    }
-
-    public static void webUIParseExceptionHandler(PrintWriter out, Throwable e, String query) {
-        String errorTemplate = readTemplateFile("/webui/errortemplate_message.html", "<pre class=\"error\">%s\n</pre>");
-
-        String errorOutput = String.format(errorTemplate, buildParseExceptionMessage(e, query));
-        out.println(errorOutput);
-    }
-
-    public static void apiErrorHandler(PrintWriter out, Exception e) {
-        int errorCode = 99;
-        if (e instanceof ParseException) {
-            errorCode = 2;
-        } else if (e instanceof AlgebricksException) {
-            errorCode = 3;
-        } else if (e instanceof HyracksDataException) {
-            errorCode = 4;
-        }
-
-        JSONObject errorResp = ResultUtils.getErrorResponse(errorCode, extractErrorMessage(e), extractErrorSummary(e),
-                extractFullStackTrace(e));
-        out.write(errorResp.toString());
-    }
-
-    public static String buildParseExceptionMessage(Throwable e, String query) {
-        StringBuilder errorMessage = new StringBuilder();
-        String message = e.getMessage();
-        message = message.replace("<", "&lt");
-        message = message.replace(">", "&gt");
-        errorMessage.append("SyntaxError: " + message + "\n");
-        int pos = message.indexOf("line");
-        if (pos > 0) {
-            Pattern p = Pattern.compile("\\d+");
-            Matcher m = p.matcher(message);
-            if (m.find(pos)) {
-                int lineNo = Integer.parseInt(message.substring(m.start(), m.end()));
-                String[] lines = query.split("\n");
-                if (lineNo > lines.length) {
-                    errorMessage.append("===> &ltBLANK LINE&gt \n");
-                } else {
-                    String line = lines[lineNo - 1];
-                    errorMessage.append("==> " + line);
-                }
-            }
-        }
-        return errorMessage.toString();
-    }
-
-    private static Throwable getRootCause(Throwable cause) {
-        Throwable nextCause = cause.getCause();
-        while (nextCause != null) {
-            cause = nextCause;
-            nextCause = cause.getCause();
-        }
-        return cause;
-    }
-
-    /**
-     * Extract the message in the root cause of the stack trace:
-     *
-     * @param e
-     * @return error message string.
-     */
-    private static String extractErrorMessage(Throwable e) {
-        Throwable cause = getRootCause(e);
-        String fullyQualifiedExceptionClassName = cause.getClass().getName();
-        String[] hierarchySplits = fullyQualifiedExceptionClassName.split("\\.");
-        //try returning the class without package qualification
-        String exceptionClassName = hierarchySplits[hierarchySplits.length - 1];
-        String localizedMessage = cause.getLocalizedMessage();
-        if(localizedMessage == null){
-            localizedMessage = "Internal error. Please check instance logs for further details.";
-        }
-        return localizedMessage + " [" + exceptionClassName + "]";
-    }
-
-    /**
-     * Extract the meaningful part of a stack trace:
-     * a. the causes in the stack trace hierarchy
-     * b. the top exception for each cause
-     *
-     * @param e
-     * @return the contacted message containing a and b.
-     */
-    private static String extractErrorSummary(Throwable e) {
-        StringBuilder errorMessageBuilder = new StringBuilder();
-        Throwable cause = e;
-        errorMessageBuilder.append(cause.getLocalizedMessage());
-        while (cause != null) {
-            StackTraceElement[] stackTraceElements = cause.getStackTrace();
-            errorMessageBuilder.append(stackTraceElements.length > 0 ? "\n caused by: " + stackTraceElements[0] : "");
-            cause = cause.getCause();
-        }
-        return errorMessageBuilder.toString();
-    }
-
-    /**
-     * Extract the full stack trace:
-     *
-     * @param e
-     * @return the string containing the full stack trace of the error.
-     */
-    private static String extractFullStackTrace(Throwable e) {
-        StringWriter stringWriter = new StringWriter();
-        PrintWriter printWriter = new PrintWriter(stringWriter);
-        e.printStackTrace(printWriter);
-        return stringWriter.toString();
-    }
-
-    /**
-     * Read the template file which is stored as a resource and return its content. If the file does not exist or is
-     * not readable return the default template string.
-     *
-     * @param path
-     *            The path to the resource template file
-     * @param defaultTemplate
-     *            The default template string if the template file does not exist or is not readable
-     * @return The template string to be used to render the output.
-     */
-    private static String readTemplateFile(String path, String defaultTemplate) {
-        String errorTemplate = defaultTemplate;
-        try {
-            String resourcePath = "/webui/errortemplate_message.html";
-            InputStream is = APIServlet.class.getResourceAsStream(resourcePath);
-            InputStreamReader isr = new InputStreamReader(is);
-            StringBuilder sb = new StringBuilder();
-            BufferedReader br = new BufferedReader(isr);
-            String line = br.readLine();
-
-            while (line != null) {
-                sb.append(line);
-                line = br.readLine();
-            }
-            errorTemplate = sb.toString();
-        } catch (IOException ioe) {
-            // If there is an IOException reading the error template html file, default value of error template is used.
-        }
-        return errorTemplate;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java b/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
deleted file mode 100644
index a788c37..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.dataflow.std.misc;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class ConstantTupleSourceOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private int[] fieldSlots;
-    private byte[] tupleData;
-    private int tupleSize;
-
-    public ConstantTupleSourceOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc, int[] fieldSlots,
-            byte[] tupleData, int tupleSize) {
-        super(spec, 0, 1);
-        this.tupleData = tupleData;
-        this.fieldSlots = fieldSlots;
-        this.tupleSize = tupleSize;
-        recordDescriptors[0] = recDesc;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        return new ConstantTupleSourceOperatorNodePushable(ctx, fieldSlots, tupleData, tupleSize);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java b/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
deleted file mode 100644
index ba75fb0..0000000
--- a/asterix-app/src/main/java/edu/uci/ics/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorNodePushable.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.dataflow.std.misc;
-
-import edu.uci.ics.hyracks.api.comm.VSizeFrame;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-public class ConstantTupleSourceOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-    private IHyracksTaskContext ctx;
-
-    private int[] fieldSlots;
-    private byte[] tupleData;
-    private int tupleSize;
-
-    public ConstantTupleSourceOperatorNodePushable(IHyracksTaskContext ctx, int[] fieldSlots, byte[] tupleData,
-            int tupleSize) {
-        super();
-        this.fieldSlots = fieldSlots;
-        this.tupleData = tupleData;
-        this.tupleSize = tupleSize;
-        this.ctx = ctx;
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
-        if (fieldSlots != null && tupleData != null && tupleSize > 0)
-            appender.append(fieldSlots, tupleData, 0, tupleSize);
-        writer.open();
-        try {
-            appender.flush(writer, true);
-        }
-        finally {
-            writer.close();
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
new file mode 100644
index 0000000..8ccc9d5
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -0,0 +1,411 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.PrintWriter;
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.json.JSONException;
+
+import edu.uci.ics.asterix.api.common.Job.SubmissionMode;
+import edu.uci.ics.asterix.aql.base.Statement.Kind;
+import edu.uci.ics.asterix.aql.expression.FunctionDecl;
+import edu.uci.ics.asterix.aql.expression.Query;
+import edu.uci.ics.asterix.aql.expression.visitor.AQLPrintVisitor;
+import edu.uci.ics.asterix.aql.rewrites.AqlRewriter;
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.common.AqlExpressionTypeComputer;
+import edu.uci.ics.asterix.dataflow.data.common.AqlMergeAggregationExpressionFactory;
+import edu.uci.ics.asterix.dataflow.data.common.AqlNullableTypeComputer;
+import edu.uci.ics.asterix.dataflow.data.common.AqlPartialAggregationTypeComputer;
+import edu.uci.ics.asterix.formats.base.IDataFormat;
+import edu.uci.ics.asterix.jobgen.AqlLogicalExpressionJobGen;
+import edu.uci.ics.asterix.metadata.MetadataManager;
+import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
+import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
+import edu.uci.ics.asterix.metadata.entities.Dataverse;
+import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
+import edu.uci.ics.asterix.optimizer.base.RuleCollections;
+import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
+import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
+import edu.uci.ics.asterix.translator.AqlExpressionToPlanTranslator;
+import edu.uci.ics.asterix.translator.CompiledStatements.ICompiledDmlStatement;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.compiler.api.HeuristicCompilerFactoryBuilder;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompiler;
+import edu.uci.ics.hyracks.algebricks.compiler.api.ICompilerFactory;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialFixpointRuleController;
+import edu.uci.ics.hyracks.algebricks.compiler.rewriter.rulecontrollers.SequentialOnceRuleController;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionEvalSizeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IMergeAggregationExpressionFactory;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.INullableTypeComputer;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.LogicalExpressionJobGenToExpressionRuntimeProviderAdapter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPlotter;
+import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.AlgebricksOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IOptimizationContextFactory;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+
+/**
+ * Provides helper methods for compilation of a query into a JobSpec and submission
+ * to Hyracks through the Hyracks client interface.
+ */
+public class APIFramework {
+
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
+        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultLogicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+        SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
+        SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
+        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildInitialTranslationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildTypeInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildAutogenerateIDRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildNormalizationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildLoadFieldsRuleCollection()));
+        // fj
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildFuzzyJoinRuleCollection()));
+        //
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildNormalizationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlFullDfs,
+                RuleCollections.buildLoadFieldsRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildDataExchangeRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildConsolidationRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildAccessMethodRuleCollection()));
+        defaultLogicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqCtrlNoDfs,
+                RuleCollections.buildPlanCleanupRuleCollection()));
+
+        //put TXnRuleCollection!
+        return defaultLogicalRewrites;
+    }
+
+    private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultPhysicalRewrites() {
+        List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultPhysicalRewrites = new ArrayList<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>>();
+        SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
+        SequentialOnceRuleController seqOnceTopLevel = new SequentialOnceRuleController(false);
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.buildPhysicalRewritesAllLevelsRuleCollection()));
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceTopLevel,
+                RuleCollections.buildPhysicalRewritesTopLevelRuleCollection()));
+        defaultPhysicalRewrites.add(new Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>(seqOnceCtrl,
+                RuleCollections.prepareForJobGenRuleCollection()));
+        return defaultPhysicalRewrites;
+    }
+
+    private static class AqlOptimizationContextFactory implements IOptimizationContextFactory {
+
+        public static final AqlOptimizationContextFactory INSTANCE = new AqlOptimizationContextFactory();
+
+        private AqlOptimizationContextFactory() {
+        }
+
+        @Override
+        public IOptimizationContext createOptimizationContext(int varCounter,
+                IExpressionEvalSizeComputer expressionEvalSizeComputer,
+                IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
+                IExpressionTypeComputer expressionTypeComputer, INullableTypeComputer nullableTypeComputer,
+                PhysicalOptimizationConfig physicalOptimizationConfig) {
+            return new AlgebricksOptimizationContext(varCounter, expressionEvalSizeComputer,
+                    mergeAggregationExpressionFactory, expressionTypeComputer, nullableTypeComputer,
+                    physicalOptimizationConfig);
+        }
+
+    }
+
+    public static Pair<Query, Integer> reWriteQuery(List<FunctionDecl> declaredFunctions,
+            AqlMetadataProvider metadataProvider, Query q, SessionConfig conf) throws AsterixException {
+
+        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_EXPR_TREE)) {
+            conf.out().println();
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Expression tree:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Expression tree:");
+            }
+
+            if (q != null) {
+                q.accept(new AQLPrintVisitor(conf.out()), 0);
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+        AqlRewriter rw = new AqlRewriter(declaredFunctions, q, metadataProvider);
+        rw.rewrite();
+        Query rwQ = rw.getExpr();
+        return new Pair(rwQ, rw.getVarCounter());
+    }
+
+    public static JobSpecification compileQuery(List<FunctionDecl> declaredFunctions,
+            AqlMetadataProvider queryMetadataProvider, Query rwQ, int varCounter, String outputDatasetName,
+            SessionConfig conf, ICompiledDmlStatement statement) throws AsterixException, AlgebricksException,
+            JSONException, RemoteException, ACIDException {
+
+        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_REWRITTEN_EXPR_TREE)) {
+            conf.out().println();
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Rewritten expression tree:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Rewritten expression:");
+            }
+
+            if (rwQ != null) {
+                rwQ.accept(new AQLPrintVisitor(conf.out()), 0);
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+
+        edu.uci.ics.asterix.common.transactions.JobId asterixJobId = JobIdFactory.generateJobId();
+        queryMetadataProvider.setJobId(asterixJobId);
+        AqlExpressionToPlanTranslator t = new AqlExpressionToPlanTranslator(queryMetadataProvider, varCounter,
+                outputDatasetName, statement);
+
+        ILogicalPlan plan;
+        // statement = null when it's a query
+        if (statement == null || statement.getKind() != Kind.LOAD) {
+            plan = t.translate(rwQ);
+        } else {
+            plan = t.translateLoad();
+        }
+
+        LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
+        if (!conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS) && conf.is(SessionConfig.OOB_LOGICAL_PLAN)) {
+            conf.out().println();
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Logical plan:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Logical plan:");
+            }
+
+            if (rwQ != null || statement.getKind() == Kind.LOAD) {
+                StringBuilder buffer = new StringBuilder();
+                PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
+                conf.out().print(buffer);
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+
+        //print the plot for the logical plan
+        AsterixExternalProperties xProps = AsterixAppContextInfo.getInstance().getExternalProperties();
+        Boolean plot = xProps.getIsPlottingEnabled();
+        if (plot) {
+            PlanPlotter.printLogicalPlan(plan);
+        }
+
+        AsterixCompilerProperties compilerProperties = AsterixAppContextInfo.getInstance().getCompilerProperties();
+        int frameSize = compilerProperties.getFrameSize();
+        int sortFrameLimit = (int) (compilerProperties.getSortMemorySize() / frameSize);
+        int groupFrameLimit = (int) (compilerProperties.getGroupMemorySize() / frameSize);
+        int joinFrameLimit = (int) (compilerProperties.getJoinMemorySize() / frameSize);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalSort(sortFrameLimit);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesExternalGroupBy(groupFrameLimit);
+        OptimizationConfUtil.getPhysicalOptimizationConfig().setMaxFramesHybridHash(joinFrameLimit);
+
+        HeuristicCompilerFactoryBuilder builder = new HeuristicCompilerFactoryBuilder(
+                AqlOptimizationContextFactory.INSTANCE);
+        builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
+        builder.setLogicalRewrites(buildDefaultLogicalRewrites());
+        builder.setPhysicalRewrites(buildDefaultPhysicalRewrites());
+        IDataFormat format = queryMetadataProvider.getFormat();
+        ICompilerFactory compilerFactory = builder.create();
+        builder.setExpressionEvalSizeComputer(format.getExpressionEvalSizeComputer());
+        builder.setIMergeAggregationExpressionFactory(new AqlMergeAggregationExpressionFactory());
+        builder.setPartialAggregationTypeComputer(new AqlPartialAggregationTypeComputer());
+        builder.setExpressionTypeComputer(AqlExpressionTypeComputer.INSTANCE);
+        builder.setNullableTypeComputer(AqlNullableTypeComputer.INSTANCE);
+
+        ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
+        if (conf.isOptimize()) {
+            compiler.optimize();
+            //plot optimized logical plan
+            if (plot)
+                PlanPlotter.printOptimizedLogicalPlan(plan);
+            if (conf.is(SessionConfig.OOB_OPTIMIZED_LOGICAL_PLAN)) {
+                if (conf.is(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS)) {
+                    // For Optimizer tests.
+                    StringBuilder buffer = new StringBuilder();
+                    PlanPrettyPrinter.printPhysicalOps(plan, buffer, 0);
+                    conf.out().print(buffer);
+                } else {
+                    if (conf.is(SessionConfig.FORMAT_HTML)) {
+                        conf.out().println("<h4>Optimized logical plan:</h4>");
+                        conf.out().println("<pre>");
+                    } else {
+                        conf.out().println("----------Optimized logical plan:");
+                    }
+
+                    if (rwQ != null || statement.getKind() == Kind.LOAD) {
+                        StringBuilder buffer = new StringBuilder();
+                        PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
+                        conf.out().print(buffer);
+                    }
+
+                    if (conf.is(SessionConfig.FORMAT_HTML)) {
+                        conf.out().println("</pre>");
+                    }
+                }
+            }
+        }
+
+        if (!conf.isGenerateJobSpec()) {
+            return null;
+        }
+
+        AlgebricksPartitionConstraint clusterLocs = queryMetadataProvider.getClusterLocations();
+        builder.setBinaryBooleanInspectorFactory(format.getBinaryBooleanInspectorFactory());
+        builder.setBinaryIntegerInspectorFactory(format.getBinaryIntegerInspectorFactory());
+        builder.setClusterLocations(clusterLocs);
+        builder.setComparatorFactoryProvider(format.getBinaryComparatorFactoryProvider());
+        builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
+                AqlLogicalExpressionJobGen.INSTANCE));
+        builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
+        builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
+        builder.setNullWriterFactory(format.getNullWriterFactory());
+        builder.setPredicateEvaluatorFactoryProvider(format.getPredicateEvaluatorFactoryProvider());
+
+        switch (conf.fmt()) {
+            case JSON:
+                builder.setPrinterProvider(format.getJSONPrinterFactoryProvider());
+                break;
+            case CSV:
+                builder.setPrinterProvider(format.getCSVPrinterFactoryProvider());
+                break;
+            case ADM:
+                builder.setPrinterProvider(format.getPrinterFactoryProvider());
+                break;
+            default:
+                throw new RuntimeException("Unexpected OutputFormat!");
+        }
+
+        builder.setSerializerDeserializerProvider(format.getSerdeProvider());
+        builder.setTypeTraitProvider(format.getTypeTraitProvider());
+        builder.setNormalizedKeyComputerFactoryProvider(format.getNormalizedKeyComputerFactoryProvider());
+
+        JobEventListenerFactory jobEventListenerFactory = new JobEventListenerFactory(asterixJobId,
+                queryMetadataProvider.isWriteTransaction());
+        JobSpecification spec = compiler.createJob(AsterixAppContextInfo.getInstance(), jobEventListenerFactory);
+
+        if (conf.is(SessionConfig.OOB_HYRACKS_JOB)) {
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("<h4>Hyracks job:</h4>");
+                conf.out().println("<pre>");
+            } else {
+                conf.out().println("----------Hyracks job:");
+            }
+
+            if (rwQ != null) {
+                conf.out().println(spec.toJSON().toString(1));
+                conf.out().println(spec.getUserConstraints());
+            }
+
+            if (conf.is(SessionConfig.FORMAT_HTML)) {
+                conf.out().println("</pre>");
+            }
+        }
+        return spec;
+    }
+
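+    // Runs each job specification in sequence, blocks until it completes, and reports the wall-clock duration.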
+    public static void executeJobArray(IHyracksClientConnection hcc, JobSpecification[] specs, PrintWriter out)
+            throws Exception {
+        for (int i = 0; i < specs.length; i++) {
+            specs[i].setMaxReattempts(0);
+            JobId jobId = hcc.startJob(specs[i]);
+            long startTime = System.currentTimeMillis();
+            hcc.waitForCompletion(jobId);
+            long endTime = System.currentTimeMillis();
+            double duration = (endTime - startTime) / 1000.00;
+            out.println("<pre>Duration: " + duration + " sec</pre>");
+        }
+    }
+
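+    // Variant for Job wrappers: asynchronous submissions are not awaited, and a job that fails is logged and skipped.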
+    public static void executeJobArray(IHyracksClientConnection hcc, Job[] jobs, PrintWriter out) throws Exception {
+        for (int i = 0; i < jobs.length; i++) {
+            jobs[i].getJobSpec().setMaxReattempts(0);
+            long startTime = System.currentTimeMillis();
+            try {
+                JobId jobId = hcc.startJob(jobs[i].getJobSpec());
+                if (jobs[i].getSubmissionMode() == SubmissionMode.ASYNCHRONOUS) {
+                    continue;
+                }
+                hcc.waitForCompletion(jobId);
+            } catch (Exception e) {
+                e.printStackTrace();
+                continue;
+            }
+            long endTime = System.currentTimeMillis();
+            double duration = (endTime - startTime) / 1000.00;
+            out.println("<pre>Duration: " + duration + " sec</pre>");
+        }
+    }
+
+    private static IDataFormat getDataFormat(MetadataTransactionContext mdTxnCtx, String dataverseName)
+            throws AsterixException {
+        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
+        IDataFormat format;
+        try {
+            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+        return format;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
new file mode 100644
index 0000000..6d7f2a4
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
@@ -0,0 +1,271 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.config.AsterixCompilerProperties;
+import edu.uci.ics.asterix.common.config.AsterixExternalProperties;
+import edu.uci.ics.asterix.common.config.AsterixFeedProperties;
+import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
+import edu.uci.ics.asterix.common.config.AsterixPropertiesAccessor;
+import edu.uci.ics.asterix.common.config.AsterixStorageProperties;
+import edu.uci.ics.asterix.common.config.AsterixTransactionProperties;
+import edu.uci.ics.asterix.common.config.IAsterixPropertiesProvider;
+import edu.uci.ics.asterix.common.context.AsterixFileMapManager;
+import edu.uci.ics.asterix.common.context.DatasetLifecycleManager;
+import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.common.feeds.api.IFeedManager;
+import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
+import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
+import edu.uci.ics.asterix.feeds.FeedManager;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepository;
+import edu.uci.ics.asterix.transaction.management.resource.PersistentLocalResourceRepositoryFactory;
+import edu.uci.ics.asterix.transaction.management.service.logging.LogManager;
+import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionSubsystem;
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponent;
+import edu.uci.ics.hyracks.api.lifecycle.ILifeCycleComponentManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AsynchronousScheduler;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.PrefixMergePolicyFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageCleanerPolicy;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
+
+public class AsterixAppRuntimeContext implements IAsterixAppRuntimeContext, IAsterixPropertiesProvider {
+
+    private static final AsterixPropertiesAccessor ASTERIX_PROPERTIES_ACCESSOR;
+
+    static {
+        try {
+            ASTERIX_PROPERTIES_ACCESSOR = new AsterixPropertiesAccessor();
+        } catch (AsterixException e) {
+            throw new ExceptionInInitializerError(e);
+        }
+    }
+
+    private static final int METADATA_IO_DEVICE_ID = 0;
+
+    private ILSMMergePolicyFactory metadataMergePolicyFactory;
+    private final INCApplicationContext ncApplicationContext;
+
+    private AsterixCompilerProperties compilerProperties;
+    private AsterixExternalProperties externalProperties;
+    private AsterixMetadataProperties metadataProperties;
+    private AsterixStorageProperties storageProperties;
+    private AsterixTransactionProperties txnProperties;
+    private AsterixFeedProperties feedProperties;
+
+    private AsterixThreadExecutor threadExecutor;
+    private DatasetLifecycleManager indexLifecycleManager;
+    private IFileMapManager fileMapManager;
+    private IBufferCache bufferCache;
+    private ITransactionSubsystem txnSubsystem;
+
+    private ILSMIOOperationScheduler lsmIOScheduler;
+    private ILocalResourceRepository localResourceRepository;
+    private ResourceIdFactory resourceIdFactory;
+    private IIOManager ioManager;
+    private boolean isShuttingdown;
+
+    private IFeedManager feedManager;
+
+    public AsterixAppRuntimeContext(INCApplicationContext ncApplicationContext) throws AsterixException {
+        this.ncApplicationContext = ncApplicationContext;
+        compilerProperties = new AsterixCompilerProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        externalProperties = new AsterixExternalProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        metadataProperties = new AsterixMetadataProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        storageProperties = new AsterixStorageProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        txnProperties = new AsterixTransactionProperties(ASTERIX_PROPERTIES_ACCESSOR);
+        feedProperties = new AsterixFeedProperties(ASTERIX_PROPERTIES_ACCESSOR);
+    }
+
+    public void initialize() throws IOException, ACIDException, AsterixException {
+        Logger.getLogger("edu.uci.ics").setLevel(externalProperties.getLogLevel());
+
+        threadExecutor = new AsterixThreadExecutor(ncApplicationContext.getThreadFactory());
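+        // On-disk page management: a clock-replacement buffer cache sized from the storage properties,
+        // with page cleaning delayed by 600000 ms.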
+        fileMapManager = new AsterixFileMapManager();
+        ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+        IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
+        ioManager = ncApplicationContext.getRootContext().getIOManager();
+        IPageReplacementStrategy prs = new ClockPageReplacementStrategy(allocator,
+                storageProperties.getBufferCachePageSize(), storageProperties.getBufferCacheNumPages());
+        bufferCache = new BufferCache(ioManager, prs, pcp, fileMapManager,
+                storageProperties.getBufferCacheMaxOpenFiles(), ncApplicationContext.getThreadFactory());
+
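+        // LSM flush and merge I/O is dispatched through a single shared asynchronous scheduler.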
+        AsynchronousScheduler.INSTANCE.init(ncApplicationContext.getThreadFactory());
+        lsmIOScheduler = AsynchronousScheduler.INSTANCE;
+
+        metadataMergePolicyFactory = new PrefixMergePolicyFactory();
+
+        ILocalResourceRepositoryFactory persistentLocalResourceRepositoryFactory = new PersistentLocalResourceRepositoryFactory(
+                ioManager);
+        localResourceRepository = (PersistentLocalResourceRepository) persistentLocalResourceRepositoryFactory
+                .createRepository();
+        resourceIdFactory = (new ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
+
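+        // The transaction subsystem reaches back into this runtime context (e.g. during recovery)
+        // through the provider created here.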
+        IAsterixAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AsterixAppRuntimeContextProdiverForRecovery(
+                this);
+        txnSubsystem = new TransactionSubsystem(ncApplicationContext.getNodeId(), asterixAppRuntimeContextProvider,
+                txnProperties);
+
+        indexLifecycleManager = new DatasetLifecycleManager(storageProperties, localResourceRepository,
+                MetadataPrimaryIndexes.FIRST_AVAILABLE_USER_DATASET_ID, (LogManager) txnSubsystem.getLogManager());
+
+        isShuttingdown = false;
+
+        feedManager = new FeedManager(ncApplicationContext.getNodeId(), feedProperties,
+                compilerProperties.getFrameSize());
+
+        // The order of registration is important. The buffer cache must be registered before the recovery and transaction managers.
+        ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
+        lccm.register((ILifeCycleComponent) bufferCache);
+        lccm.register((ILifeCycleComponent) txnSubsystem.getTransactionManager());
+        lccm.register((ILifeCycleComponent) txnSubsystem.getLogManager());
+        lccm.register((ILifeCycleComponent) indexLifecycleManager);
+        lccm.register((ILifeCycleComponent) txnSubsystem.getLockManager());
+        lccm.register((ILifeCycleComponent) txnSubsystem.getRecoveryManager());
+    }
+
+    public boolean isShuttingdown() {
+        return isShuttingdown;
+    }
+
+    public void setShuttingdown(boolean isShuttingdown) {
+        this.isShuttingdown = isShuttingdown;
+    }
+
+    public void deinitialize() throws HyracksDataException {
+    }
+
+    public IBufferCache getBufferCache() {
+        return bufferCache;
+    }
+
+    public IFileMapProvider getFileMapManager() {
+        return fileMapManager;
+    }
+
+    public ITransactionSubsystem getTransactionSubsystem() {
+        return txnSubsystem;
+    }
+
+    public IIndexLifecycleManager getIndexLifecycleManager() {
+        return indexLifecycleManager;
+    }
+
+    public double getBloomFilterFalsePositiveRate() {
+        return storageProperties.getBloomFilterFalsePositiveRate();
+    }
+
+    public ILSMIOOperationScheduler getLSMIOScheduler() {
+        return lsmIOScheduler;
+    }
+
+    public ILocalResourceRepository getLocalResourceRepository() {
+        return localResourceRepository;
+    }
+
+    public ResourceIdFactory getResourceIdFactory() {
+        return resourceIdFactory;
+    }
+
+    public IIOManager getIOManager() {
+        return ioManager;
+    }
+
+    public int getMetaDataIODeviceId() {
+        return METADATA_IO_DEVICE_ID;
+    }
+
+    @Override
+    public AsterixStorageProperties getStorageProperties() {
+        return storageProperties;
+    }
+
+    @Override
+    public AsterixTransactionProperties getTransactionProperties() {
+        return txnProperties;
+    }
+
+    @Override
+    public AsterixCompilerProperties getCompilerProperties() {
+        return compilerProperties;
+    }
+
+    @Override
+    public AsterixMetadataProperties getMetadataProperties() {
+        return metadataProperties;
+    }
+
+    @Override
+    public AsterixExternalProperties getExternalProperties() {
+        return externalProperties;
+    }
+
+    @Override
+    public AsterixFeedProperties getFeedProperties() {
+        return feedProperties;
+    }
+
+    @Override
+    public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
+        return indexLifecycleManager.getVirtualBufferCaches(datasetID);
+    }
+
+    @Override
+    public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
+        return indexLifecycleManager.getOperationTracker(datasetID);
+    }
+
+    @Override
+    public AsterixThreadExecutor getThreadExecutor() {
+        return threadExecutor;
+    }
+
+    public ILSMMergePolicyFactory getMetadataMergePolicyFactory() {
+        return metadataMergePolicyFactory;
+    }
+
+    @Override
+    public IFeedManager getFeedManager() {
+        return feedManager;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
new file mode 100644
index 0000000..938a8b9
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContextProdiverForRecovery.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.common.api.AsterixThreadExecutor;
+import edu.uci.ics.asterix.common.api.IAsterixAppRuntimeContext;
+import edu.uci.ics.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
+import edu.uci.ics.asterix.common.transactions.ITransactionSubsystem;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
+
+public class AsterixAppRuntimeContextProdiverForRecovery implements IAsterixAppRuntimeContextProvider {
+
+    private final AsterixAppRuntimeContext asterixAppRuntimeContext;
+
+    public AsterixAppRuntimeContextProdiverForRecovery(AsterixAppRuntimeContext asterixAppRuntimeContext) {
+        this.asterixAppRuntimeContext = asterixAppRuntimeContext;
+    }
+
+    @Override
+    public IBufferCache getBufferCache() {
+        return asterixAppRuntimeContext.getBufferCache();
+    }
+
+    @Override
+    public IFileMapProvider getFileMapManager() {
+        return asterixAppRuntimeContext.getFileMapManager();
+    }
+
+    @Override
+    public ITransactionSubsystem getTransactionSubsystem() {
+        return asterixAppRuntimeContext.getTransactionSubsystem();
+    }
+
+    @Override
+    public IIndexLifecycleManager getIndexLifecycleManager() {
+        return asterixAppRuntimeContext.getIndexLifecycleManager();
+    }
+
+    @Override
+    public double getBloomFilterFalsePositiveRate() {
+        return asterixAppRuntimeContext.getBloomFilterFalsePositiveRate();
+    }
+
+    @Override
+    public ILSMIOOperationScheduler getLSMIOScheduler() {
+        return asterixAppRuntimeContext.getLSMIOScheduler();
+    }
+
+    @Override
+    public ILocalResourceRepository getLocalResourceRepository() {
+        return asterixAppRuntimeContext.getLocalResourceRepository();
+    }
+
+    @Override
+    public ResourceIdFactory getResourceIdFactory() {
+        return asterixAppRuntimeContext.getResourceIdFactory();
+    }
+
+    @Override
+    public IIOManager getIOManager() {
+        return asterixAppRuntimeContext.getIOManager();
+    }
+
+    @Override
+    public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
+        return asterixAppRuntimeContext.getVirtualBufferCaches(datasetID);
+    }
+
+    @Override
+    public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
+        return asterixAppRuntimeContext.getLSMBTreeOperationTracker(datasetID);
+    }
+
+    @Override
+    public IAsterixAppRuntimeContext getAppContext() {
+        return asterixAppRuntimeContext;
+    }
+
+    @Override
+    public AsterixThreadExecutor getThreadExecutor() {
+        return asterixAppRuntimeContext.getThreadExecutor();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java
new file mode 100644
index 0000000..9c3223e
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixClientConfig.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.kohsuke.args4j.Argument;
+import org.kohsuke.args4j.Option;
+
+public class AsterixClientConfig {
+    @Option(name = "-optimize", usage = "Turns compiler optimizations on (if set to true) or off (if set to false). It is true by default.")
+    public String optimize = "true";
+
+    @Option(name = "-only-physical", usage = "Prints only the physical annotations, not the entire operators. It is false by default.")
+    public String onlyPhysical = "false";
+
+    @Option(name = "-execute", usage = "Executes the job produced by the compiler. It is false by default.")
+    public String execute = "false";
+
+    @Option(name = "-hyracks-job", usage = "Generates and prints the Hyracks job. It is false by default.")
+    public String hyracksJob = "false";
+
+    @Option(name = "-hyracks-port", usage = "The port used to connect to the Hyracks server.")
+    public int hyracksPort = AsterixHyracksIntegrationUtil.DEFAULT_HYRACKS_CC_CLIENT_PORT;
+
+    @Argument
+    private List<String> arguments = new ArrayList<String>();
+
+    public List<String> getArguments() {
+        return arguments;
+    }
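+
+    // Minimal usage sketch (assumed, not part of this file): the fields above are populated by an
+    // args4j CmdLineParser (org.kohsuke.args4j.CmdLineParser), e.g.
+    //     AsterixClientConfig config = new AsterixClientConfig();
+    //     new CmdLineParser(config).parseArgument(args);
+    // after which getArguments() holds the positional (non-option) arguments.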
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/34d81630/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
new file mode 100644
index 0000000..91aa897
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.api.common;
+
+import java.io.File;
+import java.util.EnumSet;
+
+import edu.uci.ics.asterix.common.config.GlobalConfig;
+import edu.uci.ics.asterix.hyracks.bootstrap.CCApplicationEntryPoint;
+import edu.uci.ics.asterix.hyracks.bootstrap.NCApplicationEntryPoint;
+import edu.uci.ics.hyracks.api.client.HyracksConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.job.JobFlag;
+import edu.uci.ics.hyracks.api.job.JobId;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+public class AsterixHyracksIntegrationUtil {
+
+    public static final int NODES = 2;
+    public static final int PARTITONS = 2;
+
+    public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
+
+    public static final int DEFAULT_HYRACKS_CC_CLUSTER_PORT = 1099;
+
+    private static ClusterControllerService cc;
+    private static NodeControllerService[] ncs = new NodeControllerService[NODES];
+    private static IHyracksClientConnection hcc;
+
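+    // Brings up an in-process cluster: one ClusterControllerService plus NODES NodeControllerServices,
+    // all bound to 127.0.0.1, and opens a Hyracks client connection to the CC.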
+    public static void init() throws Exception {
+        CCConfig ccConfig = new CCConfig();
+        ccConfig.clusterNetIpAddress = "127.0.0.1";
+        ccConfig.clientNetIpAddress = "127.0.0.1";
+        ccConfig.clientNetPort = DEFAULT_HYRACKS_CC_CLIENT_PORT;
+        ccConfig.clusterNetPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
+        ccConfig.defaultMaxJobAttempts = 0;
+        ccConfig.resultTTL = 30000;
+        ccConfig.resultSweepThreshold = 1000;
+        ccConfig.appCCMainClass = CCApplicationEntryPoint.class.getName();
+        // ccConfig.useJOL = true;
+        cc = new ClusterControllerService(ccConfig);
+        cc.start();
+
+        int n = 0;
+        for (String ncName : getNcNames()) {
+            NCConfig ncConfig1 = new NCConfig();
+            ncConfig1.ccHost = "localhost";
+            ncConfig1.ccPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
+            ncConfig1.clusterNetIPAddress = "127.0.0.1";
+            ncConfig1.dataIPAddress = "127.0.0.1";
+            ncConfig1.resultIPAddress = "127.0.0.1";
+            ncConfig1.nodeId = ncName;
+            ncConfig1.resultTTL = 30000;
+            ncConfig1.resultSweepThreshold = 1000;
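+            // Build a comma-separated list of iodevice paths under java.io.tmpdir, one per partition.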
+            for (int p = 0; p < PARTITONS; ++p) {
+                if (p == 0) {
+                    ncConfig1.ioDevices = System.getProperty("java.io.tmpdir") + File.separator + ncConfig1.nodeId
+                            + "/iodevice" + p;
+                } else {
+                    ncConfig1.ioDevices += "," + System.getProperty("java.io.tmpdir") + File.separator
+                            + ncConfig1.nodeId + "/iodevice" + p;
+                }
+            }
+            ncConfig1.appNCMainClass = NCApplicationEntryPoint.class.getName();
+            ncs[n] = new NodeControllerService(ncConfig1);
+            ncs[n].start();
+            ++n;
+        }
+
+        hcc = new HyracksConnection(cc.getConfig().clientNetIpAddress, cc.getConfig().clientNetPort);
+    }
+
+    public static String[] getNcNames() {
+        String[] names = new String[NODES];
+        for (int n = 0; n < NODES; ++n) {
+            names[n] = "nc" + (n + 1);
+        }
+        return names;
+    }
+
+    public static String[] getDataDirs() {
+        String[] names = new String[NODES];
+        for (int n = 0; n < NODES; ++n) {
+            names[n] = "nc" + (n + 1) + "data";
+        }
+        return names;
+    }
+
+    public static IHyracksClientConnection getHyracksClientConnection() {
+        return hcc;
+    }
+
+    public static void deinit() throws Exception {
+        for (int n = 0; n < ncs.length; ++n) {
+            if (ncs[n] != null) {
+                ncs[n].stop();
+            }
+        }
+        if (cc != null) {
+            cc.stop();
+        }
+    }
+
+    public static void runJob(JobSpecification spec) throws Exception {
+        GlobalConfig.ASTERIX_LOGGER.info(spec.toJSON().toString());
+        JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        GlobalConfig.ASTERIX_LOGGER.info(jobId.toString());
+        hcc.waitForCompletion(jobId);
+    }
+
+    /**
+     * main method to run a simple 2 node cluster in-process
+     * suggested VM arguments: <code>-enableassertions -Xmx2048m -Dfile.encoding=UTF-8</code>
+     *
+     * @param args
+     *            unused
+     */
+    public static void main(String[] args) {
+        Runtime.getRuntime().addShutdownHook(new Thread() {
+            public void run() {
+                try {
+                    deinit();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        });
+        try {
+            System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, "asterix-build-configuration.xml");
+
+            init();
+            while (true) {
+                Thread.sleep(10000);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+}