Posted to commits@asterixdb.apache.org by am...@apache.org on 2016/01/14 21:31:55 UTC

[01/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Repository: incubator-asterixdb
Updated Branches:
  refs/heads/master dace5f2f6 -> e800e6d5f


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/SocketMessageListener.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/SocketMessageListener.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/SocketMessageListener.java
deleted file mode 100644
index ef1d242..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/SocketMessageListener.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.nio.CharBuffer;
-import java.util.concurrent.Executor;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IMessageReceiver;
-
-/**
- * Listens for messages at a configured port and redirects them to an
- * instance of {@code IMessageReceiver}.
- * Messages may arrive in parallel from multiple senders. Each sender is handled by
- * a dedicated instance of {@code ClientHandler}.
- */
-public class SocketMessageListener {
-
-    private static final Logger LOGGER = Logger.getLogger(SocketMessageListener.class.getName());
-
-    private final IMessageReceiver<String> messageReceiver;
-    private final MessageListenerServer listenerServer;
-
-    private ExecutorService executorService = Executors.newFixedThreadPool(10);
-
-    public SocketMessageListener(int port, IMessageReceiver<String> messageReceiver) {
-        this.messageReceiver = messageReceiver;
-        this.listenerServer = new MessageListenerServer(port, messageReceiver);
-    }
-
-    public void stop() throws IOException {
-        listenerServer.stop();
-        messageReceiver.close(false);
-        if (!executorService.isShutdown()) {
-            executorService.shutdownNow();
-        }
-    }
-
-    public void start() {
-        messageReceiver.start();
-        executorService.execute(listenerServer);
-    }
-
-    private static class MessageListenerServer implements Runnable {
-
-        private final int port;
-        private final IMessageReceiver<String> messageReceiver;
-        private ServerSocket server;
-        private final Executor executor;
-
-        public MessageListenerServer(int port, IMessageReceiver<String> messageReceiver) {
-            this.port = port;
-            this.messageReceiver = messageReceiver;
-            this.executor = Executors.newCachedThreadPool();
-        }
-
-        public void stop() throws IOException {
-            server.close();
-        }
-
-        @Override
-        public void run() {
-            Socket client = null;
-            try {
-                server = new ServerSocket(port);
-                while (true) {
-                    client = server.accept();
-                    ClientHandler handler = new ClientHandler(client, messageReceiver);
-                    executor.execute(handler);
-                }
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unable to start message listener " + server);
-                }
-            } finally {
-                if (server != null) {
-                    try {
-                        server.close();
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-        }
-
-        private static class ClientHandler implements Runnable {
-
-            private static final char EOL = (char) "\n".getBytes()[0];
-
-            private final Socket client;
-            private final IMessageReceiver<String> messageReceiver;
-
-            public ClientHandler(Socket client, IMessageReceiver<String> messageReceiver) {
-                this.client = client;
-                this.messageReceiver = messageReceiver;
-            }
-
-            @Override
-            public void run() {
-                try {
-                    InputStream in = client.getInputStream();
-                    CharBuffer buffer = CharBuffer.allocate(5000);
-                    char ch;
-                    while (true) {
-                        ch = (char) in.read();
-                        if ((ch) == -1) {
-                            break;
-                        }
-                        while (ch != EOL) {
-                            buffer.put(ch);
-                            ch = (char) in.read();
-                        }
-                        buffer.flip();
-                        String s = new String(buffer.array(), 0, buffer.limit());
-                        messageReceiver.sendMessage(s + "\n");
-                        buffer.position(0);
-                        buffer.limit(5000);
-                    }
-                } catch (Exception e) {
-                    e.printStackTrace();
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Unable to process messages from client " + client);
-                    }
-                } finally {
-                    if (client != null) {
-                        try {
-                            client.close();
-                        } catch (Exception e) {
-                            e.printStackTrace();
-                        }
-                    }
-                }
-            }
-        }
-
-    }
-}
\ No newline at end of file
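
For reference, the removed SocketMessageListener expects newline-delimited text
messages over a plain TCP socket, one message per line. A minimal, hypothetical
sender for that protocol (class name, host, and port are illustrative and not
part of this commit):

    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    // Hypothetical client for the newline-delimited protocol read by ClientHandler.
    public class NewlineMessageSender {
        public static void main(String[] args) throws Exception {
            try (Socket socket = new Socket("127.0.0.1", 9090);
                    Writer out = new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8)) {
                out.write("first message\n");   // each '\n' terminates one message
                out.write("second message\n");
                out.flush();
            }
        }
    }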

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/TerminateDataFlowMessage.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/TerminateDataFlowMessage.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/TerminateDataFlowMessage.java
deleted file mode 100644
index aaf20c5..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/TerminateDataFlowMessage.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.metadata.feeds;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.message.FeedMessage;
-
-public class TerminateDataFlowMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-
-    public TerminateDataFlowMessage(FeedConnectionId connectionId) {
-        super(MessageType.TERMINATE_FLOW);
-        this.connectionId = connectionId;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        return obj;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/XAQLFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/XAQLFeedMessage.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/XAQLFeedMessage.java
deleted file mode 100644
index d6f7e0a..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/XAQLFeedMessage.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.message.FeedMessage;
-
-/**
- * A feed control message indicating the need to execute a given AQL statement.
- */
-public class XAQLFeedMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final String aql;
-    private final FeedConnectionId connectionId;
-
-    public XAQLFeedMessage(FeedConnectionId connectionId, String aql) {
-        super(MessageType.XAQL);
-        this.connectionId = connectionId;
-        this.aql = aql;
-    }
-
-    @Override
-    public String toString() {
-        return messageType.name() + " " + connectionId + " [" + aql + "] ";
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public String getAql() {
-        return aql;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.AQL, aql);
-        return obj;
-    }
-
-}
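
The toJSON implementation above serializes the message type, the dataverse, feed,
and dataset of the connection, and the AQL string. A standalone sketch of the same
pattern using org.json; the key names are placeholders, since the actual
FeedConstants.MessageConstants values are not shown in this diff:

    import org.json.JSONException;
    import org.json.JSONObject;

    public class FeedMessageJsonSketch {
        public static void main(String[] args) throws JSONException {
            // Placeholder keys; the real ones come from FeedConstants.MessageConstants.
            JSONObject obj = new JSONObject();
            obj.put("message-type", "XAQL");
            obj.put("dataverse", "feeds");
            obj.put("feed", "TwitterFeed");
            obj.put("dataset", "Tweets");
            obj.put("aql", "use dataverse feeds;");
            System.out.println(obj.toString());
        }
    }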

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
index f7a0e5d..f011f04 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
@@ -26,6 +26,7 @@ import java.util.Map;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.RecordBuilder;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.context.CorrelatedPrefixMergePolicyFactory;
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -35,7 +36,6 @@ import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.asterix.metadata.entities.CompactionPolicy;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.ExternalDatasetDetails;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
index 7c32bdf..b6f3c9e 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
@@ -23,40 +23,34 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.asterix.common.cluster.ClusterPartition;
+import org.apache.asterix.common.config.MetadataConstants;
+import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.om.util.AsterixClusterProperties;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.io.FileReference;
-import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 
 public class SplitsAndConstraintsUtil {
 
-    public static final String PARTITION_DIR_PREFIX = "partition_";
-    public static final String TEMP_DATASETS_STORAGE_FOLDER = "temp";
-    public static final String DATASET_INDEX_NAME_SEPARATOR = "_idx_";
-
     private static FileSplit[] splitsForDataverse(String dataverseName) {
         File relPathFile = new File(dataverseName);
         List<FileSplit> splits = new ArrayList<FileSplit>();
-        //get all partitions
+        // get all partitions
         ClusterPartition[] clusterPartition = AsterixClusterProperties.INSTANCE.getClusterPartitons();
         String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
         for (int j = 0; j < clusterPartition.length; j++) {
             int nodeParitions = AsterixClusterProperties.INSTANCE
                     .getNodePartitionsCount(clusterPartition[j].getNodeId());
             for (int i = 0; i < nodeParitions; i++) {
-                File f = new File(prepareStoragePartitionPath(storageDirName, clusterPartition[i].getPartitionId())
-                        + File.separator + relPathFile);
-                splits.add(getFileSplitForClusterPartition(clusterPartition[j], f));
+                File f = new File(StoragePathUtil.prepareStoragePartitionPath(storageDirName,
+                        clusterPartition[i].getPartitionId()) + File.separator + relPathFile);
+                splits.add(StoragePathUtil.getFileSplitForClusterPartition(clusterPartition[j], f));
             }
         }
         return splits.toArray(new FileSplit[] {});
@@ -65,7 +59,8 @@ public class SplitsAndConstraintsUtil {
     public static FileSplit[] splitsForDataset(MetadataTransactionContext mdTxnCtx, String dataverseName,
             String datasetName, String targetIdxName, boolean temp) throws AlgebricksException {
         try {
-            File relPathFile = new File(prepareDataverseIndexName(dataverseName, datasetName, targetIdxName));
+            File relPathFile = new File(
+                    StoragePathUtil.prepareDataverseIndexName(dataverseName, datasetName, targetIdxName));
             Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
             List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName())
                     .getNodeNames();
@@ -78,18 +73,18 @@ public class SplitsAndConstraintsUtil {
             for (String nd : nodeGroup) {
                 int numPartitions = AsterixClusterProperties.INSTANCE.getNodePartitionsCount(nd);
                 ClusterPartition[] nodePartitions = AsterixClusterProperties.INSTANCE.getNodePartitions(nd);
-                //currently this case is never executed since the metadata group doesn't exists
+                // currently this case is never executed since the metadata group doesn't exist
                 if (dataset.getNodeGroupName().compareTo(MetadataConstants.METADATA_NODEGROUP_NAME) == 0) {
                     numPartitions = 1;
                 }
 
                 for (int k = 0; k < numPartitions; k++) {
-                    //format: 'storage dir name'/partition_#/dataverse/dataset_idx_index
-                    //temp format: 'storage dir name'/temp/partition_#/dataverse/dataset_idx_index
-                    File f = new File(prepareStoragePartitionPath(
-                            storageDirName + (temp ? (File.separator + TEMP_DATASETS_STORAGE_FOLDER) : ""),
-                            nodePartitions[k].getPartitionId()) + File.separator + relPathFile);
-                    splits.add(getFileSplitForClusterPartition(nodePartitions[k], f));
+                    // format: 'storage dir name'/partition_#/dataverse/dataset_idx_index
+                    File f = new File(StoragePathUtil.prepareStoragePartitionPath(storageDirName,
+                            nodePartitions[k].getPartitionId())
+                            + (temp ? (File.separator + StoragePathUtil.TEMP_DATASETS_STORAGE_FOLDER) : "")
+                            + File.separator + relPathFile);
+                    splits.add(StoragePathUtil.getFileSplitForClusterPartition(nodePartitions[k], f));
                 }
             }
             return splits.toArray(new FileSplit[] {});
@@ -101,7 +96,8 @@ public class SplitsAndConstraintsUtil {
     private static FileSplit[] splitsForFilesIndex(MetadataTransactionContext mdTxnCtx, String dataverseName,
             String datasetName, String targetIdxName, boolean create) throws AlgebricksException {
         try {
-            File relPathFile = new File(prepareDataverseIndexName(dataverseName, datasetName, targetIdxName));
+            File relPathFile = new File(
+                    StoragePathUtil.prepareDataverseIndexName(dataverseName, datasetName, targetIdxName));
             Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
             List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName())
                     .getNodeNames();
@@ -111,21 +107,20 @@ public class SplitsAndConstraintsUtil {
 
             List<FileSplit> splits = new ArrayList<FileSplit>();
             for (String nodeId : nodeGroup) {
-                //get node partitions
+                // get node partitions
                 ClusterPartition[] nodePartitions = AsterixClusterProperties.INSTANCE.getNodePartitions(nodeId);
                 String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
                 int firstPartition = 0;
                 if (create) {
                     // Only the first partition when create
-                    File f = new File(
-                            prepareStoragePartitionPath(storageDirName, nodePartitions[firstPartition].getPartitionId())
-                                    + File.separator + relPathFile);
-                    splits.add(getFileSplitForClusterPartition(nodePartitions[firstPartition], f));
+                    File f = new File(StoragePathUtil.prepareStoragePartitionPath(storageDirName,
+                            nodePartitions[firstPartition].getPartitionId()) + File.separator + relPathFile);
+                    splits.add(StoragePathUtil.getFileSplitForClusterPartition(nodePartitions[firstPartition], f));
                 } else {
                     for (int k = 0; k < nodePartitions.length; k++) {
-                        File f = new File(prepareStoragePartitionPath(storageDirName,
+                        File f = new File(StoragePathUtil.prepareStoragePartitionPath(storageDirName,
                                 nodePartitions[firstPartition].getPartitionId()) + File.separator + relPathFile);
-                        splits.add(getFileSplitForClusterPartition(nodePartitions[firstPartition], f));
+                        splits.add(StoragePathUtil.getFileSplitForClusterPartition(nodePartitions[firstPartition], f));
                     }
                 }
             }
@@ -138,37 +133,13 @@ public class SplitsAndConstraintsUtil {
     public static Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForDataverse(
             String dataverse) {
         FileSplit[] splits = splitsForDataverse(dataverse);
-        return splitProviderAndPartitionConstraints(splits);
+        return StoragePathUtil.splitProviderAndPartitionConstraints(splits);
     }
 
     public static Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForFilesIndex(
             MetadataTransactionContext mdTxnCtx, String dataverseName, String datasetName, String targetIdxName,
             boolean create) throws AlgebricksException {
         FileSplit[] splits = splitsForFilesIndex(mdTxnCtx, dataverseName, datasetName, targetIdxName, create);
-        return splitProviderAndPartitionConstraints(splits);
-    }
-
-    public static Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraints(
-            FileSplit[] splits) {
-        IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
-        String[] loc = new String[splits.length];
-        for (int p = 0; p < splits.length; p++) {
-            loc[p] = splits[p].getNodeName();
-        }
-        AlgebricksPartitionConstraint pc = new AlgebricksAbsolutePartitionConstraint(loc);
-        return new Pair<IFileSplitProvider, AlgebricksPartitionConstraint>(splitProvider, pc);
-    }
-
-    private static FileSplit getFileSplitForClusterPartition(ClusterPartition partition, File relativeFile) {
-        return new FileSplit(partition.getActiveNodeId(), new FileReference(relativeFile), partition.getIODeviceNum(),
-                partition.getPartitionId());
-    }
-
-    public static String prepareStoragePartitionPath(String storageDirName, int partitonId) {
-        return storageDirName + File.separator + PARTITION_DIR_PREFIX + partitonId;
-    }
-
-    private static String prepareDataverseIndexName(String dataverseName, String datasetName, String idxName) {
-        return dataverseName + File.separator + datasetName + DATASET_INDEX_NAME_SEPARATOR + idxName;
+        return StoragePathUtil.splitProviderAndPartitionConstraints(splits);
     }
 }
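
For context, the path layout produced by the refactored helpers (now in
StoragePathUtil) is the one described in the comments above:
'storage dir name'/partition_#/dataverse/dataset_idx_index, with temporary
datasets getting an extra temp path component. A self-contained sketch built from
the constants visible in the removed code; the actual StoragePathUtil signatures
are assumed to mirror the deleted helpers:

    import java.io.File;

    public class StorageLayoutSketch {
        // Mirrors the constants removed from SplitsAndConstraintsUtil.
        private static final String PARTITION_DIR_PREFIX = "partition_";
        private static final String DATASET_INDEX_NAME_SEPARATOR = "_idx_";

        static String partitionPath(String storageDirName, int partitionId) {
            return storageDirName + File.separator + PARTITION_DIR_PREFIX + partitionId;
        }

        static String dataverseIndexName(String dataverse, String dataset, String index) {
            return dataverse + File.separator + dataset + DATASET_INDEX_NAME_SEPARATOR + index;
        }

        public static void main(String[] args) {
            // e.g. storage/partition_0/feeds/Tweets_idx_Tweets on a Unix file system
            System.out.println(partitionPath("storage", 0) + File.separator
                    + dataverseIndexName("feeds", "Tweets", "Tweets"));
        }
    }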

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixClusterProperties.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixClusterProperties.java b/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixClusterProperties.java
index 95eea63..80008c5 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixClusterProperties.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixClusterProperties.java
@@ -77,7 +77,7 @@ public class AsterixClusterProperties {
         } else {
             cluster = null;
         }
-        //if this is the CC process
+        // if this is the CC process
         if (AsterixAppContextInfo.getInstance() != null) {
             if (AsterixAppContextInfo.getInstance().getCCApplicationContext() != null) {
                 node2PartitionsMap = AsterixAppContextInfo.getInstance().getMetadataProperties().getNodePartitions();
@@ -94,11 +94,11 @@ public class AsterixClusterProperties {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(" Removing configuration parameters for node id " + nodeId);
         }
-        //TODO implement fault tolerance as follows:
-        //1. collect the partitions of the failed NC
-        //2. For each partition, request a remote replica to take over. 
-        //3. wait until each remote replica completes the recovery for the lost partitions
-        //4. update the cluster state
+        // TODO implement fault tolerance as follows:
+        // 1. collect the partitions of the failed NC
+        // 2. For each partition, request a remote replica to take over.
+        // 3. wait until each remote replica completes the recovery for the lost partitions
+        // 4. update the cluster state
     }
 
     public synchronized void addNCConfiguration(String nodeId, Map<String, String> configuration) {
@@ -111,10 +111,10 @@ public class AsterixClusterProperties {
 
     private synchronized void updateNodePartitions(String nodeId, boolean added) {
         ClusterPartition[] nodePartitions = node2PartitionsMap.get(nodeId);
-        //if this isn't a storage node, it will not have cluster partitions
+        // if this isn't a storage node, it will not have cluster partitions
         if (nodePartitions != null) {
             for (ClusterPartition p : nodePartitions) {
-                //set the active node for this node's partitions
+                // set the active node for this node's partitions
                 p.setActive(added);
                 if (added) {
                     p.setActiveNodeId(nodeId);
@@ -135,14 +135,13 @@ public class AsterixClusterProperties {
                 return;
             }
         }
-        //if all storage partitions are active, then the cluster is active
+        // if all storage partitions are active, then the cluster is active
         state = ClusterState.ACTIVE;
         LOGGER.info("Cluster is now ACTIVE");
     }
 
     /**
      * Returns the number of IO devices configured for a Node Controller
-     *
      * @param nodeId
      *            unique identifier of the Node Controller
      * @return number of IO devices. -1 if the node id is not valid. A node id
@@ -156,7 +155,6 @@ public class AsterixClusterProperties {
 
     /**
      * Returns the IO devices configured for a Node Controller
-     *
      * @param nodeId
      *            unique identifier of the Node Controller
      * @return a list of IO devices. null if node id is not valid. A node id is not valid
@@ -223,7 +221,7 @@ public class AsterixClusterProperties {
 
     public static boolean isClusterActive() {
         if (AsterixClusterProperties.INSTANCE.getCluster() == null) {
-            //this is a virtual cluster
+            // this is a virtual cluster
             return true;
         }
         return AsterixClusterProperties.INSTANCE.getState() == ClusterState.ACTIVE;
@@ -256,7 +254,7 @@ public class AsterixClusterProperties {
         if (cluster != null) {
             return cluster.getStore();
         }
-        //virtual cluster without cluster config file
+        // virtual cluster without cluster config file
         return DEFAULT_STORAGE_DIR_NAME;
     }
 }


[08/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapter.java
deleted file mode 100644
index db38c12..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapter.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.runtime;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.OutputStream;
-import java.net.Socket;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.hyracks.api.comm.IFrameWriter;
-
-public class SocketClientAdapter implements IDataSourceAdapter {
-
-    private static final long serialVersionUID = 1L;
-
-    private static final Logger LOGGER = Logger.getLogger(SocketClientAdapter.class.getName());
-
-    private static final String LOCALHOST = "127.0.0.1";
-
-    private static final long RECONNECT_PERIOD = 2000;
-
-    private final String localFile;
-
-    private final int port;
-
-    private boolean continueStreaming = true;
-
-    public SocketClientAdapter(Integer port, String localFile) {
-        this.localFile = localFile;
-        this.port = port;
-    }
-
-    @Override
-    public void start(int partition, IFrameWriter writer) throws Exception {
-        Socket socket = waitForReceiver();
-        OutputStream os = socket.getOutputStream();
-        FileInputStream fin = new FileInputStream(new File(localFile));
-        byte[] chunk = new byte[1024];
-        int read;
-        try {
-            while (continueStreaming) {
-                read = fin.read(chunk);
-                if (read > 0) {
-                    os.write(chunk, 0, read);
-                } else {
-                    break;
-                }
-            }
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Finished streaming file " + localFile + " to port [" + port + "]");
-            }
-
-        } finally {
-            socket.close();
-            fin.close();
-        }
-
-    }
-
-    private Socket waitForReceiver() throws Exception {
-        Socket socket = null;
-        while (socket == null) {
-            try {
-                socket = new Socket(LOCALHOST, port);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Receiver not ready, will wait for " + (RECONNECT_PERIOD / 1000)
-                            + " seconds before reconnecting");
-                }
-                Thread.sleep(RECONNECT_PERIOD);
-            }
-        }
-        return socket;
-    }
-
-    @Override
-    public boolean stop() throws Exception {
-        continueStreaming = false;
-        return true;
-    }
-
-    @Override
-    public boolean handleException(Throwable e) {
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapterFactory.java
deleted file mode 100644
index a1e90a8..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/SocketClientAdapterFactory.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.runtime;
-
-import java.util.Map;
-
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-
-public class SocketClientAdapterFactory implements IAdapterFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private ARecordType outputType;
-
-    private GenericSocketFeedAdapterFactory genericSocketAdapterFactory;
-
-    private String[] fileSplits;
-
-    public static final String KEY_FILE_SPLITS = "file_splits";
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
-        this.outputType = outputType;
-        String fileSplitsValue = configuration.get(KEY_FILE_SPLITS);
-        if (fileSplitsValue == null) {
-            throw new IllegalArgumentException(
-                    "File splits not specified. File splits are specified as a comma-separated list of paths");
-        }
-        fileSplits = fileSplitsValue.trim().split(",");
-        genericSocketAdapterFactory = new GenericSocketFeedAdapterFactory();
-        genericSocketAdapterFactory.configure(configuration, outputType);
-    }
-
-    @Override
-    public String getAlias() {
-        return ExternalDataConstants.ALIAS_SOCKET_CLIENT_ADAPTER;
-    }
-
-    @Override
-    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
-        return genericSocketAdapterFactory.getPartitionConstraint();
-    }
-
-    @Override
-    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        Pair<String, Integer> socket = genericSocketAdapterFactory.getSockets().get(partition);
-        return new SocketClientAdapter(socket.second, fileSplits[partition]);
-    }
-
-    @Override
-    public ARecordType getAdapterOutputType() {
-        return outputType;
-    }
-
-}
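
The removed SocketClientAdapterFactory reads a "file_splits" property containing a
comma-separated list of local file paths, one per partition, and otherwise delegates
to GenericSocketFeedAdapterFactory. A minimal sketch of that configuration
convention (the paths are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class FileSplitsConfigSketch {
        public static void main(String[] args) {
            Map<String, String> configuration = new HashMap<>();
            configuration.put("file_splits", "/data/part0.adm,/data/part1.adm");

            String value = configuration.get("file_splits");
            if (value == null) {
                throw new IllegalArgumentException(
                        "File splits not specified. File splits are specified as a comma-separated list of paths");
            }
            // One local file per partition, matching the removed factory's parsing.
            String[] fileSplits = value.trim().split(",");
            System.out.println(fileSplits.length + " partition(s) configured");
        }
    }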

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/TweetGenerator.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/TweetGenerator.java b/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/TweetGenerator.java
deleted file mode 100644
index b5fd454..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/TweetGenerator.java
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.runtime;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.external.runtime.DataGenerator.InitializationInfo;
-import org.apache.asterix.external.runtime.DataGenerator.TweetMessage;
-import org.apache.asterix.external.runtime.DataGenerator.TweetMessageIterator;
-
-public class TweetGenerator {
-
-    private static Logger LOGGER = Logger.getLogger(TweetGenerator.class.getName());
-
-    public static final String KEY_DURATION = "duration";
-    public static final String KEY_TPS = "tps";
-    public static final String KEY_VERBOSE = "verbose";
-    public static final String KEY_FIELDS = "fields";
-    public static final int INFINITY = 0;
-
-    private static final int DEFAULT_DURATION = INFINITY;
-
-    private int duration;
-    private TweetMessageIterator tweetIterator = null;
-    private int partition;
-    private long tweetCount = 0;
-    private int frameTweetCount = 0;
-    private int numFlushedTweets = 0;
-    private DataGenerator dataGenerator = null;
-    private ByteBuffer outputBuffer = ByteBuffer.allocate(32 * 1024);
-    private String[] fields;
-    private final List<OutputStream> subscribers;
-    private final Object lock = new Object();
-    private final List<OutputStream> subscribersForRemoval = new ArrayList<OutputStream>();
-
-    public TweetGenerator(Map<String, String> configuration, int partition) throws Exception {
-        this.partition = partition;
-        String value = configuration.get(KEY_DURATION);
-        this.duration = value != null ? Integer.parseInt(value) : DEFAULT_DURATION;
-        dataGenerator = new DataGenerator(new InitializationInfo());
-        tweetIterator = dataGenerator.new TweetMessageIterator(duration);
-        this.fields = configuration.get(KEY_FIELDS) != null ? configuration.get(KEY_FIELDS).split(",") : null;
-        this.subscribers = new ArrayList<OutputStream>();
-    }
-
-    private void writeTweetString(TweetMessage tweetMessage) throws IOException {
-        String tweet = tweetMessage.getAdmEquivalent(fields) + "\n";
-        System.out.println(tweet);
-        tweetCount++;
-        byte[] b = tweet.getBytes();
-        if (outputBuffer.position() + b.length > outputBuffer.limit()) {
-            flush();
-            numFlushedTweets += frameTweetCount;
-            frameTweetCount = 0;
-            outputBuffer.put(b);
-        } else {
-            outputBuffer.put(b);
-        }
-        frameTweetCount++;
-    }
-
-    private void flush() throws IOException {
-        outputBuffer.flip();
-        synchronized (lock) {
-            for (OutputStream os : subscribers) {
-                try {
-                    os.write(outputBuffer.array(), 0, outputBuffer.limit());
-                } catch (Exception e) {
-                    subscribersForRemoval.add(os);
-                }
-            }
-            if (!subscribersForRemoval.isEmpty()) {
-                subscribers.removeAll(subscribersForRemoval);
-                subscribersForRemoval.clear();
-            }
-        }
-        outputBuffer.position(0);
-        outputBuffer.limit(32 * 1024);
-    }
-
-    public boolean generateNextBatch(int numTweets) throws Exception {
-        boolean moreData = tweetIterator.hasNext();
-        if (!moreData) {
-            if (outputBuffer.position() > 0) {
-                flush();
-            }
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Reached end of batch. Tweet Count: [" + partition + "]" + tweetCount);
-            }
-            return false;
-        } else {
-            int count = 0;
-            while (count < numTweets) {
-                writeTweetString(tweetIterator.next());
-                count++;
-            }
-            return true;
-        }
-    }
-
-    public int getNumFlushedTweets() {
-        return numFlushedTweets;
-    }
-
-    public void registerSubscriber(OutputStream os) {
-        synchronized (lock) {
-            subscribers.add(os);
-        }
-    }
-
-    public void deregisterSubscribers(OutputStream os) {
-        synchronized (lock) {
-            subscribers.remove(os);
-        }
-    }
-
-    public void close() throws IOException {
-        synchronized (lock) {
-            for (OutputStream os : subscribers) {
-                os.close();
-            }
-        }
-    }
-
-    public boolean isSubscribed() {
-        return !subscribers.isEmpty();
-    }
-
-    public long getTweetCount() {
-        return tweetCount;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/DNSResolverFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DNSResolverFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DNSResolverFactory.java
deleted file mode 100644
index f8585bb..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DNSResolverFactory.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.util;
-
-import org.apache.asterix.external.api.INodeResolver;
-import org.apache.asterix.external.api.INodeResolverFactory;
-
-/**
- * Factory for creating instance of {@link NodeResolver}
- */
-public class DNSResolverFactory implements INodeResolverFactory {
-
-    private static final INodeResolver INSTANCE = new NodeResolver();
-
-    @Override
-    public INodeResolver createNodeResolver() {
-        return INSTANCE;
-    }
-
-}


[24/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java
new file mode 100644
index 0000000..b65d8b1
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
+import org.apache.asterix.file.FeedOperations;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedTrackingManager implements IFeedTrackingManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedTrackingManager.class.getName());
+
+    private final BitSet allOnes;
+
+    private Map<FeedConnectionId, Map<AckId, BitSet>> ackHistory;
+    private Map<FeedConnectionId, Map<AckId, Integer>> maxBaseAcked;
+
+    public FeedTrackingManager() {
+        byte[] allOneBytes = new byte[128];
+        Arrays.fill(allOneBytes, (byte) 0xff);
+        allOnes = BitSet.valueOf(allOneBytes);
+        ackHistory = new HashMap<FeedConnectionId, Map<AckId, BitSet>>();
+        maxBaseAcked = new HashMap<FeedConnectionId, Map<AckId, Integer>>();
+    }
+
+    @Override
+    public synchronized void submitAckReport(FeedTupleCommitAckMessage ackMessage) {
+        AckId ackId = getAckId(ackMessage);
+        Map<AckId, BitSet> acksForConnection = ackHistory.get(ackMessage.getConnectionId());
+        if (acksForConnection == null) {
+            acksForConnection = new HashMap<AckId, BitSet>();
+            acksForConnection.put(ackId, BitSet.valueOf(ackMessage.getCommitAcks()));
+            ackHistory.put(ackMessage.getConnectionId(), acksForConnection);
+        }
+        BitSet currentAcks = acksForConnection.get(ackId);
+        if (currentAcks == null) {
+            currentAcks = BitSet.valueOf(ackMessage.getCommitAcks());
+            acksForConnection.put(ackId, currentAcks);
+        } else {
+            currentAcks.or(BitSet.valueOf(ackMessage.getCommitAcks()));
+        }
+        if (Arrays.equals(currentAcks.toByteArray(), allOnes.toByteArray())) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(ackMessage.getIntakePartition() + " (" + ackMessage.getBase() + ")" + " is covered");
+            }
+            Map<AckId, Integer> maxBaseAckedForConnection = maxBaseAcked.get(ackMessage.getConnectionId());
+            if (maxBaseAckedForConnection == null) {
+                maxBaseAckedForConnection = new HashMap<AckId, Integer>();
+                maxBaseAcked.put(ackMessage.getConnectionId(), maxBaseAckedForConnection);
+            }
+            Integer maxBaseAckedValue = maxBaseAckedForConnection.get(ackId);
+            if (maxBaseAckedValue == null) {
+                maxBaseAckedValue = ackMessage.getBase();
+                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
+                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
+                        ackMessage.getBase());
+            } else if (ackMessage.getBase() == maxBaseAckedValue + 1) {
+                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
+                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
+                        ackMessage.getBase());
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Ignoring discontinuous acked base " + ackMessage.getBase() + " for " + ackId);
+                }
+            }
+
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("AckId " + ackId + " pending number of acks " + (128 * 8 - currentAcks.cardinality()));
+            }
+        }
+    }
+
+    public synchronized void disableTracking(FeedConnectionId connectionId) {
+        ackHistory.remove(connectionId);
+        maxBaseAcked.remove(connectionId);
+    }
+
+    private void sendCommitResponseMessage(FeedConnectionId connectionId, int partition, int base) {
+        FeedTupleCommitResponseMessage response = new FeedTupleCommitResponseMessage(connectionId, partition, base);
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+        String collectLocation = collectLocations.get(partition);
+        Set<String> messageDestinations = new HashSet<String>();
+        messageDestinations.add(collectLocation);
+        messageDestinations.addAll(storageLocations);
+        try {
+            JobSpecification spec = FeedOperations.buildCommitAckResponseJob(response, messageDestinations);
+            CentralFeedManager.runJob(spec, false);
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to send commit response message " + response + " exception " + e.getMessage());
+            }
+        }
+    }
+
+    private static AckId getAckId(FeedTupleCommitAckMessage ackMessage) {
+        return new AckId(ackMessage.getConnectionId(), ackMessage.getIntakePartition(), ackMessage.getBase());
+    }
+
+    private static class AckId {
+        private FeedConnectionId connectionId;
+        private int intakePartition;
+        private int base;
+
+        public AckId(FeedConnectionId connectionId, int intakePartition, int base) {
+            this.connectionId = connectionId;
+            this.intakePartition = intakePartition;
+            this.base = base;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (!(o instanceof AckId)) {
+                return false;
+            }
+            AckId other = (AckId) o;
+            return other.getConnectionId().equals(connectionId) && other.getIntakePartition() == intakePartition
+                    && other.getBase() == base;
+        }
+
+        @Override
+        public String toString() {
+            return connectionId + "[" + intakePartition + "]" + "(" + base + ")";
+        }
+
+        @Override
+        public int hashCode() {
+            return toString().hashCode();
+        }
+
+        public FeedConnectionId getConnectionId() {
+            return connectionId;
+        }
+
+        public int getIntakePartition() {
+            return intakePartition;
+        }
+
+        public int getBase() {
+            return base;
+        }
+
+    }
+
+    @Override
+    public void disableAcking(FeedConnectionId connectionId) {
+        ackHistory.remove(connectionId);
+        maxBaseAcked.remove(connectionId);
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Acking disabled for " + connectionId);
+        }
+    }
+
+}
\ No newline at end of file
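
The new FeedTrackingManager ORs each incoming ack bitmap into a running BitSet and
treats a base as fully acknowledged once that set equals a 128-byte all-ones mask
(128 * 8 bits, matching the pending-acks count logged above). A standalone sketch
of that coverage check:

    import java.util.Arrays;
    import java.util.BitSet;

    public class AckCoverageSketch {
        public static void main(String[] args) {
            // The all-ones mask used as the "everything acknowledged" reference.
            byte[] allOneBytes = new byte[128];
            Arrays.fill(allOneBytes, (byte) 0xff);
            BitSet allOnes = BitSet.valueOf(allOneBytes);

            // Two partial acknowledgements that together cover the whole range.
            byte[] firstHalf = new byte[128];
            byte[] secondHalf = new byte[128];
            Arrays.fill(firstHalf, 0, 64, (byte) 0xff);
            Arrays.fill(secondHalf, 64, 128, (byte) 0xff);

            BitSet current = BitSet.valueOf(firstHalf);
            current.or(BitSet.valueOf(secondHalf));

            boolean covered = Arrays.equals(current.toByteArray(), allOnes.toByteArray());
            System.out.println("covered = " + covered);  // prints: covered = true
        }
    }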

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java
new file mode 100644
index 0000000..9d746c8
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java
@@ -0,0 +1,206 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.api.IFeedWork;
+import org.apache.asterix.external.feed.api.IFeedWorkEventListener;
+import org.apache.asterix.external.feed.management.FeedCollectInfo;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest.ConnectionStatus;
+import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.lang.common.statement.DataverseDecl;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.api.job.JobId;
+
+/**
+ * A collection of feed management related tasks, each represented as an implementation of {@code IFeedWork}.
+ */
+public class FeedWorkCollection {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedWorkCollection.class.getName());
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    /**
+     * The task of subscribing to a feed to obtain data.
+     */
+    public static class SubscribeFeedWork implements IFeedWork {
+
+        private final Runnable runnable;
+
+        private final FeedConnectionRequest request;
+
+        @Override
+        public Runnable getRunnable() {
+            return runnable;
+        }
+
+        public SubscribeFeedWork(String[] locations, FeedConnectionRequest request) {
+            this.runnable = new SubscribeFeedWorkRunnable(locations, request);
+            this.request = request;
+        }
+
+        private static class SubscribeFeedWorkRunnable implements Runnable {
+
+            private final FeedConnectionRequest request;
+            private final String[] locations;
+
+            public SubscribeFeedWorkRunnable(String[] locations, FeedConnectionRequest request) {
+                this.request = request;
+                this.locations = locations;
+            }
+
+            @Override
+            public void run() {
+                try {
+                    PrintWriter writer = new PrintWriter(System.out, true);
+                    SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+                    DataverseDecl dataverseDecl = new DataverseDecl(
+                            new Identifier(request.getReceivingFeedId().getDataverse()));
+                    SubscribeFeedStatement subscribeStmt = new SubscribeFeedStatement(locations, request);
+                    List<Statement> statements = new ArrayList<Statement>();
+                    statements.add(dataverseDecl);
+                    statements.add(subscribeStmt);
+                    QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+                    translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                            QueryTranslator.ResultDelivery.SYNC);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Submitted connection requests for execution: " + request);
+                    }
+                } catch (Exception e) {
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Exception in executing " + request);
+                    }
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+
+        public static class FeedSubscribeWorkEventListener implements IFeedWorkEventListener {
+
+            @Override
+            public void workFailed(IFeedWork work, Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request
+                            + " failed with exception " + e);
+                }
+            }
+
+            @Override
+            public void workCompleted(IFeedWork work) {
+                ((SubscribeFeedWork) work).request.setSubscriptionStatus(ConnectionStatus.ACTIVE);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request + " completed ");
+                }
+            }
+
+        }
+
+        public FeedConnectionRequest getRequest() {
+            return request;
+        }
+
+        @Override
+        public String toString() {
+            return "SubscribeFeedWork for [" + request + "]";
+        }
+
+    }
+
+    /**
+     * The task of activating a set of feeds.
+     */
+    public static class ActivateFeedWork implements IFeedWork {
+
+        private final Runnable runnable;
+
+        @Override
+        public Runnable getRunnable() {
+            return runnable;
+        }
+
+        public ActivateFeedWork(List<FeedCollectInfo> feedsToRevive) {
+            this.runnable = new FeedsActivateRunnable(feedsToRevive);
+        }
+
+        public ActivateFeedWork() {
+            this.runnable = new FeedsActivateRunnable();
+        }
+
+        private static class FeedsActivateRunnable implements Runnable {
+
+            private List<FeedCollectInfo> feedsToRevive;
+            private Mode mode;
+
+            public enum Mode {
+                REVIVAL_POST_NODE_REJOIN
+            }
+
+            public FeedsActivateRunnable(List<FeedCollectInfo> feedsToRevive) {
+                this.feedsToRevive = feedsToRevive;
+                this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
+            }
+
+            public FeedsActivateRunnable() {
+            }
+
+            @Override
+            public void run() {
+                if (mode == null) {
+                    // Constructed without feeds to revive; nothing to do.
+                    return;
+                }
+                switch (mode) {
+                    case REVIVAL_POST_NODE_REJOIN:
+                        try {
+                            Thread.sleep(10000);
+                        } catch (InterruptedException e1) {
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Attempt to resume feed interrupted");
+                            }
+                            throw new IllegalStateException(e1.getMessage());
+                        }
+                        for (FeedCollectInfo finfo : feedsToRevive) {
+                            try {
+                                JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
+                                if (LOGGER.isLoggable(Level.INFO)) {
+                                    LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
+                                    LOGGER.info("Job:" + finfo.jobSpec);
+                                }
+                            } catch (Exception e) {
+                                if (LOGGER.isLoggable(Level.WARNING)) {
+                                    LOGGER.warning(
+                                            "Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
+                                }
+                            }
+                        }
+                }
+            }
+
+        }
+
+    }
+}
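
The SubscribeFeedWork above is handed off to the feed work manager together with its event listener. A minimal sketch of that hand-off, assuming FeedWorkManager still lives in org.apache.asterix.metadata.feeds and that the caller supplies the locations and the FeedConnectionRequest:

import org.apache.asterix.external.feed.management.FeedConnectionRequest;
import org.apache.asterix.feed.FeedWorkCollection.SubscribeFeedWork;
import org.apache.asterix.metadata.feeds.FeedWorkManager;

public class SubscribeFeedWorkSketch {
    // Sketch only: 'locations' and 'request' are assumed to be supplied by the caller.
    public static void submit(String[] locations, FeedConnectionRequest request) {
        SubscribeFeedWork work = new SubscribeFeedWork(locations, request);
        // The listener marks the request ACTIVE once the subscription work completes.
        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
    }
}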

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java
new file mode 100644
index 0000000..b30d8a7
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IClusterManagementWork;
+import org.apache.asterix.common.api.IClusterManagementWorkResponse;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo;
+import org.apache.asterix.metadata.cluster.AddNodeWork;
+import org.apache.asterix.metadata.cluster.AddNodeWorkResponse;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.constraints.Constraint;
+import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
+import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
+import org.apache.hyracks.api.constraints.expressions.ConstraintExpression;
+import org.apache.hyracks.api.constraints.expressions.ConstraintExpression.ExpressionTag;
+import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
+import org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedWorkRequestResponseHandler implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedWorkRequestResponseHandler.class.getName());
+
+    private final LinkedBlockingQueue<IClusterManagementWorkResponse> inbox;
+
+    private Map<Integer, Map<String, List<FeedJobInfo>>> feedsWaitingForResponse = new HashMap<Integer, Map<String, List<FeedJobInfo>>>();
+
+    public FeedWorkRequestResponseHandler(LinkedBlockingQueue<IClusterManagementWorkResponse> inbox) {
+        this.inbox = inbox;
+    }
+
+    @Override
+    public void run() {
+        while (true) {
+            IClusterManagementWorkResponse response = null;
+            try {
+                response = inbox.take();
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Interrupted exception " + e.getMessage());
+                }
+                // A null response cannot be processed; wait for the next one.
+                continue;
+            }
+            IClusterManagementWork submittedWork = response.getWork();
+            Map<String, String> nodeSubstitution = new HashMap<String, String>();
+            switch (submittedWork.getClusterManagementWorkType()) {
+                case ADD_NODE:
+                    AddNodeWork addNodeWork = (AddNodeWork) submittedWork;
+                    int workId = addNodeWork.getWorkId();
+                    Map<String, List<FeedJobInfo>> failureAnalysis = feedsWaitingForResponse.get(workId);
+                    AddNodeWorkResponse resp = (AddNodeWorkResponse) response;
+                    List<String> nodesAdded = resp.getNodesAdded();
+                    List<String> unsubstitutedNodes = new ArrayList<String>();
+                    unsubstitutedNodes.addAll(addNodeWork.getDeadNodes());
+                    int nodeIndex = 0;
+
+                    /** form a mapping between the failed node and its substitute **/
+                    if (nodesAdded != null && nodesAdded.size() > 0) {
+                        for (String failedNodeId : addNodeWork.getDeadNodes()) {
+                            String substitute = nodesAdded.get(nodeIndex);
+                            nodeSubstitution.put(failedNodeId, substitute);
+                            nodeIndex = (nodeIndex + 1) % nodesAdded.size();
+                            unsubstitutedNodes.remove(failedNodeId);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Node " + substitute + " chosen to substiute lost node " + failedNodeId);
+                            }
+                        }
+                    }
+                    if (unsubstitutedNodes.size() > 0) {
+                        String[] participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes()
+                                .toArray(new String[] {});
+                        nodeIndex = 0;
+                        for (String unsubstitutedNode : unsubstitutedNodes) {
+                            nodeSubstitution.put(unsubstitutedNode, participantNodes[nodeIndex]);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Node " + participantNodes[nodeIndex] + " chosen to substiute lost node "
+                                        + unsubstitutedNode);
+                            }
+                            nodeIndex = (nodeIndex + 1) % participantNodes.length;
+                        }
+
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Request " + resp.getWork() + " completed using internal nodes");
+                        }
+                    }
+
+                    // alter failed feed intake jobs
+
+                    for (Entry<String, List<FeedJobInfo>> entry : failureAnalysis.entrySet()) {
+                        String failedNode = entry.getKey();
+                        List<FeedJobInfo> impactedJobInfos = entry.getValue();
+                        for (FeedJobInfo info : impactedJobInfos) {
+                            JobSpecification spec = info.getSpec();
+                            replaceNode(spec, failedNode, nodeSubstitution.get(failedNode));
+                            info.setSpec(spec);
+                        }
+                    }
+
+                    Set<FeedIntakeInfo> revisedIntakeJobs = new HashSet<FeedIntakeInfo>();
+                    Set<FeedConnectJobInfo> revisedConnectJobInfos = new HashSet<FeedConnectJobInfo>();
+
+                    for (List<FeedJobInfo> infos : failureAnalysis.values()) {
+                        for (FeedJobInfo info : infos) {
+                            switch (info.getJobType()) {
+                                case INTAKE:
+                                    revisedIntakeJobs.add((FeedIntakeInfo) info);
+                                    break;
+                                case FEED_CONNECT:
+                                    revisedConnectJobInfos.add((FeedConnectJobInfo) info);
+                                    break;
+                            }
+                        }
+                    }
+
+                    IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+                    try {
+                        for (FeedIntakeInfo info : revisedIntakeJobs) {
+                            hcc.startJob(info.getSpec());
+                        }
+                        Thread.sleep(2000);
+                        for (FeedConnectJobInfo info : revisedConnectJobInfos) {
+                            hcc.startJob(info.getSpec());
+                            Thread.sleep(2000);
+                        }
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to start revised job post failure");
+                        }
+                    }
+
+                    break;
+                case REMOVE_NODE:
+                    throw new IllegalStateException("Invalid work submitted");
+            }
+        }
+    }
+
+    private void replaceNode(JobSpecification jobSpec, String failedNodeId, String replacementNode) {
+        Set<Constraint> userConstraints = jobSpec.getUserConstraints();
+        List<Constraint> locationConstraintsToReplace = new ArrayList<Constraint>();
+        List<Constraint> countConstraintsToReplace = new ArrayList<Constraint>();
+        List<OperatorDescriptorId> modifiedOperators = new ArrayList<OperatorDescriptorId>();
+        Map<OperatorDescriptorId, List<Constraint>> candidateConstraints = new HashMap<OperatorDescriptorId, List<Constraint>>();
+        Map<OperatorDescriptorId, Map<Integer, String>> newConstraints = new HashMap<OperatorDescriptorId, Map<Integer, String>>();
+        OperatorDescriptorId opId = null;
+        for (Constraint constraint : userConstraints) {
+            LValueConstraintExpression lexpr = constraint.getLValue();
+            ConstraintExpression cexpr = constraint.getRValue();
+            switch (lexpr.getTag()) {
+                case PARTITION_COUNT:
+                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
+                    if (modifiedOperators.contains(opId)) {
+                        countConstraintsToReplace.add(constraint);
+                    } else {
+                        List<Constraint> clist = candidateConstraints.get(opId);
+                        if (clist == null) {
+                            clist = new ArrayList<Constraint>();
+                            candidateConstraints.put(opId, clist);
+                        }
+                        clist.add(constraint);
+                    }
+                    break;
+                case PARTITION_LOCATION:
+                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
+                    String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+                    if (oldLocation.equals(failedNodeId)) {
+                        locationConstraintsToReplace.add(constraint);
+                        modifiedOperators.add(((PartitionLocationExpression) lexpr).getOperatorDescriptorId());
+                        Map<Integer, String> newLocs = newConstraints.get(opId);
+                        if (newLocs == null) {
+                            newLocs = new HashMap<Integer, String>();
+                            newConstraints.put(opId, newLocs);
+                        }
+                        int partition = ((PartitionLocationExpression) lexpr).getPartition();
+                        newLocs.put(partition, replacementNode);
+                    } else {
+                        if (modifiedOperators.contains(opId)) {
+                            locationConstraintsToReplace.add(constraint);
+                            Map<Integer, String> newLocs = newConstraints.get(opId);
+                            if (newLocs == null) {
+                                newLocs = new HashMap<Integer, String>();
+                                newConstraints.put(opId, newLocs);
+                            }
+                            int partition = ((PartitionLocationExpression) lexpr).getPartition();
+                            newLocs.put(partition, oldLocation);
+                        } else {
+                            List<Constraint> clist = candidateConstraints.get(opId);
+                            if (clist == null) {
+                                clist = new ArrayList<Constraint>();
+                                candidateConstraints.put(opId, clist);
+                            }
+                            clist.add(constraint);
+                        }
+                    }
+                    break;
+                default:
+                    break;
+            }
+        }
+
+        jobSpec.getUserConstraints().removeAll(locationConstraintsToReplace);
+        jobSpec.getUserConstraints().removeAll(countConstraintsToReplace);
+
+        for (OperatorDescriptorId mopId : modifiedOperators) {
+            List<Constraint> clist = candidateConstraints.get(mopId);
+            if (clist != null && !clist.isEmpty()) {
+                jobSpec.getUserConstraints().removeAll(clist);
+
+                for (Constraint c : clist) {
+                    if (c.getLValue().getTag().equals(ExpressionTag.PARTITION_LOCATION)) {
+                        ConstraintExpression cexpr = c.getRValue();
+                        int partition = ((PartitionLocationExpression) c.getLValue()).getPartition();
+                        String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+                        newConstraints.get(mopId).put(partition, oldLocation);
+                    }
+                }
+            }
+        }
+
+        for (Entry<OperatorDescriptorId, Map<Integer, String>> entry : newConstraints.entrySet()) {
+            OperatorDescriptorId nopId = entry.getKey();
+            Map<Integer, String> clist = entry.getValue();
+            IOperatorDescriptor op = jobSpec.getOperatorMap().get(nopId);
+            String[] locations = new String[clist.size()];
+            for (int i = 0; i < locations.length; i++) {
+                locations[i] = clist.get(i);
+            }
+            PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, op, locations);
+        }
+
+    }
+
+    public void registerFeedWork(int workId, Map<String, List<FeedJobInfo>> impactedJobs) {
+        feedsWaitingForResponse.put(workId, impactedJobs);
+    }
+}
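
For reference, the ADD_NODE branch above pairs each dead node with a replacement in round-robin fashion before rewriting job constraints. A self-contained sketch of just that mapping step, with hypothetical node ids, is:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class NodeSubstitutionSketch {
    public static void main(String[] args) {
        // Hypothetical node ids for illustration only.
        List<String> deadNodes = Arrays.asList("nc1", "nc2", "nc3");
        List<String> nodesAdded = Arrays.asList("nc4", "nc5");

        Map<String, String> nodeSubstitution = new HashMap<String, String>();
        int nodeIndex = 0;
        // Replacements are reused cyclically when fewer nodes were added than lost.
        for (String failedNodeId : deadNodes) {
            nodeSubstitution.put(failedNodeId, nodesAdded.get(nodeIndex));
            nodeIndex = (nodeIndex + 1) % nodesAdded.size();
        }
        // Prints a mapping such as {nc1=nc4, nc2=nc5, nc3=nc4} (iteration order may vary).
        System.out.println(nodeSubstitution);
    }
}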

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java
new file mode 100644
index 0000000..1a8a119
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.management.FeedCollectInfo;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
+import org.apache.asterix.lang.common.statement.DataverseDecl;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.api.job.JobId;
+
+public class FeedsActivator implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedsActivator.class.getName());
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    private List<FeedCollectInfo> feedsToRevive;
+    private Mode mode;
+
+    public enum Mode {
+        REVIVAL_POST_CLUSTER_REBOOT,
+        REVIVAL_POST_NODE_REJOIN
+    }
+
+    public FeedsActivator() {
+        this.mode = Mode.REVIVAL_POST_CLUSTER_REBOOT;
+    }
+
+    public FeedsActivator(List<FeedCollectInfo> feedsToRevive) {
+        this.feedsToRevive = feedsToRevive;
+        this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
+    }
+
+    @Override
+    public void run() {
+        switch (mode) {
+            case REVIVAL_POST_CLUSTER_REBOOT:
+                //revivePostClusterReboot();
+                break;
+            case REVIVAL_POST_NODE_REJOIN:
+                try {
+                    Thread.sleep(10000);
+                } catch (InterruptedException e1) {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Attempt to resume feed interrupted");
+                    }
+                    throw new IllegalStateException(e1.getMessage());
+                }
+                for (FeedCollectInfo finfo : feedsToRevive) {
+                    try {
+                        JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
+                            LOGGER.info("Job:" + finfo.jobSpec);
+                        }
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
+                        }
+                    }
+                }
+        }
+    }
+
+    public void reviveFeed(String dataverse, String feedName, String dataset, String feedPolicy) {
+        PrintWriter writer = new PrintWriter(System.out, true);
+        SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+        try {
+            DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(dataverse));
+            ConnectFeedStatement stmt = new ConnectFeedStatement(new Identifier(dataverse), new Identifier(feedName),
+                    new Identifier(dataset), feedPolicy, 0);
+            stmt.setForceConnect(true);
+            List<Statement> statements = new ArrayList<Statement>();
+            statements.add(dataverseDecl);
+            statements.add(stmt);
+            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                    QueryTranslator.ResultDelivery.SYNC);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Resumed feed: " + dataverse + ":" + dataset + " using policy " + feedPolicy);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Exception in resuming loser feed: " + dataverse + ":" + dataset + " using policy "
+                        + feedPolicy + " Exception " + e.getMessage());
+            }
+        }
+    }
+}
\ No newline at end of file
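
FeedsActivator can also be used to force a single feed back into a connected state via reviveFeed, which compiles a ConnectFeedStatement against a running instance. A hedged usage sketch, with hypothetical dataverse, feed, dataset, and policy names:

import org.apache.asterix.feed.FeedsActivator;

public class ReviveFeedSketch {
    public static void main(String[] args) {
        // Assumes a running AsterixDB instance so that AsterixAppContextInfo is initialized;
        // all identifiers below are hypothetical.
        new FeedsActivator().reviveFeed("feeds", "TwitterFeed", "Tweets", "Basic");
    }
}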

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java
deleted file mode 100644
index 7326d08..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/CentralFeedManager.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringReader;
-import java.util.List;
-
-import org.apache.asterix.api.common.SessionConfig;
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.api.ICentralFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedLoadManager;
-import org.apache.asterix.common.feeds.api.IFeedTrackingManager;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.lang.aql.parser.AQLParserFactory;
-import org.apache.asterix.lang.common.base.IParser;
-import org.apache.asterix.lang.common.base.IParserFactory;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.metadata.feeds.SocketMessageListener;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class CentralFeedManager implements ICentralFeedManager {
-
-    private static final ICentralFeedManager centralFeedManager = new CentralFeedManager();
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    public static ICentralFeedManager getInstance() {
-        return centralFeedManager;
-    }
-
-    private final int port;
-    private final IFeedLoadManager feedLoadManager;
-    private final IFeedTrackingManager feedTrackingManager;
-    private final SocketMessageListener messageListener;
-
-    private CentralFeedManager() {
-        this.port = AsterixAppContextInfo.getInstance().getFeedProperties().getFeedCentralManagerPort();
-        this.feedLoadManager = new FeedLoadManager();
-        this.feedTrackingManager = new FeedTrackingManager();
-        this.messageListener = new SocketMessageListener(port, new FeedMessageReceiver(this));
-    }
-
-    @Override
-    public void start() throws AsterixException {
-        messageListener.start();
-    }
-
-    @Override
-    public void stop() throws AsterixException, IOException {
-        messageListener.stop();
-    }
-
-    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobId jobId = hcc.startJob(spec);
-        if (waitForCompletion) {
-            hcc.waitForCompletion(jobId);
-        }
-        return jobId;
-    }
-
-    @Override
-    public IFeedLoadManager getFeedLoadManager() {
-        return feedLoadManager;
-    }
-
-    @Override
-    public IFeedTrackingManager getFeedTrackingManager() {
-        return feedTrackingManager;
-    }
-
-    public static class AQLExecutor {
-
-        private static final PrintWriter out = new PrintWriter(System.out, true);
-        private static final IParserFactory parserFactory = new AQLParserFactory();
-
-        public static void executeAQL(String aql) throws Exception {
-            IParser parser = parserFactory.createParser(new StringReader(aql));
-            List<Statement> statements = parser.parse();
-            SessionConfig pc = new SessionConfig(out, OutputFormat.ADM);
-            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                    QueryTranslator.ResultDelivery.SYNC);
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java
deleted file mode 100644
index aca6ab6..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedCollectInfo.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedCollectInfo extends FeedInfo {
-    public FeedId sourceFeedId;
-    public FeedConnectionId feedConnectionId;
-    public List<String> collectLocations = new ArrayList<String>();
-    public List<String> computeLocations = new ArrayList<String>();
-    public List<String> storageLocations = new ArrayList<String>();
-    public Map<String, String> feedPolicy;
-    public String superFeedManagerHost;
-    public int superFeedManagerPort;
-    public boolean fullyConnected;
-
-    public FeedCollectInfo(FeedId sourceFeedId, FeedConnectionId feedConnectionId, JobSpecification jobSpec,
-            JobId jobId, Map<String, String> feedPolicy) {
-        super(jobSpec, jobId, FeedInfoType.COLLECT);
-        this.sourceFeedId = sourceFeedId;
-        this.feedConnectionId = feedConnectionId;
-        this.feedPolicy = feedPolicy;
-        this.fullyConnected = true;
-    }
-
-    @Override
-    public String toString() {
-        return FeedInfoType.COLLECT + "[" + feedConnectionId + "]";
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java
deleted file mode 100644
index a247488..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedInfo.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobInfo;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedInfo {
-    public JobSpecification jobSpec;
-    public JobInfo jobInfo;
-    public JobId jobId;
-    public FeedInfoType infoType;
-    public State state;
-
-    public enum State {
-        ACTIVE,
-        INACTIVE
-    }
-
-    public enum FeedInfoType {
-        INTAKE,
-        COLLECT
-    }
-
-    public FeedInfo(JobSpecification jobSpec, JobId jobId, FeedInfoType infoType) {
-        this.jobSpec = jobSpec;
-        this.jobId = jobId;
-        this.infoType = infoType;
-        this.state = State.INACTIVE;
-    }
-
-    @Override
-    public String toString() {
-        return " job id " + jobId;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java
deleted file mode 100644
index aa7388e..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJobNotificationHandler.java
+++ /dev/null
@@ -1,743 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.rmi.RemoteException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.StringUtils;
-
-import org.apache.asterix.api.common.FeedWorkCollection.SubscribeFeedWork;
-import org.apache.asterix.common.exceptions.ACIDException;
-import org.apache.asterix.common.feeds.FeedActivity;
-import org.apache.asterix.common.feeds.FeedConnectJobInfo;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedIntakeInfo;
-import org.apache.asterix.common.feeds.FeedJobInfo;
-import org.apache.asterix.common.feeds.FeedJobInfo.FeedJobState;
-import org.apache.asterix.common.feeds.FeedJobInfo.JobType;
-import org.apache.asterix.common.feeds.FeedJointKey;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.api.IFeedJoint;
-import org.apache.asterix.common.feeds.api.IFeedJoint.State;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
-import org.apache.asterix.common.feeds.api.IIntakeProgressTracker;
-import org.apache.asterix.common.feeds.message.StorageReportFeedMessage;
-import org.apache.asterix.feeds.FeedLifecycleListener.Message;
-import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
-import org.apache.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.FeedMetaOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.FeedWorkManager;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobInfo;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.api.job.JobStatus;
-import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
-
-public class FeedJobNotificationHandler implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
-
-    private final LinkedBlockingQueue<Message> inbox;
-    private final Map<FeedConnectionId, List<IFeedLifecycleEventSubscriber>> eventSubscribers;
-
-    private final Map<JobId, FeedJobInfo> jobInfos;
-    private final Map<FeedId, FeedIntakeInfo> intakeJobInfos;
-    private final Map<FeedConnectionId, FeedConnectJobInfo> connectJobInfos;
-    private final Map<FeedId, List<IFeedJoint>> feedPipeline;
-    private final Map<FeedConnectionId, Pair<IIntakeProgressTracker, Long>> feedIntakeProgressTrackers;
-
-    public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
-        this.inbox = inbox;
-        this.jobInfos = new HashMap<JobId, FeedJobInfo>();
-        this.intakeJobInfos = new HashMap<FeedId, FeedIntakeInfo>();
-        this.connectJobInfos = new HashMap<FeedConnectionId, FeedConnectJobInfo>();
-        this.feedPipeline = new HashMap<FeedId, List<IFeedJoint>>();
-        this.eventSubscribers = new HashMap<FeedConnectionId, List<IFeedLifecycleEventSubscriber>>();
-        this.feedIntakeProgressTrackers = new HashMap<FeedConnectionId, Pair<IIntakeProgressTracker, Long>>();
-    }
-
-    @Override
-    public void run() {
-        Message mesg;
-        while (true) {
-            try {
-                mesg = inbox.take();
-                switch (mesg.messageKind) {
-                    case JOB_START:
-                        handleJobStartMessage(mesg);
-                        break;
-                    case JOB_FINISH:
-                        handleJobFinishMessage(mesg);
-                        break;
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-
-        }
-    }
-
-    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
-            IIntakeProgressTracker feedIntakeProgressTracker) {
-        if (feedIntakeProgressTrackers.get(connectionId) == null) {
-            this.feedIntakeProgressTrackers.put(connectionId, new Pair<IIntakeProgressTracker, Long>(
-                    feedIntakeProgressTracker, 0L));
-        } else {
-            throw new IllegalStateException(" Progress tracker for connection " + connectionId
-                    + " is alreader registered");
-        }
-    }
-
-    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
-        this.feedIntakeProgressTrackers.remove(connectionId);
-    }
-
-    public void updateTrackingInformation(StorageReportFeedMessage srm) {
-        Pair<IIntakeProgressTracker, Long> p = feedIntakeProgressTrackers.get(srm.getConnectionId());
-        if (p != null && p.second < srm.getLastPersistedTupleIntakeTimestamp()) {
-            p.second = srm.getLastPersistedTupleIntakeTimestamp();
-            p.first.notifyIngestedTupleTimestamp(p.second);
-        }
-    }
-
-    public Collection<FeedIntakeInfo> getFeedIntakeInfos() {
-        return intakeJobInfos.values();
-    }
-
-    public Collection<FeedConnectJobInfo> getFeedConnectInfos() {
-        return connectJobInfos.values();
-    }
-
-    public void registerFeedJoint(IFeedJoint feedJoint) {
-        List<IFeedJoint> feedJointsOnPipeline = feedPipeline.get(feedJoint.getOwnerFeedId());
-        if (feedJointsOnPipeline == null) {
-            feedJointsOnPipeline = new ArrayList<IFeedJoint>();
-            feedPipeline.put(feedJoint.getOwnerFeedId(), feedJointsOnPipeline);
-            feedJointsOnPipeline.add(feedJoint);
-        } else {
-            if (!feedJointsOnPipeline.contains(feedJoint)) {
-                feedJointsOnPipeline.add(feedJoint);
-            } else {
-                throw new IllegalArgumentException("Feed joint " + feedJoint + " already registered");
-            }
-        }
-    }
-
-    public void registerFeedIntakeJob(FeedId feedId, JobId jobId, JobSpecification jobSpec) throws HyracksDataException {
-        if (jobInfos.get(jobId) != null) {
-            throw new IllegalStateException("Feed job already registered");
-        }
-
-        List<IFeedJoint> joints = feedPipeline.get(feedId);
-        IFeedJoint intakeJoint = null;
-        for (IFeedJoint joint : joints) {
-            if (joint.getType().equals(IFeedJoint.FeedJointType.INTAKE)) {
-                intakeJoint = joint;
-                break;
-            }
-        }
-
-        if (intakeJoint != null) {
-            FeedIntakeInfo intakeJobInfo = new FeedIntakeInfo(jobId, FeedJobState.CREATED, FeedJobInfo.JobType.INTAKE,
-                    feedId, intakeJoint, jobSpec);
-            intakeJobInfos.put(feedId, intakeJobInfo);
-            jobInfos.put(jobId, intakeJobInfo);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered feed intake [" + jobId + "]" + " for feed " + feedId);
-            }
-        } else {
-            throw new HyracksDataException("Could not register feed intake job [" + jobId + "]" + " for feed  "
-                    + feedId);
-        }
-    }
-
-    public void registerFeedCollectionJob(FeedId sourceFeedId, FeedConnectionId connectionId, JobId jobId,
-            JobSpecification jobSpec, Map<String, String> feedPolicy) {
-        if (jobInfos.get(jobId) != null) {
-            throw new IllegalStateException("Feed job already registered");
-        }
-
-        List<IFeedJoint> feedJoints = feedPipeline.get(sourceFeedId);
-        FeedConnectionId cid = null;
-        IFeedJoint sourceFeedJoint = null;
-        for (IFeedJoint joint : feedJoints) {
-            cid = joint.getReceiver(connectionId);
-            if (cid != null) {
-                sourceFeedJoint = joint;
-                break;
-            }
-        }
-
-        if (cid != null) {
-            FeedConnectJobInfo cInfo = new FeedConnectJobInfo(jobId, FeedJobState.CREATED, connectionId,
-                    sourceFeedJoint, null, jobSpec, feedPolicy);
-            jobInfos.put(jobId, cInfo);
-            connectJobInfos.put(connectionId, cInfo);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered feed connection [" + jobId + "]" + " for feed " + connectionId);
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Could not register feed collection job [" + jobId + "]" + " for feed connection "
-                        + connectionId);
-            }
-        }
-
-    }
-
-    public void deregisterFeedIntakeJob(JobId jobId) {
-        if (jobInfos.get(jobId) == null) {
-            throw new IllegalStateException(" Feed Intake job not registered ");
-        }
-
-        FeedIntakeInfo info = (FeedIntakeInfo) jobInfos.get(jobId);
-        jobInfos.remove(jobId);
-        intakeJobInfos.remove(info.getFeedId());
-
-        if (!info.getState().equals(FeedJobState.UNDER_RECOVERY)) {
-            List<IFeedJoint> joints = feedPipeline.get(info.getFeedId());
-            joints.remove(info.getIntakeFeedJoint());
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Deregistered feed intake job [" + jobId + "]");
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Not removing feed joint as intake job is in " + FeedJobState.UNDER_RECOVERY + " state.");
-            }
-        }
-
-    }
-
-    private void handleJobStartMessage(Message message) throws Exception {
-        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
-        switch (jobInfo.getJobType()) {
-            case INTAKE:
-                handleIntakeJobStartMessage((FeedIntakeInfo) jobInfo);
-                break;
-            case FEED_CONNECT:
-                handleCollectJobStartMessage((FeedConnectJobInfo) jobInfo);
-                break;
-        }
-
-    }
-
-    private void handleJobFinishMessage(Message message) throws Exception {
-        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
-        switch (jobInfo.getJobType()) {
-            case INTAKE:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Intake Job finished for feed intake " + jobInfo.getJobId());
-                }
-                handleFeedIntakeJobFinishMessage((FeedIntakeInfo) jobInfo, message);
-                break;
-            case FEED_CONNECT:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Collect Job finished for  " + (FeedConnectJobInfo) jobInfo);
-                }
-                handleFeedCollectJobFinishMessage((FeedConnectJobInfo) jobInfo);
-                break;
-        }
-
-    }
-
-    private synchronized void handleIntakeJobStartMessage(FeedIntakeInfo intakeJobInfo) throws Exception {
-        List<OperatorDescriptorId> intakeOperatorIds = new ArrayList<OperatorDescriptorId>();
-        Map<OperatorDescriptorId, IOperatorDescriptor> operators = intakeJobInfo.getSpec().getOperatorMap();
-        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
-            IOperatorDescriptor opDesc = entry.getValue();
-            if (opDesc instanceof FeedIntakeOperatorDescriptor) {
-                intakeOperatorIds.add(opDesc.getOperatorId());
-            }
-        }
-
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(intakeJobInfo.getJobId());
-        List<String> intakeLocations = new ArrayList<String>();
-        for (OperatorDescriptorId intakeOperatorId : intakeOperatorIds) {
-            Map<Integer, String> operatorLocations = info.getOperatorLocations().get(intakeOperatorId);
-            int nOperatorInstances = operatorLocations.size();
-            for (int i = 0; i < nOperatorInstances; i++) {
-                intakeLocations.add(operatorLocations.get(i));
-            }
-        }
-        // intakeLocations is an ordered list; element at position i corresponds to location of i'th instance of operator
-        intakeJobInfo.setIntakeLocation(intakeLocations);
-        intakeJobInfo.getIntakeFeedJoint().setState(State.ACTIVE);
-        intakeJobInfo.setState(FeedJobState.ACTIVE);
-
-        // notify event listeners 
-        notifyFeedEventSubscribers(intakeJobInfo, FeedLifecycleEvent.FEED_INTAKE_STARTED);
-    }
-
-    private void handleCollectJobStartMessage(FeedConnectJobInfo cInfo) throws RemoteException, ACIDException {
-        // set locations of feed sub-operations (intake, compute, store)
-        setLocations(cInfo);
-
-        // activate joints
-        List<IFeedJoint> joints = feedPipeline.get(cInfo.getConnectionId().getFeedId());
-        for (IFeedJoint joint : joints) {
-            if (joint.getProvider().equals(cInfo.getConnectionId())) {
-                joint.setState(State.ACTIVE);
-                if (joint.getType().equals(IFeedJoint.FeedJointType.COMPUTE)) {
-                    cInfo.setComputeFeedJoint(joint);
-                }
-            }
-        }
-        cInfo.setState(FeedJobState.ACTIVE);
-
-        // register activity in metadata
-        registerFeedActivity(cInfo);
-        // notify event listeners
-        notifyFeedEventSubscribers(cInfo, FeedLifecycleEvent.FEED_COLLECT_STARTED);
-    }
-
-    private void notifyFeedEventSubscribers(FeedJobInfo jobInfo, FeedLifecycleEvent event) {
-        JobType jobType = jobInfo.getJobType();
-        List<FeedConnectionId> impactedConnections = new ArrayList<FeedConnectionId>();
-        if (jobType.equals(JobType.INTAKE)) {
-            FeedId feedId = ((FeedIntakeInfo) jobInfo).getFeedId();
-            for (FeedConnectionId connId : eventSubscribers.keySet()) {
-                if (connId.getFeedId().equals(feedId)) {
-                    impactedConnections.add(connId);
-                }
-            }
-        } else {
-            impactedConnections.add(((FeedConnectJobInfo) jobInfo).getConnectionId());
-        }
-
-        for (FeedConnectionId connId : impactedConnections) {
-            List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connId);
-            if (subscribers != null && !subscribers.isEmpty()) {
-                for (IFeedLifecycleEventSubscriber subscriber : subscribers) {
-                    subscriber.handleFeedEvent(event);
-                }
-            }
-        }
-    }
-
-    public synchronized void submitFeedConnectionRequest(IFeedJoint feedJoint, final FeedConnectionRequest request)
-            throws Exception {
-        List<String> locations = null;
-        switch (feedJoint.getType()) {
-            case INTAKE:
-                FeedIntakeInfo intakeInfo = intakeJobInfos.get(feedJoint.getOwnerFeedId());
-                locations = intakeInfo.getIntakeLocation();
-                break;
-            case COMPUTE:
-                FeedConnectionId connectionId = feedJoint.getProvider();
-                FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-                locations = cInfo.getComputeLocations();
-                break;
-        }
-
-        SubscribeFeedWork work = new SubscribeFeedWork(locations.toArray(new String[] {}), request);
-        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
-    }
-
-    public IFeedJoint getSourceFeedJoint(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        if (cInfo != null) {
-            return cInfo.getSourceFeedJoint();
-        }
-        return null;
-    }
-
-    public Set<FeedConnectionId> getActiveFeedConnections() {
-        Set<FeedConnectionId> activeConnections = new HashSet<FeedConnectionId>();
-        for (FeedConnectJobInfo cInfo : connectJobInfos.values()) {
-            if (cInfo.getState().equals(FeedJobState.ACTIVE)) {
-                activeConnections.add(cInfo.getConnectionId());
-            }
-        }
-        return activeConnections;
-    }
-
-    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        if (cInfo != null) {
-            return cInfo.getState().equals(FeedJobState.ACTIVE);
-        }
-        return false;
-    }
-
-    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
-        FeedConnectJobInfo connectJobInfo = connectJobInfos.get(connectionId);
-        connectJobInfo.setState(jobState);
-    }
-
-    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getState();
-    }
-
-    private void handleFeedIntakeJobFinishMessage(FeedIntakeInfo intakeInfo, Message message) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(message.jobId);
-        JobStatus status = info.getStatus();
-        FeedLifecycleEvent event;
-        event = status.equals(JobStatus.FAILURE) ? FeedLifecycleEvent.FEED_INTAKE_FAILURE
-                : FeedLifecycleEvent.FEED_ENDED;
-
-        // remove feed joints
-        deregisterFeedIntakeJob(message.jobId);
-
-        // notify event listeners 
-        notifyFeedEventSubscribers(intakeInfo, event);
-
-    }
-
-    private void handleFeedCollectJobFinishMessage(FeedConnectJobInfo cInfo) throws Exception {
-        FeedConnectionId connectionId = cInfo.getConnectionId();
-
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(cInfo.getJobId());
-        JobStatus status = info.getStatus();
-        boolean failure = status != null && status.equals(JobStatus.FAILURE);
-        FeedPolicyAccessor fpa = new FeedPolicyAccessor(cInfo.getFeedPolicy());
-
-        boolean removeJobHistory = !failure;
-        boolean retainSubscription = cInfo.getState().equals(FeedJobState.UNDER_RECOVERY)
-                || (failure && fpa.continueOnHardwareFailure());
-
-        if (!retainSubscription) {
-            IFeedJoint feedJoint = cInfo.getSourceFeedJoint();
-            feedJoint.removeReceiver(connectionId);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Subscription " + cInfo.getConnectionId() + " completed successfully. Removed subscription");
-            }
-            removeFeedJointsPostPipelineTermination(cInfo.getConnectionId());
-        }
-
-        if (removeJobHistory) {
-            connectJobInfos.remove(connectionId);
-            jobInfos.remove(cInfo.getJobId());
-            feedIntakeProgressTrackers.remove(cInfo.getConnectionId());
-        }
-        deregisterFeedActivity(cInfo);
-
-        // notify event listeners 
-        FeedLifecycleEvent event = failure ? FeedLifecycleEvent.FEED_COLLECT_FAILURE : FeedLifecycleEvent.FEED_ENDED;
-        notifyFeedEventSubscribers(cInfo, event);
-    }
-
-    private void registerFeedActivity(FeedConnectJobInfo cInfo) {
-        Map<String, String> feedActivityDetails = new HashMap<String, String>();
-
-        if (cInfo.getCollectLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS,
-                    StringUtils.join(cInfo.getCollectLocations().iterator(), ','));
-        }
-
-        if (cInfo.getComputeLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS,
-                    StringUtils.join(cInfo.getComputeLocations().iterator(), ','));
-        }
-
-        if (cInfo.getStorageLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS,
-                    StringUtils.join(cInfo.getStorageLocations().iterator(), ','));
-        }
-
-        String policyName = cInfo.getFeedPolicy().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
-        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
-
-        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, (new Date()).toString());
-        try {
-            FeedActivity feedActivity = new FeedActivity(cInfo.getConnectionId().getFeedId().getDataverse(), cInfo
-                    .getConnectionId().getFeedId().getFeedName(), cInfo.getConnectionId().getDatasetName(),
-                    feedActivityDetails);
-            CentralFeedManager.getInstance().getFeedLoadManager()
-                    .reportFeedActivity(cInfo.getConnectionId(), feedActivity);
-
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to register feed activity for " + cInfo + " " + e.getMessage());
-            }
-
-        }
-
-    }
-
-    public void deregisterFeedActivity(FeedConnectJobInfo cInfo) {
-        try {
-            CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(cInfo.getConnectionId());
-        } catch (Exception e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to deregister feed activity for " + cInfo + " " + e.getMessage());
-            }
-        }
-    }
-
-    public void removeFeedJointsPostPipelineTermination(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        List<IFeedJoint> feedJoints = feedPipeline.get(connectionId.getFeedId());
-
-        IFeedJoint sourceJoint = cInfo.getSourceFeedJoint();
-        List<FeedConnectionId> all = sourceJoint.getReceivers();
-        boolean removeSourceJoint = all.size() < 2;
-        if (removeSourceJoint) {
-            feedJoints.remove(sourceJoint);
-        }
-
-        IFeedJoint computeJoint = cInfo.getComputeFeedJoint();
-        if (computeJoint != null && computeJoint.getReceivers().size() < 2) {
-            feedJoints.remove(computeJoint);
-        }
-    }
-
-    public boolean isRegisteredFeedJob(JobId jobId) {
-        return jobInfos.get(jobId) != null;
-    }
-
-    public List<String> getFeedComputeLocations(FeedId feedId) {
-        List<IFeedJoint> feedJoints = feedPipeline.get(feedId);
-        for (IFeedJoint joint : feedJoints) {
-            if (joint.getFeedJointKey().getFeedId().equals(feedId)) {
-                return connectJobInfos.get(joint.getProvider()).getComputeLocations();
-            }
-        }
-        return null;
-    }
-
-    public List<String> getFeedStorageLocations(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getStorageLocations();
-    }
-
-    public List<String> getFeedCollectLocations(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getCollectLocations();
-    }
-
-    public List<String> getFeedIntakeLocations(FeedId feedId) {
-        return intakeJobInfos.get(feedId).getIntakeLocation();
-    }
-
-    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getJobId();
-    }
-
-    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
-        if (subscribers == null) {
-            subscribers = new ArrayList<IFeedLifecycleEventSubscriber>();
-            eventSubscribers.put(connectionId, subscribers);
-        }
-        subscribers.add(subscriber);
-    }
-
-    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
-        if (subscribers != null) {
-            subscribers.remove(subscriber);
-        }
-    }
-
-    //============================
-
-    public boolean isFeedPointAvailable(FeedJointKey feedJointKey) {
-        List<IFeedJoint> joints = feedPipeline.get(feedJointKey.getFeedId());
-        if (joints != null && !joints.isEmpty()) {
-            for (IFeedJoint joint : joints) {
-                if (joint.getFeedJointKey().equals(feedJointKey)) {
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-
-    public Collection<IFeedJoint> getFeedIntakeJoints() {
-        List<IFeedJoint> intakeFeedPoints = new ArrayList<IFeedJoint>();
-        for (FeedIntakeInfo info : intakeJobInfos.values()) {
-            intakeFeedPoints.add(info.getIntakeFeedJoint());
-        }
-        return intakeFeedPoints;
-    }
-
-    public IFeedJoint getFeedJoint(FeedJointKey feedPointKey) {
-        List<IFeedJoint> joints = feedPipeline.get(feedPointKey.getFeedId());
-        if (joints != null && !joints.isEmpty()) {
-            for (IFeedJoint joint : joints) {
-                if (joint.getFeedJointKey().equals(feedPointKey)) {
-                    return joint;
-                }
-            }
-        }
-        return null;
-    }
-
-    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
-        IFeedJoint feedJoint = getFeedJoint(feedJointKey);
-        if (feedJoint != null) {
-            return feedJoint;
-        } else {
-            String jointKeyString = feedJointKey.getStringRep();
-            List<IFeedJoint> jointsOnPipeline = feedPipeline.get(feedJointKey.getFeedId());
-            IFeedJoint candidateJoint = null;
-            if (jointsOnPipeline != null) {
-                for (IFeedJoint joint : jointsOnPipeline) {
-                    if (jointKeyString.contains(joint.getFeedJointKey().getStringRep())) {
-                        if (candidateJoint == null) {
-                            candidateJoint = joint;
-                        } else if (joint.getFeedJointKey().getStringRep()
-                                .contains(candidateJoint.getFeedJointKey().getStringRep())) { // the feed joint found is a superset of the earlier candidate
-                            candidateJoint = joint;
-                        }
-                    }
-                }
-            }
-            return candidateJoint;
-        }
-    }
-
-    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getSpec();
-    }
-
-    public IFeedJoint getFeedPoint(FeedId sourceFeedId, IFeedJoint.FeedJointType type) {
-        List<IFeedJoint> joints = feedPipeline.get(sourceFeedId);
-        for (IFeedJoint joint : joints) {
-            if (joint.getType().equals(type)) {
-                return joint;
-            }
-        }
-        return null;
-    }
-
-    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId);
-    }
-
-    private void setLocations(FeedConnectJobInfo cInfo) {
-        JobSpecification jobSpec = cInfo.getSpec();
-
-        List<OperatorDescriptorId> collectOperatorIds = new ArrayList<OperatorDescriptorId>();
-        List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
-        List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
-
-        Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
-        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
-            IOperatorDescriptor opDesc = entry.getValue();
-            IOperatorDescriptor actualOp = null;
-            if (opDesc instanceof FeedMetaOperatorDescriptor) {
-                actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
-            } else {
-                actualOp = opDesc;
-            }
-
-            if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
-                AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
-                IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
-                boolean computeOp = false;
-                for (IPushRuntimeFactory rf : runtimeFactories) {
-                    if (rf instanceof AssignRuntimeFactory) {
-                        IConnectorDescriptor connDesc = jobSpec.getOperatorInputMap().get(op.getOperatorId()).get(0);
-                        IOperatorDescriptor sourceOp = jobSpec.getConnectorOperatorMap().get(connDesc.getConnectorId())
-                                .getLeft().getLeft();
-                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
-                            computeOp = true;
-                            break;
-                        }
-                    }
-                }
-                if (computeOp) {
-                    computeOperatorIds.add(entry.getKey());
-                }
-            } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
-                storageOperatorIds.add(entry.getKey());
-            } else if (actualOp instanceof FeedCollectOperatorDescriptor) {
-                collectOperatorIds.add(entry.getKey());
-            }
-        }
-
-        try {
-            IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-            JobInfo info = hcc.getJobInfo(cInfo.getJobId());
-            List<String> collectLocations = new ArrayList<String>();
-            for (OperatorDescriptorId collectOpId : collectOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(collectOpId);
-                int nOperatorInstances = operatorLocations.size();
-                for (int i = 0; i < nOperatorInstances; i++) {
-                    collectLocations.add(operatorLocations.get(i));
-                }
-            }
-
-            List<String> computeLocations = new ArrayList<String>();
-            for (OperatorDescriptorId computeOpId : computeOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
-                if (operatorLocations != null) {
-                    int nOperatorInstances = operatorLocations.size();
-                    for (int i = 0; i < nOperatorInstances; i++) {
-                        computeLocations.add(operatorLocations.get(i));
-                    }
-                } else {
-                    computeLocations.clear();
-                    computeLocations.addAll(collectLocations);
-                }
-            }
-
-            List<String> storageLocations = new ArrayList<String>();
-            for (OperatorDescriptorId storageOpId : storageOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
-                if (operatorLocations == null) {
-                    continue;
-                }
-                int nOperatorInstances = operatorLocations.size();
-                for (int i = 0; i < nOperatorInstances; i++) {
-                    storageLocations.add(operatorLocations.get(i));
-                }
-            }
-            cInfo.setCollectLocations(collectLocations);
-            cInfo.setComputeLocations(computeLocations);
-            cInfo.setStorageLocations(storageLocations);
-
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java
deleted file mode 100644
index a76a1e9..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedJoint.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedJointKey;
-import org.apache.asterix.common.feeds.api.IFeedJoint;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
-
-public class FeedJoint implements IFeedJoint {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJoint.class.getName());
-
-    /** A unique key associated with the feed point **/
-    private final FeedJointKey key;
-
-    /** The state associated with the FeedJoint **/
-    private State state;
-
-    /** A list of subscribers that receive data from this FeedJoint **/
-    private final List<FeedConnectionId> receivers;
-
-    /** The feedId on which the feedPoint resides **/
-    private final FeedId ownerFeedId;
-
-    /** A list of feed subscription requests submitted for subscribing to the FeedPoint's data **/
-    private final List<FeedConnectionRequest> connectionRequests;
-
-    private final ConnectionLocation connectionLocation;
-
-    private final FeedJointType type;
-
-    private FeedConnectionId provider;
-
-    public FeedJoint(FeedJointKey key, FeedId ownerFeedId, ConnectionLocation subscriptionLocation, FeedJointType type,
-            FeedConnectionId provider) {
-        this.key = key;
-        this.ownerFeedId = ownerFeedId;
-        this.type = type;
-        this.receivers = new ArrayList<FeedConnectionId>();
-        this.state = State.CREATED;
-        this.connectionLocation = subscriptionLocation;
-        this.connectionRequests = new ArrayList<FeedConnectionRequest>();
-        this.provider = provider;
-    }
-
-    @Override
-    public int hashCode() {
-        return key.hashCode();
-    }
-
-    public void addReceiver(FeedConnectionId connectionId) {
-        receivers.add(connectionId);
-    }
-
-    public void removeReceiver(FeedConnectionId connectionId) {
-        receivers.remove(connectionId);
-    }
-
-    public synchronized void addConnectionRequest(FeedConnectionRequest request) {
-        connectionRequests.add(request);
-        if (state.equals(State.ACTIVE)) {
-            handlePendingConnectionRequest();
-        }
-    }
-
-    public synchronized void setState(State state) {
-        if (this.state.equals(state)) {
-            return;
-        }
-        this.state = state;
-        if (this.state.equals(State.ACTIVE)) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Feed joint " + this + " is now " + State.ACTIVE);
-            }
-            handlePendingConnectionRequest();
-        }
-    }
-
-    private void handlePendingConnectionRequest() {
-        for (FeedConnectionRequest connectionRequest : connectionRequests) {
-            FeedConnectionId connectionId = new FeedConnectionId(connectionRequest.getReceivingFeedId(),
-                    connectionRequest.getTargetDataset());
-            try {
-                FeedLifecycleListener.INSTANCE.submitFeedConnectionRequest(this, connectionRequest);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Submitted feed connection request " + connectionRequest + " at feed joint " + this);
-                }
-                addReceiver(connectionId);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unsuccessful attempt at submitting connection request " + connectionRequest
-                            + " at feed joint " + this + ". Message " + e.getMessage());
-                }
-                e.printStackTrace();
-            }
-        }
-        connectionRequests.clear();
-    }
-
-    public FeedConnectionId getReceiver(FeedConnectionId connectionId) {
-        for (FeedConnectionId cid : receivers) {
-            if (cid.equals(connectionId)) {
-                return cid;
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public String toString() {
-        return key.toString() + " [" + connectionLocation + "]" + "[" + state + "]";
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == null) {
-            return false;
-        }
-        if (o == this) {
-            return true;
-        }
-        if (!(o instanceof FeedJoint)) {
-            return false;
-        }
-        return ((FeedJoint) o).getFeedJointKey().equals(this.key);
-    }
-
-    public FeedId getOwnerFeedId() {
-        return ownerFeedId;
-    }
-
-    public List<FeedConnectionRequest> getConnectionRequests() {
-        return connectionRequests;
-    }
-
-    public ConnectionLocation getConnectionLocation() {
-        return connectionLocation;
-    }
-
-    public FeedJointType getType() {
-        return type;
-    }
-
-    @Override
-    public FeedConnectionId getProvider() {
-        return provider;
-    }
-
-    public List<FeedConnectionId> getReceivers() {
-        return receivers;
-    }
-
-    public FeedJointKey getKey() {
-        return key;
-    }
-
-    public synchronized State getState() {
-        return state;
-    }
-
-    @Override
-    public FeedJointKey getFeedJointKey() {
-        return key;
-    }
-
-}



http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedActivity.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedActivity.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedActivity.java
new file mode 100644
index 0000000..fc06ab9
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedActivity.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.feed.watch;
+
+import java.util.Map;
+
+public class FeedActivity implements Comparable<FeedActivity> {
+
+    private int activityId;
+
+    private final String dataverseName;
+    private final String datasetName;
+    private final String feedName;
+    private final Map<String, String> feedActivityDetails;
+
+    public static class FeedActivityDetails {
+        public static final String INTAKE_LOCATIONS = "intake-locations";
+        public static final String COMPUTE_LOCATIONS = "compute-locations";
+        public static final String STORAGE_LOCATIONS = "storage-locations";
+        public static final String COLLECT_LOCATIONS = "collect-locations";
+        public static final String FEED_POLICY_NAME = "feed-policy-name";
+        public static final String FEED_CONNECT_TIMESTAMP = "feed-connect-timestamp";
+    }
+
+    public FeedActivity(String dataverseName, String feedName, String datasetName,
+            Map<String, String> feedActivityDetails) {
+        this.dataverseName = dataverseName;
+        this.feedName = feedName;
+        this.datasetName = datasetName;
+        this.feedActivityDetails = feedActivityDetails;
+    }
+
+    public String getDataverseName() {
+        return dataverseName;
+    }
+
+    public String getDatasetName() {
+        return datasetName;
+    }
+
+    public String getFeedName() {
+        return feedName;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+        if (!(other instanceof FeedActivity)) {
+            return false;
+        }
+
+        if (!((FeedActivity) other).dataverseName.equals(dataverseName)) {
+            return false;
+        }
+        if (!((FeedActivity) other).datasetName.equals(datasetName)) {
+            return false;
+        }
+        if (!((FeedActivity) other).getFeedName().equals(feedName)) {
+            return false;
+        }
+        if (((FeedActivity) other).getActivityId() != (activityId)) {
+            return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        return toString().hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return dataverseName + "." + feedName + " --> " + datasetName + " " + activityId;
+    }
+
+    public String getConnectTimestamp() {
+        return feedActivityDetails.get(FeedActivityDetails.FEED_CONNECT_TIMESTAMP);
+    }
+
+    public int getActivityId() {
+        return activityId;
+    }
+
+    public void setActivityId(int activityId) {
+        this.activityId = activityId;
+    }
+
+    public Map<String, String> getFeedActivityDetails() {
+        return feedActivityDetails;
+    }
+
+    @Override
+    public int compareTo(FeedActivity o) {
+        return o.getActivityId() - this.activityId;
+    }
+
+}
\ No newline at end of file
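
For context only (not part of this commit), a minimal sketch of how the FeedActivity
class added above could be populated; the dataverse, feed, dataset and node names are
invented for illustration:

    import java.util.Date;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.external.feed.watch.FeedActivity;

    public class FeedActivityExample {
        public static void main(String[] args) {
            Map<String, String> details = new HashMap<String, String>();
            details.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS, "nc1,nc2");
            details.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, "Basic");
            details.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, new Date().toString());
            FeedActivity activity = new FeedActivity("Metadata", "TwitterFeed", "Tweets", details);
            activity.setActivityId(1);
            System.out.println(activity); // prints: Metadata.TwitterFeed --> Tweets 1
        }
    }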

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedConnectJobInfo.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedConnectJobInfo.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedConnectJobInfo.java
new file mode 100644
index 0000000..3e42169
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedConnectJobInfo.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedConnectJobInfo extends FeedJobInfo {
+
+    private final FeedConnectionId connectionId;
+    private final Map<String, String> feedPolicy;
+    private final IFeedJoint sourceFeedJoint;
+    private IFeedJoint computeFeedJoint;
+
+    private List<String> collectLocations;
+    private List<String> computeLocations;
+    private List<String> storageLocations;
+
+    public FeedConnectJobInfo(JobId jobId, FeedJobState state, FeedConnectionId connectionId,
+            IFeedJoint sourceFeedJoint, IFeedJoint computeFeedJoint, JobSpecification spec,
+            Map<String, String> feedPolicy) {
+        super(jobId, state, FeedJobInfo.JobType.FEED_CONNECT, spec);
+        this.connectionId = connectionId;
+        this.sourceFeedJoint = sourceFeedJoint;
+        this.computeFeedJoint = computeFeedJoint;
+        this.feedPolicy = feedPolicy;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public List<String> getCollectLocations() {
+        return collectLocations;
+    }
+
+    public List<String> getComputeLocations() {
+        return computeLocations;
+    }
+
+    public List<String> getStorageLocations() {
+        return storageLocations;
+    }
+
+    public void setCollectLocations(List<String> collectLocations) {
+        this.collectLocations = collectLocations;
+    }
+
+    public void setComputeLocations(List<String> computeLocations) {
+        this.computeLocations = computeLocations;
+    }
+
+    public void setStorageLocations(List<String> storageLocations) {
+        this.storageLocations = storageLocations;
+    }
+
+    public IFeedJoint getSourceFeedJoint() {
+        return sourceFeedJoint;
+    }
+
+    public IFeedJoint getComputeFeedJoint() {
+        return computeFeedJoint;
+    }
+
+    public Map<String, String> getFeedPolicy() {
+        return feedPolicy;
+    }
+
+    public void setComputeFeedJoint(IFeedJoint computeFeedJoint) {
+        this.computeFeedJoint = computeFeedJoint;
+    }
+
+}
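
For context only (not part of this commit), a small sketch of how a FeedConnectJobInfo is
used once the corresponding Hyracks job is running: the per-stage locations are filled in
and later read back (the node names here are invented):

    import java.util.Arrays;

    import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;

    public class ConnectJobLocationsExample {
        // Populates the collect/compute/storage locations of an existing connect job,
        // mirroring what the feed lifecycle listener does after the job starts.
        static void recordLocations(FeedConnectJobInfo cInfo) {
            cInfo.setCollectLocations(Arrays.asList("nc1"));
            cInfo.setComputeLocations(Arrays.asList("nc1", "nc2"));
            cInfo.setStorageLocations(Arrays.asList("nc2"));
            System.out.println(cInfo.getConnectionId() + " stores on " + cInfo.getStorageLocations());
        }
    }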

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedIntakeInfo.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedIntakeInfo.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedIntakeInfo.java
new file mode 100644
index 0000000..3b11811
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedIntakeInfo.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.util.List;
+
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedIntakeInfo extends FeedJobInfo {
+
+    private final FeedId feedId;
+    private final IFeedJoint intakeFeedJoint;
+    private final JobSpecification spec;
+    private List<String> intakeLocation;
+
+    public FeedIntakeInfo(JobId jobId, FeedJobState state, JobType jobType, FeedId feedId, IFeedJoint intakeFeedJoint,
+            JobSpecification spec) {
+        super(jobId, state, FeedJobInfo.JobType.INTAKE, spec);
+        this.feedId = feedId;
+        this.intakeFeedJoint = intakeFeedJoint;
+        this.spec = spec;
+    }
+
+    public FeedId getFeedId() {
+        return feedId;
+    }
+
+    public IFeedJoint getIntakeFeedJoint() {
+        return intakeFeedJoint;
+    }
+
+    public JobSpecification getSpec() {
+        return spec;
+    }
+
+    public List<String> getIntakeLocation() {
+        return intakeLocation;
+    }
+
+    public void setIntakeLocation(List<String> intakeLocation) {
+        this.intakeLocation = intakeLocation;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedJobInfo.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedJobInfo.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedJobInfo.java
new file mode 100644
index 0000000..92e00cb
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedJobInfo.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedJobInfo {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJobInfo.class.getName());
+
+    public enum JobType {
+        INTAKE,
+        FEED_CONNECT
+    }
+
+    public enum FeedJobState {
+        CREATED,
+        ACTIVE,
+        UNDER_RECOVERY,
+        ENDED
+    }
+
+    protected final JobId jobId;
+    protected final JobType jobType;
+    protected FeedJobState state;
+    protected JobSpecification spec;
+
+    public FeedJobInfo(JobId jobId, FeedJobState state, JobType jobType, JobSpecification spec) {
+        this.jobId = jobId;
+        this.state = state;
+        this.jobType = jobType;
+        this.spec = spec;
+    }
+
+    public JobId getJobId() {
+        return jobId;
+    }
+
+    public FeedJobState getState() {
+        return state;
+    }
+
+    public void setState(FeedJobState state) {
+        this.state = state;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(this + " is in " + state + " state.");
+        }
+    }
+
+    public JobType getJobType() {
+        return jobType;
+    }
+
+    public JobSpecification getSpec() {
+        return spec;
+    }
+
+    public void setSpec(JobSpecification spec) {
+        this.spec = spec;
+    }
+
+    public String toString() {
+        return jobId + " [" + jobType + "]";
+    }
+
+}
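
For context only (not part of this commit), a sketch of the state transitions a FeedJobInfo
typically goes through; the ordering below is inferred from how the lifecycle listener uses
these states and is illustrative only:

    import org.apache.asterix.external.feed.watch.FeedJobInfo;
    import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;

    public class FeedJobStateExample {
        // Each setState call logs "<jobId> [<jobType>] is in <state> state." at INFO level.
        static void advance(FeedJobInfo jobInfo) {
            jobInfo.setState(FeedJobState.ACTIVE);         // the job is running on the cluster
            jobInfo.setState(FeedJobState.UNDER_RECOVERY); // a participating node failed
            jobInfo.setState(FeedJobState.ENDED);          // the job finished or was aborted
        }
    }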

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedMetricCollector.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedMetricCollector.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedMetricCollector.java
new file mode 100644
index 0000000..f0db639
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/FeedMetricCollector.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedMetricCollector;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+
+public class FeedMetricCollector implements IFeedMetricCollector {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMetricCollector.class.getName());
+
+    private static final int UNKNOWN = -1;
+
+    private final AtomicInteger globalSenderId = new AtomicInteger(1);
+    private final Map<Integer, Sender> senders = new HashMap<Integer, Sender>();
+    private final Map<Integer, Series> statHistory = new HashMap<Integer, Series>();
+    private final Map<String, Sender> sendersByName = new HashMap<String, Sender>();
+
+    public FeedMetricCollector(String nodeId) {
+    }
+
+    @Override
+    public synchronized int createReportSender(FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            ValueType valueType, MetricType metricType) {
+        Sender sender = new Sender(globalSenderId.getAndIncrement(), connectionId, runtimeId, valueType, metricType);
+        senders.put(sender.senderId, sender);
+        sendersByName.put(sender.getDisplayName(), sender);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Sender id " + sender.getSenderId() + " created for " + sender);
+        }
+        return sender.senderId;
+    }
+
+    @Override
+    public void removeReportSender(int senderId) {
+        Sender sender = senders.get(senderId);
+        if (sender != null) {
+            statHistory.remove(senderId);
+            senders.remove(senderId);
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to remove sender Id");
+            }
+            throw new IllegalStateException("Unable to remove sender Id " + senderId + " senders " + senders);
+        }
+    }
+
+    @Override
+    public boolean sendReport(int senderId, int value) {
+        Sender sender = senders.get(senderId);
+        if (sender != null) {
+            Series series = statHistory.get(sender.senderId);
+            if (series == null) {
+                switch (sender.mType) {
+                    case AVG:
+                        series = new SeriesAvg();
+                        break;
+                    case RATE:
+                        series = new SeriesRate();
+                        break;
+                }
+                statHistory.put(sender.senderId, series);
+            }
+            series.addValue(value);
+            return true;
+        }
+        throw new IllegalStateException("Unable to send report sender Id " + senderId + " senders " + senders);
+    }
+
+    @Override
+    public void resetReportSender(int senderId) {
+        Sender sender = senders.get(senderId);
+        if (sender != null) {
+            Series series = statHistory.get(sender.senderId);
+            if (series != null) {
+                series.reset();
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Sender with id " + senderId + " not found. Unable to reset!");
+            }
+            throw new IllegalStateException("Unable to reset sender Id " + senderId + " senders " + senders);
+        }
+    }
+
+    private static class Sender {
+
+        private final int senderId;
+        private final MetricType mType;
+        private final String displayName;
+
+        public Sender(int senderId, FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType,
+                MetricType mType) {
+            this.senderId = senderId;
+            this.mType = mType;
+            this.displayName = createDisplayName(connectionId, runtimeId, valueType);
+        }
+
+        @Override
+        public String toString() {
+            return displayName + "[" + senderId + "]" + "(" + mType + ")";
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (!(o instanceof Sender)) {
+                return false;
+            }
+            return ((Sender) o).senderId == senderId;
+        }
+
+        @Override
+        public int hashCode() {
+            return senderId;
+        }
+
+        public static String createDisplayName(FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+                ValueType valueType) {
+            return connectionId + " (" + runtimeId.getFeedRuntimeType() + " )" + "[" + runtimeId.getPartition() + "]"
+                    + "{" + valueType + "}";
+        }
+
+        public String getDisplayName() {
+            return displayName;
+        }
+
+        public int getSenderId() {
+            return senderId;
+        }
+    }
+
+    @Override
+    public int getMetric(int senderId) {
+        Sender sender = senders.get(senderId);
+        return getMetric(sender);
+    }
+
+    @Override
+    public int getMetric(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType) {
+        String displayName = Sender.createDisplayName(connectionId, runtimeId, valueType);
+        Sender sender = sendersByName.get(displayName);
+        return getMetric(sender);
+    }
+
+    private int getMetric(Sender sender) {
+        if (sender == null || statHistory.get(sender.getSenderId()) == null) {
+            return UNKNOWN;
+        }
+
+        float result = -1;
+        Series series = statHistory.get(sender.getSenderId());
+        switch (sender.mType) {
+            case AVG:
+                result = ((SeriesAvg) series).getAvg();
+                break;
+            case RATE:
+                result = ((SeriesRate) series).getRate();
+                break;
+        }
+        return (int) result;
+    }
+
+}
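
For context only (not part of this commit), a minimal sketch of the metric-collector API
added above; the FeedConnectionId and FeedRuntimeId instances are assumed to be obtained
elsewhere (their construction is outside this file):

    import org.apache.asterix.external.feed.api.IFeedMetricCollector.MetricType;
    import org.apache.asterix.external.feed.api.IFeedMetricCollector.ValueType;
    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
    import org.apache.asterix.external.feed.watch.FeedMetricCollector;

    public class MetricCollectorExample {
        // Registers a sender for inflow-rate samples, reports two values and reads the metric back.
        static int sampleInflow(FeedMetricCollector collector, FeedConnectionId connectionId,
                FeedRuntimeId runtimeId) {
            int senderId = collector.createReportSender(connectionId, runtimeId,
                    ValueType.INFLOW_RATE, MetricType.RATE);
            collector.sendReport(senderId, 128); // 128 tuples observed in this window
            collector.sendReport(senderId, 256);
            int rate = collector.getMetric(senderId); // -1 (UNKNOWN) if nothing was recorded
            collector.removeReportSender(senderId);
            return rate;
        }
    }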

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakePartitionStatistics.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakePartitionStatistics.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakePartitionStatistics.java
new file mode 100644
index 0000000..acfd1fb
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakePartitionStatistics.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.util.BitSet;
+
+public class IntakePartitionStatistics {
+
+    public static int ACK_WINDOW_SIZE = 1024;
+    private BitSet bitSet;
+
+    public IntakePartitionStatistics(int partition, int base) {
+        this.bitSet = new BitSet(ACK_WINDOW_SIZE);
+    }
+
+    public void ackRecordId(int recordId) {
+        int posIndexWithinBase = recordId % ACK_WINDOW_SIZE;
+        this.bitSet.set(posIndexWithinBase);
+    }
+
+    public byte[] getAckInfo() {
+        return bitSet.toByteArray();
+    }
+
+}
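
For context only (not part of this commit), a short sketch of how the ack window in
IntakePartitionStatistics behaves:

    import org.apache.asterix.external.feed.watch.IntakePartitionStatistics;

    public class AckWindowExample {
        public static void main(String[] args) {
            // One instance tracks acknowledgements for a single 1024-record window of a partition.
            IntakePartitionStatistics stats = new IntakePartitionStatistics(0, 0);
            stats.ackRecordId(5);    // sets bit 5 (5 % ACK_WINDOW_SIZE)
            stats.ackRecordId(1029); // 1029 % 1024 = 5: lands on the same bit within this window
            byte[] ackInfo = stats.getAckInfo();
            System.out.println(ackInfo.length + " byte(s) of ack state"); // 1 byte; only bit 5 is set
        }
    }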

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakeSideMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakeSideMonitoredBuffer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakeSideMonitoredBuffer.java
new file mode 100644
index 0000000..7a79e23
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/IntakeSideMonitoredBuffer.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import org.apache.asterix.external.feed.api.IExceptionHandler;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector;
+import org.apache.asterix.external.feed.api.IFrameEventCallback;
+import org.apache.asterix.external.feed.api.IFramePostProcessor;
+import org.apache.asterix.external.feed.api.IFramePreprocessor;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class IntakeSideMonitoredBuffer extends MonitoredBuffer {
+
+    public IntakeSideMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter,
+            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
+            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
+            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
+        super(ctx, inputHandler, frameWriter, fta,  recordDesc, metricCollector, connectionId, runtimeId,
+                exceptionHandler, callback, nPartitions, policyAccessor);
+    }
+
+    @Override
+    protected boolean monitorProcessingRate() {
+        return false;
+    }
+
+    @Override
+    protected boolean logInflowOutflowRate() {
+        return false;
+    }
+
+    @Override
+    protected IFramePreprocessor getFramePreProcessor() {
+        return null;
+    }
+
+    @Override
+    protected IFramePostProcessor getFramePostProcessor() {
+        return null;
+    }
+
+    @Override
+    protected boolean monitorInputQueueLength() {
+        return false;
+    }
+
+    @Override
+    protected boolean reportOutflowRate() {
+        return false;
+    }
+
+    @Override
+    protected boolean reportInflowRate() {
+        return true;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBuffer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBuffer.java
new file mode 100644
index 0000000..db38edf
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBuffer.java
@@ -0,0 +1,396 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.logging.Level;
+
+import org.apache.asterix.external.feed.api.IExceptionHandler;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector;
+import org.apache.asterix.external.feed.api.IFrameEventCallback;
+import org.apache.asterix.external.feed.api.IFramePostProcessor;
+import org.apache.asterix.external.feed.api.IFramePreprocessor;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector.MetricType;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector.ValueType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.api.IFrameEventCallback.FrameEvent;
+import org.apache.asterix.external.feed.dataflow.DataBucket;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.dataflow.StorageFrameHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.MessageReceiver;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.feed.watch.MonitoredBufferTimerTasks.LogInputOutputRateTask;
+import org.apache.asterix.external.feed.watch.MonitoredBufferTimerTasks.MonitorInputQueueLengthTimerTask;
+import org.apache.asterix.external.feed.watch.MonitoredBufferTimerTasks.MonitoreProcessRateTimerTask;
+import org.apache.asterix.external.feed.watch.MonitoredBufferTimerTasks.MonitoredBufferStorageTimerTask;
+import org.apache.asterix.external.util.FeedFrameUtil;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public abstract class MonitoredBuffer extends MessageReceiver<DataBucket> {
+
+    protected static final long LOG_INPUT_OUTPUT_RATE_FREQUENCY = 5000; // 5 seconds
+    protected static final long INPUT_QUEUE_MEASURE_FREQUENCY = 1000; // 1 second
+    protected static final long PROCESSING_RATE_MEASURE_FREQUENCY = 10000; // 10 seconds
+
+    protected static final int PROCESS_RATE_REFRESH = 2; // refresh processing rate every PROCESS_RATE_REFRESH frames
+
+    protected final IHyracksTaskContext ctx;
+    protected final FeedConnectionId connectionId;
+    protected final FeedRuntimeId runtimeId;
+    protected final FrameTupleAccessor inflowFta;
+    protected final FrameTupleAccessor outflowFta;
+    protected final FeedRuntimeInputHandler inputHandler;
+    protected final IFrameEventCallback callback;
+    protected final Timer timer;
+    private final IExceptionHandler exceptionHandler;
+    protected final FeedPolicyAccessor policyAccessor;
+    protected int nPartitions;
+
+    private IFrameWriter frameWriter;
+    protected IFeedMetricCollector metricCollector;
+    protected boolean monitorProcessingRate = false;
+    protected boolean monitorInputQueueLength = false;
+    protected boolean logInflowOutflowRate = false;
+    protected boolean reportOutflowRate = false;
+    protected boolean reportInflowRate = false;
+
+    protected int inflowReportSenderId = -1;
+    protected int outflowReportSenderId = -1;
+    protected TimerTask monitorInputQueueLengthTask;
+    protected TimerTask processingRateTask;
+    protected TimerTask logInflowOutflowRateTask;
+    protected MonitoredBufferStorageTimerTask storageTimeTrackingRateTask;
+    protected StorageFrameHandler storageFromeHandler;
+
+    protected int processingRate = -1;
+    protected int frameCount = 0;
+    private long avgDelayPersistence = 0;
+    private boolean active;
+    private Map<Integer, Long> tupleTimeStats;
+    IFramePostProcessor postProcessor = null;
+    IFramePreprocessor preProcessor = null;
+
+    public static MonitoredBuffer getMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler,
+            IFrameWriter frameWriter, FrameTupleAccessor fta, RecordDescriptor recordDesc,
+            IFeedMetricCollector metricCollector, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            IExceptionHandler exceptionHandler, IFrameEventCallback callback, int nPartitions,
+            FeedPolicyAccessor policyAccessor) {
+        switch (runtimeId.getFeedRuntimeType()) {
+            case COMPUTE:
+                return new ComputeSideMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
+                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
+            case STORE:
+                return new StorageSideMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
+                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
+            case COLLECT:
+                return new IntakeSideMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
+                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
+            default:
+                return new BasicMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
+                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
+        }
+    }
+
+    protected MonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter,
+            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
+            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
+            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
+        this.ctx = ctx;
+        this.connectionId = connectionId;
+        this.frameWriter = frameWriter;
+        this.inflowFta = new FrameTupleAccessor(recordDesc);
+        this.outflowFta = new FrameTupleAccessor(recordDesc);
+        this.runtimeId = runtimeId;
+        this.metricCollector = metricCollector;
+        this.exceptionHandler = exceptionHandler;
+        this.callback = callback;
+        this.inputHandler = inputHandler;
+        this.timer = new Timer();
+        this.policyAccessor = policyAccessor;
+        this.nPartitions = nPartitions;
+        this.active = true;
+        initializeMonitoring();
+    }
+
+    protected abstract boolean monitorProcessingRate();
+
+    protected abstract boolean logInflowOutflowRate();
+
+    protected abstract boolean reportOutflowRate();
+
+    protected abstract boolean reportInflowRate();
+
+    protected abstract boolean monitorInputQueueLength();
+
+    protected abstract IFramePreprocessor getFramePreProcessor();
+
+    protected abstract IFramePostProcessor getFramePostProcessor();
+
+    protected void initializeMonitoring() {
+        monitorProcessingRate = monitorProcessingRate();
+        monitorInputQueueLength = monitorInputQueueLength();
+        reportInflowRate = reportInflowRate();
+        reportOutflowRate = reportOutflowRate();
+        logInflowOutflowRate = policyAccessor.isLoggingStatisticsEnabled() || logInflowOutflowRate();
+
+        if (monitorProcessingRate && policyAccessor.isElastic()) { // check possibility to scale in
+            this.processingRateTask = new MonitoreProcessRateTimerTask(this, inputHandler.getFeedManager(),
+                    connectionId, nPartitions);
+            this.timer.scheduleAtFixedRate(processingRateTask, 0, PROCESSING_RATE_MEASURE_FREQUENCY);
+        }
+
+        if (monitorInputQueueLength && (policyAccessor.isElastic() || policyAccessor.throttlingEnabled()
+                || policyAccessor.spillToDiskOnCongestion() || policyAccessor.discardOnCongestion())) {
+            this.monitorInputQueueLengthTask = new MonitorInputQueueLengthTimerTask(this, callback);
+            this.timer.scheduleAtFixedRate(monitorInputQueueLengthTask, 0, INPUT_QUEUE_MEASURE_FREQUENCY);
+        }
+
+        if (logInflowOutflowRate || reportInflowRate || reportOutflowRate) {
+            this.logInflowOutflowRateTask = new LogInputOutputRateTask(this, logInflowOutflowRate, reportInflowRate,
+                    reportOutflowRate);
+            this.timer.scheduleAtFixedRate(logInflowOutflowRateTask, 0, LOG_INPUT_OUTPUT_RATE_FREQUENCY);
+            this.inflowReportSenderId = metricCollector.createReportSender(connectionId, runtimeId,
+                    ValueType.INFLOW_RATE, MetricType.RATE);
+            this.outflowReportSenderId = metricCollector.createReportSender(connectionId, runtimeId,
+                    ValueType.OUTFLOW_RATE, MetricType.RATE);
+        }
+    }
+
+    protected void deinitializeMonitoring() {
+        if (monitorInputQueueLengthTask != null) {
+            monitorInputQueueLengthTask.cancel();
+        }
+        if (processingRateTask != null) {
+            processingRateTask.cancel();
+        }
+        if (logInflowOutflowRate || reportInflowRate || reportOutflowRate) {
+            metricCollector.removeReportSender(inflowReportSenderId);
+            metricCollector.removeReportSender(outflowReportSenderId);
+            logInflowOutflowRateTask.cancel();
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Disabled monitoring for " + this.runtimeId);
+        }
+    }
+
+    protected void postProcessFrame(long startTime, ByteBuffer frame) throws Exception {
+        if (monitorProcessingRate) {
+            frameCount++;
+            if (frameCount % PROCESS_RATE_REFRESH == 0) {
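+                // Refresh the estimate every PROCESS_RATE_REFRESH frames: tuples in the current
+                // frame divided by the wall-clock time taken to push it downstream.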
+                long endTime = System.currentTimeMillis();
+                processingRate = (int) ((double) outflowFta.getTupleCount() * 1000 / (endTime - startTime));
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Processing Rate :" + processingRate + " tuples/sec");
+                }
+                frameCount = 0;
+            }
+        }
+
+        if (logInflowOutflowRate || reportOutflowRate) {
+            metricCollector.sendReport(outflowReportSenderId, outflowFta.getTupleCount());
+        }
+
+        postProcessFrame(frame);
+
+    }
+
+    protected void preProcessFrame(ByteBuffer frame) throws Exception {
+        if (preProcessor == null) {
+            preProcessor = getFramePreProcessor();
+        }
+        if (preProcessor != null) {
+            preProcessor.preProcess(frame);
+        }
+    }
+
+    protected void postProcessFrame(ByteBuffer frame) throws Exception {
+        if (postProcessor == null) {
+            postProcessor = getFramePostProcessor();
+        }
+        if (postProcessor != null) {
+            outflowFta.reset(frame);
+            postProcessor.postProcessFrame(frame, outflowFta);
+        }
+    }
+
+    @Override
+    public void sendMessage(DataBucket message) {
+        inbox.add(message);
+    }
+
+    public void sendReport(ByteBuffer frame) {
+        if ((logInflowOutflowRate || reportInflowRate) && !(inputHandler.getMode().equals(Mode.PROCESS_BACKLOG)
+                || inputHandler.getMode().equals(Mode.PROCESS_SPILL))) {
+            inflowFta.reset(frame);
+            metricCollector.sendReport(inflowReportSenderId, inflowFta.getTupleCount());
+        }
+    }
+
+    /** Returns the inflow rate in tuples/sec. */
+    public int getInflowRate() {
+        return metricCollector.getMetric(inflowReportSenderId);
+    }
+
+    /** Returns the outflow rate in tuples/sec. */
+    public int getOutflowRate() {
+        return metricCollector.getMetric(outflowReportSenderId);
+    }
+
+    /** Returns the number of frames pending in the input queue. */
+    public int getWorkSize() {
+        return inbox.size();
+    }
+
+    /** Sets the number of partitions (cardinality) for the runtime. */
+    public void setNumberOfPartitions(int nPartitions) {
+        if (processingRateTask != null) {
+            int currentPartitions = ((MonitoreProcessRateTimerTask) processingRateTask).getNumberOfPartitions();
+            if (currentPartitions != nPartitions) {
+                ((MonitoreProcessRateTimerTask) processingRateTask).setNumberOfPartitions(nPartitions);
+            }
+        }
+    }
+
+    public FeedRuntimeInputHandler getInputHandler() {
+        return inputHandler;
+    }
+
+    public synchronized void close(boolean processPending, boolean disableMonitoring) {
+        super.close(processPending);
+        if (disableMonitoring) {
+            deinitializeMonitoring();
+        }
+        active = false;
+    }
+
+    @Override
+    public synchronized void processMessage(DataBucket message) throws Exception {
+        if (!active) {
+            message.doneReading();
+            return;
+        }
+        switch (message.getContentType()) {
+            case DATA:
+                boolean finishedProcessing = false;
+                ByteBuffer frameReceived = message.getContent();
+                ByteBuffer frameToProcess = null;
+                if (inputHandler.isThrottlingEnabled()) {
+                    inflowFta.reset(frameReceived);
+                    int pRate = getProcessingRate();
+                    int inflowRate = getInflowRate();
+                    if (inflowRate > pRate) {
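+                        // Retain only a fraction of the tuples so that the effective inflow is
+                        // throttled down to roughly 80% of the measured processing rate.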
+                        double retainFraction = (pRate * 0.8 / inflowRate);
+                        frameToProcess = throttleFrame(inflowFta, retainFraction);
+                        inflowFta.reset(frameToProcess);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Throttling at fraction " + retainFraction + "inflow rate " + inflowRate
+                                    + " no of tuples remaining " + inflowFta.getTupleCount());
+
+                        }
+                    } else {
+                        frameToProcess = frameReceived;
+                    }
+                } else {
+                    frameToProcess = frameReceived;
+                }
+                outflowFta.reset(frameToProcess);
+                long startTime = 0;
+                while (!finishedProcessing) {
+                    try {
+                        inflowFta.reset(frameToProcess);
+                        startTime = System.currentTimeMillis();
+                        preProcessFrame(frameToProcess);
+                        frameWriter.nextFrame(frameToProcess);
+                        postProcessFrame(startTime, frameToProcess);
+                        finishedProcessing = true;
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                        frameToProcess = exceptionHandler.handleException(e, frameToProcess);
+                        finishedProcessing = true;
+                    }
+                }
+                message.doneReading();
+                break;
+            case EOD:
+                message.doneReading();
+                timer.cancel();
+                callback.frameEvent(FrameEvent.FINISHED_PROCESSING);
+                break;
+            case EOSD:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Done processing spillage");
+                }
+                message.doneReading();
+                callback.frameEvent(FrameEvent.FINISHED_PROCESSING_SPILLAGE);
+                break;
+
+        }
+    }
+
+    private ByteBuffer throttleFrame(FrameTupleAccessor fta, double retainFraction) throws HyracksDataException {
+        int desiredTuples = (int) (fta.getTupleCount() * retainFraction);
+        return FeedFrameUtil.getSampledFrame(ctx, fta, desiredTuples);
+    }
+
+    public Mode getMode() {
+        return inputHandler.getMode();
+    }
+
+    public FeedRuntimeId getRuntimeId() {
+        return runtimeId;
+    }
+
+    public void setFrameWriter(IFrameWriter frameWriter) {
+        this.frameWriter = frameWriter;
+    }
+
+    public void reset() {
+        active = true;
+        if (logInflowOutflowRate) {
+            metricCollector.resetReportSender(inflowReportSenderId);
+            metricCollector.resetReportSender(outflowReportSenderId);
+        }
+    }
+
+    public int getProcessingRate() {
+        return processingRate;
+    }
+
+    public Map<Integer, Long> getTupleTimeStats() {
+        return tupleTimeStats;
+    }
+
+    public long getAvgDelayRecordPersistence() {
+        return avgDelayPersistence;
+    }
+
+    public MonitoredBufferStorageTimerTask getStorageTimeTrackingRateTask() {
+        return storageTimeTrackingRateTask;
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBufferTimerTasks.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBufferTimerTasks.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBufferTimerTasks.java
new file mode 100644
index 0000000..86c6bca
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/MonitoredBufferTimerTasks.java
@@ -0,0 +1,299 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TimerTask;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.config.AsterixFeedProperties;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedMessageService;
+import org.apache.asterix.external.feed.api.IFrameEventCallback;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector.ValueType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.api.IFrameEventCallback.FrameEvent;
+import org.apache.asterix.external.feed.dataflow.StorageFrameHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedReportMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
+import org.apache.asterix.external.feed.message.ScaleInReportMessage;
+import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+
+public class MonitoredBufferTimerTasks {
+
+    private static final Logger LOGGER = Logger.getLogger(MonitorInputQueueLengthTimerTask.class.getName());
+
+    public static class MonitoredBufferStorageTimerTask extends TimerTask {
+
+        private static final int PERSISTENCE_DELAY_VIOLATION_MAX = 5;
+
+        private final StorageSideMonitoredBuffer mBuffer;
+        private final IFeedManager feedManager;
+        private final int partition;
+        private final FeedConnectionId connectionId;
+        private final FeedPolicyAccessor policyAccessor;
+        private final StorageFrameHandler storageFromeHandler;
+        private final StorageReportFeedMessage storageReportMessage;
+        private final FeedTupleCommitAckMessage tupleCommitAckMessage;
+
+        private Map<Integer, Integer> maxIntakeBaseCovered;
+        private int countDelayExceeded = 0;
+
+        public MonitoredBufferStorageTimerTask(StorageSideMonitoredBuffer mBuffer, IFeedManager feedManager,
+                FeedConnectionId connectionId, int partition, FeedPolicyAccessor policyAccessor,
+                StorageFrameHandler storageFromeHandler) {
+            this.mBuffer = mBuffer;
+            this.feedManager = feedManager;
+            this.connectionId = connectionId;
+            this.partition = partition;
+            this.policyAccessor = policyAccessor;
+            this.storageFromeHandler = storageFromeHandler;
+            this.storageReportMessage = new StorageReportFeedMessage(this.connectionId, this.partition, 0, false, 0, 0);
+            this.tupleCommitAckMessage = new FeedTupleCommitAckMessage(this.connectionId, 0, 0, null);
+            this.maxIntakeBaseCovered = new HashMap<Integer, Integer>();
+        }
+
+        @Override
+        public void run() {
+            if (mBuffer.isAckingEnabled() && !mBuffer.getInputHandler().isThrottlingEnabled()) {
+                ackRecords();
+            }
+            if (mBuffer.isTimeTrackingEnabled()) {
+                checkLatencyViolation();
+            }
+        }
+
+        private void ackRecords() {
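+            // For each intake partition, acknowledge every base beyond the highest base already
+            // acknowledged; bases that are already covered are pruned from the statistics.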
+            Set<Integer> partitions = storageFromeHandler.getPartitionsWithStats();
+            List<Integer> basesCovered = new ArrayList<Integer>();
+            for (int intakePartition : partitions) {
+                Map<Integer, IntakePartitionStatistics> baseAcks = storageFromeHandler
+                        .getBaseAcksForPartition(intakePartition);
+                for (Entry<Integer, IntakePartitionStatistics> entry : baseAcks.entrySet()) {
+                    int base = entry.getKey();
+                    IntakePartitionStatistics stats = entry.getValue();
+                    Integer maxIntakeBaseForPartition = maxIntakeBaseCovered.get(intakePartition);
+                    if (maxIntakeBaseForPartition == null || maxIntakeBaseForPartition < base) {
+                        tupleCommitAckMessage.reset(intakePartition, base, stats.getAckInfo());
+                        feedManager.getFeedMessageService().sendMessage(tupleCommitAckMessage);
+                    } else {
+                        basesCovered.add(base);
+                    }
+                }
+                for (Integer b : basesCovered) {
+                    baseAcks.remove(b);
+                }
+                basesCovered.clear();
+            }
+        }
+
+        private void checkLatencyViolation() {
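+            // Report a persistence-delay violation only after the average delay has exceeded the
+            // policy limit for more than PERSISTENCE_DELAY_VIOLATION_MAX consecutive checks.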
+            long avgDelayPersistence = storageFromeHandler.getAvgDelayPersistence();
+            if (avgDelayPersistence > policyAccessor.getMaxDelayRecordPersistence()) {
+                countDelayExceeded++;
+                if (countDelayExceeded > PERSISTENCE_DELAY_VIOLATION_MAX) {
+                    storageReportMessage.reset(0, false, mBuffer.getAvgDelayRecordPersistence());
+                    feedManager.getFeedMessageService().sendMessage(storageReportMessage);
+                }
+            } else {
+                countDelayExceeded = 0;
+            }
+        }
+
+        public void receiveCommitAckResponse(FeedTupleCommitResponseMessage message) {
+            maxIntakeBaseCovered.put(message.getIntakePartition(), message.getMaxWindowAcked());
+        }
+    }
+
+    public static class LogInputOutputRateTask extends TimerTask {
+
+        private final MonitoredBuffer mBuffer;
+        private final boolean log;
+        private final boolean reportInflow;
+        private final boolean reportOutflow;
+
+        private final IFeedMessageService messageService;
+        private final FeedReportMessage message;
+
+        public LogInputOutputRateTask(MonitoredBuffer mBuffer, boolean log, boolean reportInflow, boolean reportOutflow) {
+            this.mBuffer = mBuffer;
+            this.log = log;
+            this.reportInflow = reportInflow;
+            this.reportOutflow = reportOutflow;
+            if (reportInflow || reportOutflow) {
+                ValueType vType = reportInflow ? ValueType.INFLOW_RATE : ValueType.OUTFLOW_RATE;
+                messageService = mBuffer.getInputHandler().getFeedManager().getFeedMessageService();
+                message = new FeedReportMessage(mBuffer.getInputHandler().getConnectionId(), mBuffer.getRuntimeId(),
+                        vType, 0);
+            } else {
+                messageService = null;
+                message = null;
+            }
+
+        }
+
+        @Override
+        public void run() {
+            int pendingWork = mBuffer.getWorkSize();
+            int outflowRate = mBuffer.getOutflowRate();
+            int inflowRate = mBuffer.getInflowRate();
+            if (log) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(mBuffer.getRuntimeId() + " inflow rate: " + inflowRate + ", outflow rate: "
+                            + outflowRate + ", pending work: " + pendingWork);
+                }
+            }
+            // Send a report only when reporting is enabled; messageService is null otherwise.
+            if (reportInflow) {
+                message.reset(inflowRate);
+                messageService.sendMessage(message);
+            } else if (reportOutflow) {
+                message.reset(outflowRate);
+                messageService.sendMessage(message);
+            }
+        }
+    }
+
+    public static class MonitorInputQueueLengthTimerTask extends TimerTask {
+
+        private final MonitoredBuffer mBuffer;
+        private final IFrameEventCallback callback;
+        private final int pendingWorkThreshold;
+        private final int maxSuccessiveThresholdPeriods;
+        private FrameEvent lastEvent = FrameEvent.NO_OP;
+        private int pendingWorkExceedCount = 0;
+
+        public MonitorInputQueueLengthTimerTask(MonitoredBuffer mBuffer, IFrameEventCallback callback) {
+            this.mBuffer = mBuffer;
+            this.callback = callback;
+            AsterixFeedProperties props = mBuffer.getInputHandler().getFeedManager().getAsterixFeedProperties();
+            pendingWorkThreshold = props.getPendingWorkThreshold();
+            maxSuccessiveThresholdPeriods = props.getMaxSuccessiveThresholdPeriod();
+        }
+
+        @Override
+        public void run() {
+            int pendingWork = mBuffer.getWorkSize();
+            if (mBuffer.getMode().equals(Mode.PROCESS_SPILL) || mBuffer.getMode().equals(Mode.PROCESS_BACKLOG)) {
+                return;
+            }
+
+            switch (lastEvent) {
+                case NO_OP:
+                case PENDING_WORK_DONE:
+                case FINISHED_PROCESSING_SPILLAGE:
+                    if (pendingWork > pendingWorkThreshold) {
+                        pendingWorkExceedCount++;
+                        if (pendingWorkExceedCount > maxSuccessiveThresholdPeriods) {
+                            pendingWorkExceedCount = 0;
+                            lastEvent = FrameEvent.PENDING_WORK_THRESHOLD_REACHED;
+                            callback.frameEvent(lastEvent);
+                        }
+                    } else if (pendingWork == 0 && mBuffer.getMode().equals(Mode.SPILL)) {
+                        lastEvent = FrameEvent.PENDING_WORK_DONE;
+                        callback.frameEvent(lastEvent);
+                    }
+                    break;
+                case PENDING_WORK_THRESHOLD_REACHED:
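+                    // Hysteresis: signal PENDING_WORK_DONE only once pending work has dropped to
+                    // half of the threshold, to avoid oscillating between the two events.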
+                    if (((pendingWork * 1.0) / pendingWorkThreshold) <= 0.5) {
+                        lastEvent = FrameEvent.PENDING_WORK_DONE;
+                        callback.frameEvent(lastEvent);
+                    }
+                    break;
+                case FINISHED_PROCESSING:
+                    break;
+
+            }
+        }
+    }
+
+    /**
+     * A timer task that periodically compares the processing rate against the inflow rate to
+     * determine whether a scale-in is possible, i.e. whether the degree of parallelism
+     * (cardinality) of the compute operator can be reduced.
+     */
+    public static class MonitoreProcessRateTimerTask extends TimerTask {
+
+        private final MonitoredBuffer mBuffer;
+        private final IFeedManager feedManager;
+        private int nPartitions;
+        private ScaleInReportMessage sMessage;
+        private boolean proposedChange;
+
+        public MonitoreProcessRateTimerTask(MonitoredBuffer mBuffer, IFeedManager feedManager,
+                FeedConnectionId connectionId, int nPartitions) {
+            this.mBuffer = mBuffer;
+            this.feedManager = feedManager;
+            this.nPartitions = nPartitions;
+            this.sMessage = new ScaleInReportMessage(connectionId, FeedRuntimeType.COMPUTE, 0, 0);
+            this.proposedChange = false;
+        }
+
+        public int getNumberOfPartitions() {
+            return nPartitions;
+        }
+
+        public void setNumberOfPartitions(int nPartitions) {
+            this.nPartitions = nPartitions;
+            proposedChange = false;
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Reset the number of partitions for " + mBuffer.getRuntimeId() + " to " + nPartitions);
+            }
+        }
+
+        @Override
+        public void run() {
+            if (!proposedChange) {
+                int inflowRate = mBuffer.getInflowRate();
+                int procRate = mBuffer.getProcessingRate();
+                if (inflowRate > 0 && procRate > 0) {
+                    if (inflowRate < procRate) {
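+                        // Smallest cardinality that can still absorb the inflow; propose a
+                        // scale-in only if it frees at least 25% of the current partitions.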
+                        int possibleCardinality = (int) Math.ceil(nPartitions * inflowRate / (double) procRate);
+                        if (possibleCardinality < nPartitions
+                                && ((((nPartitions - possibleCardinality) * 1.0) / nPartitions) >= 0.25)) {
+                            sMessage.reset(nPartitions, possibleCardinality);
+                            feedManager.getFeedMessageService().sendMessage(sMessage);
+                            proposedChange = true;
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Proposed scale-in " + sMessage);
+                            }
+                        }
+                    } else {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Inflow Rate (" + inflowRate + ") exceeds Processing Rate" + " (" + procRate
+                                    + ")");
+                        }
+                    }
+                }
+            } else {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Waiting for earlier proposal to scale in to be applied");
+                }
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoad.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoad.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoad.java
new file mode 100644
index 0000000..d3919b5
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoad.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+
+public class NodeLoad implements Comparable<NodeLoad> {
+
+    private final String nodeId;
+
+    private int nRuntimes;
+
+    public NodeLoad(String nodeId) {
+        this.nodeId = nodeId;
+        this.nRuntimes = 0;
+    }
+
+    public void addLoad() {
+        nRuntimes++;
+    }
+
+    public void removeLoad(FeedRuntimeType runtimeType) {
+        nRuntimes--;
+    }
+
+    @Override
+    public int compareTo(NodeLoad o) {
+        if (this == o) {
+            return 0;
+        }
+        return nRuntimes - o.getnRuntimes();
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+    public int getnRuntimes() {
+        return nRuntimes;
+    }
+
+    public void setnRuntimes(int nRuntimes) {
+        this.nRuntimes = nRuntimes;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReport.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReport.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReport.java
new file mode 100644
index 0000000..bfddcf6
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReport.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import org.apache.asterix.external.util.FeedConstants;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public class NodeLoadReport implements Comparable<NodeLoadReport> {
+
+    private final String nodeId;
+    private float cpuLoad;
+    private double usedHeap;
+    private int nRuntimes;
+
+    public NodeLoadReport(String nodeId, float cpuLoad, float usedHeap, int nRuntimes) {
+        this.nodeId = nodeId;
+        this.cpuLoad = cpuLoad;
+        this.usedHeap = usedHeap;
+        this.nRuntimes = nRuntimes;
+    }
+
+    public static NodeLoadReport read(JSONObject obj) throws JSONException {
+        NodeLoadReport r = new NodeLoadReport(obj.getString(FeedConstants.MessageConstants.NODE_ID),
+                (float) obj.getDouble(FeedConstants.MessageConstants.CPU_LOAD),
+                (float) obj.getDouble(FeedConstants.MessageConstants.HEAP_USAGE),
+                obj.getInt(FeedConstants.MessageConstants.N_RUNTIMES));
+        return r;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof NodeLoadReport)) {
+            return false;
+        }
+        return ((NodeLoadReport) o).nodeId.equals(nodeId);
+    }
+
+    @Override
+    public int hashCode() {
+        return nodeId.hashCode();
+    }
+
+    @Override
+    public int compareTo(NodeLoadReport o) {
+        if (nRuntimes != o.getnRuntimes()) {
+            return nRuntimes - o.getnRuntimes();
+        } else {
+            return Float.compare(this.cpuLoad, o.cpuLoad);
+        }
+    }
+
+    public float getCpuLoad() {
+        return cpuLoad;
+    }
+
+    public void setCpuLoad(float cpuLoad) {
+        this.cpuLoad = cpuLoad;
+    }
+
+    public double getUsedHeap() {
+        return usedHeap;
+    }
+
+    public void setUsedHeap(double usedHeap) {
+        this.usedHeap = usedHeap;
+    }
+
+    public int getnRuntimes() {
+        return nRuntimes;
+    }
+
+    public void setnRuntimes(int nRuntimes) {
+        this.nRuntimes = nRuntimes;
+    }
+
+    public String getNodeId() {
+        return nodeId;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReportService.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReportService.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReportService.java
new file mode 100644
index 0000000..f651935
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/NodeLoadReportService.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.OperatingSystemMXBean;
+import java.util.List;
+import java.util.Timer;
+import java.util.TimerTask;
+
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedMessageService;
+import org.apache.asterix.external.feed.api.IFeedService;
+import org.apache.asterix.external.feed.message.NodeReportMessage;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+
+public class NodeLoadReportService implements IFeedService {
+
+    private static final int NODE_LOAD_REPORT_FREQUENCY = 2000;
+    private static final float CPU_CHANGE_THRESHOLD = 0.2f;
+    private static final float HEAP_CHANGE_THRESHOLD = 0.4f;
+
+    private final NodeLoadReportTask task;
+    private final Timer timer;
+
+    public NodeLoadReportService(String nodeId, IFeedManager feedManager) {
+        this.task = new NodeLoadReportTask(nodeId, feedManager);
+        this.timer = new Timer();
+    }
+
+    @Override
+    public void start() throws Exception {
+        timer.schedule(task, 0, NODE_LOAD_REPORT_FREQUENCY);
+    }
+
+    @Override
+    public void stop() {
+        timer.cancel();
+    }
+
+    private static class NodeLoadReportTask extends TimerTask {
+
+        private final IFeedManager feedManager;
+        private final NodeReportMessage message;
+        private final IFeedMessageService messageService;
+
+        private static OperatingSystemMXBean osBean = ManagementFactory.getOperatingSystemMXBean();
+        private static MemoryMXBean memBean = ManagementFactory.getMemoryMXBean();
+
+        public NodeLoadReportTask(String nodeId, IFeedManager feedManager) {
+            this.feedManager = feedManager;
+            this.message = new NodeReportMessage(0.0f, 0L, 0);
+            this.messageService = feedManager.getFeedMessageService();
+        }
+
+        @Override
+        public void run() {
+            List<FeedRuntimeId> runtimeIds = feedManager.getFeedConnectionManager().getRegisteredRuntimes();
+            int nRuntimes = runtimeIds.size();
+            double cpuLoad = getCpuLoad();
+            double usedHeap = getUsedHeap();
+            if (sendMessage(nRuntimes, cpuLoad, usedHeap)) {
+                message.reset(cpuLoad, usedHeap, nRuntimes);
+                messageService.sendMessage(message);
+            }
+        }
+
+        private boolean sendMessage(int nRuntimes, double cpuLoad, double usedHeap) {
+            if (message == null) {
+                return true;
+            }
+
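+            // Send a report only when the load has changed materially since the last report:
+            // a relative CPU change above 20%, a relative heap change above 40%, or a change
+            // in the number of registered feed runtimes.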
+            boolean changeInCpu = (Math.abs(cpuLoad - message.getCpuLoad())
+                    / message.getCpuLoad()) > CPU_CHANGE_THRESHOLD;
+            boolean changeInUsedHeap = (Math.abs(usedHeap - message.getUsedHeap())
+                    / message.getUsedHeap()) > HEAP_CHANGE_THRESHOLD;
+            boolean changeInRuntimeSize = nRuntimes != message.getnRuntimes();
+            return changeInCpu || changeInUsedHeap || changeInRuntimeSize;
+        }
+
+        private double getCpuLoad() {
+            return osBean.getSystemLoadAverage();
+        }
+
+        private double getUsedHeap() {
+            return ((double) memBean.getHeapMemoryUsage().getUsed()) / memBean.getHeapMemoryUsage().getMax();
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/Series.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/Series.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/Series.java
new file mode 100644
index 0000000..ec95371
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/Series.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import org.apache.asterix.external.feed.api.IFeedMetricCollector.MetricType;
+
+public abstract class Series {
+
+    protected final MetricType type;
+    protected int runningSum;
+
+    public Series(MetricType type) {
+        this.type = type;
+    }
+
+    public abstract void addValue(int value);
+
+    public int getRunningSum() {
+        return runningSum;
+    }
+
+    public MetricType getType() {
+        return type;
+    }
+
+    public abstract void reset();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesAvg.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesAvg.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesAvg.java
new file mode 100644
index 0000000..f75379d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesAvg.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import org.apache.asterix.external.feed.api.IFeedMetricCollector.MetricType;
+
+public class SeriesAvg extends Series {
+
+    private int count;
+
+    public SeriesAvg() {
+        super(MetricType.AVG);
+    }
+
+    public int getAvg() {
+        return count == 0 ? 0 : runningSum / count;
+    }
+
+    public synchronized void addValue(int value) {
+        if (value < 0) {
+            return;
+        }
+        runningSum += value;
+        count++;
+    }
+
+    public synchronized void reset() {
+        runningSum = 0;
+        count = 0;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesRate.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesRate.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesRate.java
new file mode 100644
index 0000000..91eea87
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/SeriesRate.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.util.Timer;
+import java.util.TimerTask;
+
+import org.apache.asterix.external.feed.api.IFeedMetricCollector.MetricType;
+
+public class SeriesRate extends Series {
+
+    private static final long REFRESH_MEASUREMENT = 5000; // 5 seconds
+
+    private int rate;
+    private Timer timer;
+    private RateComputingTask task;
+
+    public SeriesRate() {
+        super(MetricType.RATE);
+        begin();
+    }
+
+    public int getRate() {
+        return rate;
+    }
+
+    public synchronized void addValue(int value) {
+        if (value < 0) {
+            return;
+        }
+        runningSum += value;
+    }
+
+    public void begin() {
+        if (timer == null) {
+            timer = new Timer();
+            task = new RateComputingTask(this);
+            timer.scheduleAtFixedRate(task, 0, REFRESH_MEASUREMENT);
+        }
+    }
+
+    public void end() {
+        if (timer != null) {
+            timer.cancel();
+        }
+    }
+
+    public void reset() {
+        rate = 0;
+        if (task != null) {
+            task.reset();
+        }
+    }
+
+    private class RateComputingTask extends TimerTask {
+
+        private int lastMeasured = 0;
+        private final SeriesRate series;
+
+        public RateComputingTask(SeriesRate series) {
+            this.series = series;
+        }
+
+        @Override
+        public void run() {
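+            // Rate (per second) = growth of the running sum since the last measurement, scaled
+            // from the refresh interval (in milliseconds) to one second.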
+            int currentValue = series.getRunningSum();
+            rate = (int) (((currentValue - lastMeasured) * 1000) / REFRESH_MEASUREMENT);
+            lastMeasured = currentValue;
+        }
+
+        public void reset() {
+            lastMeasured = 0;
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/StorageSideMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/StorageSideMonitoredBuffer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/StorageSideMonitoredBuffer.java
new file mode 100644
index 0000000..1f9551d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/StorageSideMonitoredBuffer.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.external.feed.api.IExceptionHandler;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector;
+import org.apache.asterix.external.feed.api.IFrameEventCallback;
+import org.apache.asterix.external.feed.api.IFramePostProcessor;
+import org.apache.asterix.external.feed.api.IFramePreprocessor;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.dataflow.StorageFrameHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.util.FeedConstants.StatisticsConstants;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class StorageSideMonitoredBuffer extends MonitoredBuffer {
+
+    private static final long STORAGE_TIME_TRACKING_FREQUENCY = 5000; // 5 seconds
+
+    private boolean ackingEnabled;
+    private final boolean timeTrackingEnabled;
+
+    public StorageSideMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler,
+            IFrameWriter frameWriter, FrameTupleAccessor fta, RecordDescriptor recordDesc,
+            IFeedMetricCollector metricCollector, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            IExceptionHandler exceptionHandler, IFrameEventCallback callback, int nPartitions,
+            FeedPolicyAccessor policyAccessor) {
+        super(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector, connectionId, runtimeId,
+                exceptionHandler, callback, nPartitions, policyAccessor);
+        timeTrackingEnabled = policyAccessor.isTimeTrackingEnabled();
+        ackingEnabled = policyAccessor.atleastOnceSemantics();
+        if (ackingEnabled || timeTrackingEnabled) {
+            storageFromeHandler = new StorageFrameHandler();
+            this.storageTimeTrackingRateTask = new MonitoredBufferTimerTasks.MonitoredBufferStorageTimerTask(this,
+                    inputHandler.getFeedManager(), connectionId, runtimeId.getPartition(), policyAccessor,
+                    storageFromeHandler);
+            this.timer.scheduleAtFixedRate(storageTimeTrackingRateTask, 0, STORAGE_TIME_TRACKING_FREQUENCY);
+        }
+    }
+
+    @Override
+    protected boolean monitorProcessingRate() {
+        return false;
+    }
+
+    @Override
+    protected boolean logInflowOutflowRate() {
+        return true;
+    }
+
+    @Override
+    public IFramePreprocessor getFramePreProcessor() {
+        return new IFramePreprocessor() {
+
+            @Override
+            public void preProcess(ByteBuffer frame) {
+                try {
+                    if (ackingEnabled) {
+                        storageFromeHandler.updateTrackingInformation(frame, inflowFta);
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        };
+    }
+
+    @Override
+    protected IFramePostProcessor getFramePostProcessor() {
+        return new IFramePostProcessor() {
+
+            private static final long NORMAL_WINDOW_LIMIT = 400 * 1000;
+            private static final long HIGH_WINDOW_LIMIT = 800 * 1000;
+
+            private long delayNormalWindow = 0;
+            private long delayHighWindow = 0;
+            private long delayLowWindow = 0;
+
+            private int countNormalWindow;
+            private int countHighWindow;
+            private int countLowWindow;
+
+            private long beginIntakeTimestamp = 0;
+
+            @Override
+            public void postProcessFrame(ByteBuffer frame, FrameTupleAccessor frameAccessor) {
+                if (ackingEnabled || timeTrackingEnabled) {
+                    int nTuples = frameAccessor.getTupleCount();
+                    long intakeTimestamp;
+                    long currentTime = System.currentTimeMillis();
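+                    // For each tuple, locate the intake timestamp stored in the record's open
+                    // part, update the per-window delay averages, and stamp the current time
+                    // into the record's store-timestamp field.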
+                    for (int i = 0; i < nTuples; i++) {
+                        int recordStart = frameAccessor.getTupleStartOffset(i) + frameAccessor.getFieldSlotsLength();
+                        int openPartOffsetOrig = frame.getInt(recordStart + 6);
+                        int numOpenFields = frame.getInt(recordStart + openPartOffsetOrig);
+
+                        int recordIdOffset = openPartOffsetOrig + 4 + 8 * numOpenFields
+                                + (StatisticsConstants.INTAKE_TUPLEID.length() + 2) + 1;
+
+                        int partitionOffset = recordIdOffset + 4 + (StatisticsConstants.INTAKE_PARTITION.length() + 2)
+                                + 1;
+
+                        int intakeTimestampValueOffset = partitionOffset + 4
+                                + (StatisticsConstants.INTAKE_TIMESTAMP.length() + 2) + 1;
+                        intakeTimestamp = frame.getLong(recordStart + intakeTimestampValueOffset);
+                        if (beginIntakeTimestamp == 0) {
+                            beginIntakeTimestamp = intakeTimestamp;
+                            LOGGER.warning("Begin Timestamp: " + beginIntakeTimestamp);
+                        }
+
+                        updateRunningAvg(intakeTimestamp, currentTime);
+
+                        int storeTimestampValueOffset = intakeTimestampValueOffset + 8
+                                + (StatisticsConstants.STORE_TIMESTAMP.length() + 2) + 1;
+                        frame.putLong(recordStart + storeTimestampValueOffset, System.currentTimeMillis());
+                    }
+                    logRunningAvg();
+                    resetRunningAvg();
+                }
+            }
+
+            private void updateRunningAvg(long intakeTimestamp, long currentTime) {
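+                // Bucket the delay by how far into the feed (relative to the first observed
+                // intake timestamp) the tuple was ingested, and accumulate a per-bucket average.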
+                long diffTimestamp = intakeTimestamp - beginIntakeTimestamp;
+                long delay = (currentTime - intakeTimestamp);
+                if (diffTimestamp < NORMAL_WINDOW_LIMIT) {
+                    delayNormalWindow += delay;
+                    countNormalWindow++;
+                } else if (diffTimestamp < HIGH_WINDOW_LIMIT) {
+                    delayHighWindow += delay;
+                    countHighWindow++;
+                } else {
+                    delayLowWindow += delay;
+                    countLowWindow++;
+                }
+            }
+
+            private void resetRunningAvg() {
+                delayNormalWindow = 0;
+                countNormalWindow = 0;
+                delayHighWindow = 0;
+                countHighWindow = 0;
+                delayLowWindow = 0;
+                countLowWindow = 0;
+            }
+
+            private void logRunningAvg() {
+                if (countNormalWindow != 0 && delayNormalWindow != 0) {
+                    LOGGER.warning("Window:" + 0 + ":" + "Avg Travel_Time:" + (delayNormalWindow / countNormalWindow));
+                }
+                if (countHighWindow != 0 && delayHighWindow != 0) {
+                    LOGGER.warning("Window:" + 1 + ":" + "Avg Travel_Time:" + (delayHighWindow / countHighWindow));
+                }
+                if (countLowWindow != 0 && delayLowWindow != 0) {
+                    LOGGER.warning("Window:" + 2 + ":" + "Avg Travel_Time:" + (delayLowWindow / countLowWindow));
+                }
+            }
+
+        };
+    }
+
+    public boolean isAckingEnabled() {
+        return ackingEnabled;
+    }
+
+    public void setAcking(boolean ackingEnabled) {
+        this.ackingEnabled = ackingEnabled;
+    }
+
+    public boolean isTimeTrackingEnabled() {
+        return timeTrackingEnabled;
+    }
+
+    @Override
+    protected boolean monitorInputQueueLength() {
+        return true;
+    }
+
+    @Override
+    protected boolean reportOutflowRate() {
+        return true;
+    }
+
+    @Override
+    protected boolean reportInflowRate() {
+        return false;
+    }
+
+}


[10/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorNodePushable.java
new file mode 100644
index 0000000..3cb5d64
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorNodePushable.java
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.api.IAdapterRuntimeManager;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.feed.dataflow.FeedCollectRuntimeInputHandler;
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector;
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector.State;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedRuntimeManager;
+import org.apache.asterix.external.feed.message.EndFeedMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
+import org.apache.asterix.external.feed.message.PrepareStallMessage;
+import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.runtime.CollectionRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.feed.runtime.IngestionRuntime;
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+import org.apache.asterix.external.feed.watch.IntakePartitionStatistics;
+import org.apache.asterix.external.feed.watch.MonitoredBufferTimerTasks.MonitoredBufferStorageTimerTask;
+import org.apache.asterix.external.feed.watch.StorageSideMonitoredBuffer;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * Runtime for the FeedMessageOperatorDescriptor. This operator is responsible for communicating
+ * a feed message to the local feed manager on the host node controller.
+ * @see FeedMessageOperatorDescriptor
+ * @see IFeedMessage
+ * @see IFeedManager
+ */
+public class FeedMessageOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMessageOperatorNodePushable.class.getName());
+
+    private final FeedConnectionId connectionId;
+    private final IFeedMessage message;
+    private final IFeedManager feedManager;
+    private final int partition;
+
+    public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx, FeedConnectionId connectionId,
+            IFeedMessage feedMessage, int partition, int nPartitions) {
+        this.connectionId = connectionId;
+        this.message = feedMessage;
+        this.partition = partition;
+        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+                .getApplicationContext().getApplicationObject();
+        this.feedManager = (IFeedManager) runtimeCtx.getFeedManager();
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        try {
+            writer.open();
+            switch (message.getMessageType()) {
+                case END:
+                    EndFeedMessage endFeedMessage = (EndFeedMessage) message;
+                    switch (endFeedMessage.getEndMessageType()) {
+                        case DISCONNECT_FEED:
+                            handleDisconnectFeedTypeMessage(endFeedMessage);
+                            break;
+                        case DISCONTINUE_SOURCE:
+                            handleDiscontinueFeedTypeMessage(endFeedMessage);
+                            break;
+                    }
+                    break;
+                case PREPARE_STALL: {
+                    handlePrepareStallMessage((PrepareStallMessage) message);
+                    break;
+                }
+                case TERMINATE_FLOW: {
+                    FeedConnectionId connectionId = ((TerminateDataFlowMessage) message).getConnectionId();
+                    handleTerminateFlowMessage(connectionId);
+                    break;
+                }
+                case COMMIT_ACK_RESPONSE: {
+                    handleFeedTupleCommitResponseMessage((FeedTupleCommitResponseMessage) message);
+                    break;
+                }
+                case THROTTLING_ENABLED: {
+                    handleThrottlingEnabledMessage((ThrottlingEnabledFeedMessage) message);
+                    break;
+                }
+                default:
+                    break;
+
+            }
+
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            writer.close();
+        }
+    }
+
+    private void handleThrottlingEnabledMessage(ThrottlingEnabledFeedMessage throttlingMessage) {
+        FeedConnectionId connectionId = throttlingMessage.getConnectionId();
+        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
+        Set<FeedRuntimeId> runtimes = runtimeManager.getFeedRuntimes();
+        for (FeedRuntimeId runtimeId : runtimes) {
+            if (runtimeId.getFeedRuntimeType().equals(FeedRuntimeType.STORE)) {
+                FeedRuntime storeRuntime = runtimeManager.getFeedRuntime(runtimeId);
+                ((StorageSideMonitoredBuffer) (storeRuntime.getInputHandler().getmBuffer())).setAcking(false);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Acking disabled in view of throttling that has been activated upfront in the pipeline "
+                            + connectionId);
+                }
+            }
+        }
+    }
+
+    private void handleFeedTupleCommitResponseMessage(FeedTupleCommitResponseMessage commitResponseMessage) {
+        FeedConnectionId connectionId = commitResponseMessage.getConnectionId();
+        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
+        Set<FeedRuntimeId> runtimes = runtimeManager.getFeedRuntimes();
+        for (FeedRuntimeId runtimeId : runtimes) {
+            FeedRuntime runtime = runtimeManager.getFeedRuntime(runtimeId);
+            switch (runtimeId.getFeedRuntimeType()) {
+                case COLLECT:
+                    FeedCollectRuntimeInputHandler inputHandler = (FeedCollectRuntimeInputHandler) runtime
+                            .getInputHandler();
+                    int maxBasePersisted = commitResponseMessage.getMaxWindowAcked();
+                    inputHandler.dropTill(IntakePartitionStatistics.ACK_WINDOW_SIZE * (maxBasePersisted + 1));
+                    break;
+                case STORE:
+                    MonitoredBufferStorageTimerTask sTask = runtime.getInputHandler().getmBuffer()
+                            .getStorageTimeTrackingRateTask();
+                    sTask.receiveCommitAckResponse(commitResponseMessage);
+                    break;
+                default:
+                    break;
+            }
+        }
+
+        commitResponseMessage.getIntakePartition();
+        SubscribableFeedRuntimeId sid = new SubscribableFeedRuntimeId(connectionId.getFeedId(), FeedRuntimeType.INTAKE,
+                partition);
+        IngestionRuntime ingestionRuntime = (IngestionRuntime) feedManager.getFeedSubscriptionManager()
+                .getSubscribableRuntime(sid);
+        if (ingestionRuntime != null) {
+            IIntakeProgressTracker tracker = ingestionRuntime.getAdapterRuntimeManager().getProgressTracker();
+            if (tracker != null) {
+                tracker.notifyIngestedTupleTimestamp(System.currentTimeMillis());
+            }
+        }
+    }
+
+    private void handleTerminateFlowMessage(FeedConnectionId connectionId) throws HyracksDataException {
+        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
+        Set<FeedRuntimeId> feedRuntimes = runtimeManager.getFeedRuntimes();
+
+        boolean found = false;
+        for (FeedRuntimeId runtimeId : feedRuntimes) {
+            FeedRuntime runtime = runtimeManager.getFeedRuntime(runtimeId);
+            if (runtime.getRuntimeId().getRuntimeType().equals(FeedRuntimeType.COLLECT)) {
+                ((CollectionRuntime) runtime).getFrameCollector().setState(State.HANDOVER);
+                found = true;
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Switched " + runtime + " to Hand Over stage");
+                }
+            }
+        }
+        if (!found) {
+            throw new HyracksDataException("COLLECT runtime not found!");
+        }
+    }
+
+    private void handlePrepareStallMessage(PrepareStallMessage prepareStallMessage) throws HyracksDataException {
+        FeedConnectionId connectionId = prepareStallMessage.getConnectionId();
+        int computePartitionsRetainLimit = prepareStallMessage.getComputePartitionsRetainLimit();
+        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
+        Set<FeedRuntimeId> feedRuntimes = runtimeManager.getFeedRuntimes();
+        for (FeedRuntimeId runtimeId : feedRuntimes) {
+            FeedRuntime runtime = runtimeManager.getFeedRuntime(runtimeId);
+            switch (runtimeId.getFeedRuntimeType()) {
+                case COMPUTE:
+                    Mode requiredMode = runtimeId.getPartition() <= computePartitionsRetainLimit ? Mode.STALL
+                            : Mode.END;
+                    runtime.setMode(requiredMode);
+                    break;
+                default:
+                    runtime.setMode(Mode.STALL);
+                    break;
+            }
+        }
+    }
+
+    private void handleDiscontinueFeedTypeMessage(EndFeedMessage endFeedMessage) throws Exception {
+        FeedId sourceFeedId = endFeedMessage.getSourceFeedId();
+        SubscribableFeedRuntimeId subscribableRuntimeId = new SubscribableFeedRuntimeId(sourceFeedId,
+                FeedRuntimeType.INTAKE, partition);
+        ISubscribableRuntime feedRuntime = feedManager.getFeedSubscriptionManager()
+                .getSubscribableRuntime(subscribableRuntimeId);
+        IAdapterRuntimeManager adapterRuntimeManager = ((IngestionRuntime) feedRuntime).getAdapterRuntimeManager();
+        adapterRuntimeManager.stop();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Stopped Adapter " + adapterRuntimeManager);
+        }
+    }
+
+    private void handleDisconnectFeedTypeMessage(EndFeedMessage endFeedMessage) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Ending feed:" + endFeedMessage.getFeedConnectionId());
+        }
+        FeedRuntimeId runtimeId = null;
+        FeedRuntimeType subscribableRuntimeType = ((EndFeedMessage) message).getSourceRuntimeType();
+        if (endFeedMessage.isCompleteDisconnection()) {
+            // subscribableRuntimeType represents the location at which the feed connection receives data
+            FeedRuntimeType runtimeType = null;
+            switch (subscribableRuntimeType) {
+                case INTAKE:
+                    runtimeType = FeedRuntimeType.COLLECT;
+                    break;
+                case COMPUTE:
+                    runtimeType = FeedRuntimeType.COMPUTE_COLLECT;
+                    break;
+                default:
+                    throw new IllegalStateException("Invalid subscribable runtime type " + subscribableRuntimeType);
+            }
+
+            runtimeId = new FeedRuntimeId(runtimeType, partition, FeedRuntimeId.DEFAULT_OPERAND_ID);
+            CollectionRuntime feedRuntime = (CollectionRuntime) feedManager.getFeedConnectionManager()
+                    .getFeedRuntime(connectionId, runtimeId);
+            feedRuntime.getSourceRuntime().unsubscribeFeed(feedRuntime);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Complete Unsubscription of " + endFeedMessage.getFeedConnectionId());
+            }
+        } else {
+            // subscribableRuntimeType represents the location for data hand-off in the presence of subscribers
+            switch (subscribableRuntimeType) {
+                case INTAKE:
+                    // illegal state as data hand-off from one feed to another does not happen at intake
+                    throw new IllegalStateException("Illegal State, invalid runtime type  " + subscribableRuntimeType);
+                case COMPUTE:
+                    // feed could be primary or secondary, doesn't matter
+                    SubscribableFeedRuntimeId feedSubscribableRuntimeId = new SubscribableFeedRuntimeId(
+                            connectionId.getFeedId(), FeedRuntimeType.COMPUTE, partition);
+                    ISubscribableRuntime feedRuntime = feedManager.getFeedSubscriptionManager()
+                            .getSubscribableRuntime(feedSubscribableRuntimeId);
+                    DistributeFeedFrameWriter dWriter = feedRuntime.getFeedFrameWriter();
+                    Map<IFrameWriter, FeedFrameCollector> registeredCollectors = dWriter.getRegisteredReaders();
+
+                    IFrameWriter unsubscribingWriter = null;
+                    for (Entry<IFrameWriter, FeedFrameCollector> entry : registeredCollectors.entrySet()) {
+                        IFrameWriter frameWriter = entry.getKey();
+                        FeedRuntimeInputHandler feedFrameWriter = (FeedRuntimeInputHandler) frameWriter;
+                        if (feedFrameWriter.getConnectionId().equals(endFeedMessage.getFeedConnectionId())) {
+                            unsubscribingWriter = feedFrameWriter;
+                            dWriter.unsubscribeFeed(unsubscribingWriter);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Partial Unsubscription of " + unsubscribingWriter);
+                            }
+                            break;
+                        }
+                    }
+                    break;
+                default:
+                    break;
+            }
+
+        }
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Unsubscribed from feed :" + connectionId);
+        }
+    }
+}
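
A note on the COMMIT_ACK_RESPONSE handling above: the collect-side input handler drops everything up to the end of the highest fully acknowledged window. The following is a minimal arithmetic sketch of that bookkeeping; the class name, method name, and the window size value are assumptions for illustration (the real constant is IntakePartitionStatistics.ACK_WINDOW_SIZE).

public class AckWindowExample {

    private static final int ACK_WINDOW_SIZE = 1024; // assumed value, not the real constant

    // Given the highest window index that is fully persisted, return the first
    // tuple index that must still be retained; everything before it may be
    // dropped from the collect-side buffer (mirrors the dropTill(...) call above).
    static long firstRetainedTuple(int maxWindowAcked) {
        return (long) ACK_WINDOW_SIZE * (maxWindowAcked + 1);
    }

    public static void main(String[] args) {
        // If windows 0..2 are acknowledged, tuples 0..3071 may be dropped.
        System.out.println(firstRetainedTuple(2)); // prints 3072
    }
}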

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
new file mode 100644
index 0000000..80a54be
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
@@ -0,0 +1,224 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyEnforcer;
+import org.apache.asterix.external.feed.runtime.FeedRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+import org.apache.asterix.external.feed.runtime.SubscribableRuntime;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IActivity;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+
+/*
+ * Note: this operator does not strictly follow the IFrameWriter contract.
+ */
+public class FeedMetaComputeNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMetaComputeNodePushable.class.getName());
+
+    /** Runtime node pushable corresponding to the core feed operator **/
+    private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
+
+    /**
+     * A policy enforcer that ensures dynamic decisions for a feed are taken
+     * in accordance with the associated ingestion policy
+     **/
+    private FeedPolicyEnforcer policyEnforcer;
+
+    /**
+     * The Feed Runtime instance associated with the operator. Feed Runtime
+     * captures the state of the operator while the feed is active.
+     */
+    private FeedRuntime feedRuntime;
+
+    /**
+     * A unique identifier for the feed instance. A feed instance represents
+     * the flow of data from a feed to a dataset.
+     **/
+    private FeedConnectionId connectionId;
+
+    /**
+     * Denotes the i'th operator instance in a setting where K operator
+     * instances are scheduled to run in parallel
+     **/
+    private int partition;
+
+    private int nPartitions;
+
+    /** The (singleton) instance of IFeedManager **/
+    private IFeedManager feedManager;
+
+    private FrameTupleAccessor fta;
+
+    private final IHyracksTaskContext ctx;
+
+    private final FeedRuntimeType runtimeType = FeedRuntimeType.COMPUTE;
+
+    private FeedRuntimeInputHandler inputSideHandler;
+
+    public FeedMetaComputeNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
+            int partition, int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
+            Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
+        this.ctx = ctx;
+        this.coreOperator = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
+                .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+        this.policyEnforcer = new FeedPolicyEnforcer(feedConnectionId, feedPolicyProperties);
+        this.partition = partition;
+        this.nPartitions = nPartitions;
+        this.connectionId = feedConnectionId;
+        this.feedManager = (IFeedManager) ((IAsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()
+                .getApplicationObject()).getFeedManager();
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        FeedRuntimeId runtimeId = new SubscribableFeedRuntimeId(connectionId.getFeedId(), runtimeType, partition);
+        try {
+            feedRuntime = feedManager.getFeedConnectionManager().getFeedRuntime(connectionId, runtimeId);
+            if (feedRuntime == null) {
+                initializeNewFeedRuntime(runtimeId);
+            } else {
+                reviveOldFeedRuntime(runtimeId);
+            }
+            writer.open();
+            coreOperator.open();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void initializeNewFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
+        this.fta = new FrameTupleAccessor(recordDesc);
+        this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, coreOperator,
+                policyEnforcer.getFeedPolicyAccessor(), true, fta, recordDesc, feedManager, nPartitions);
+
+        DistributeFeedFrameWriter distributeWriter = new DistributeFeedFrameWriter(ctx, connectionId.getFeedId(),
+                writer, runtimeType, partition, new FrameTupleAccessor(recordDesc), feedManager);
+        coreOperator.setOutputFrameWriter(0, distributeWriter, recordDesc);
+
+        feedRuntime = new SubscribableRuntime(connectionId.getFeedId(), runtimeId, inputSideHandler, distributeWriter,
+                recordDesc);
+        feedManager.getFeedSubscriptionManager().registerFeedSubscribableRuntime((ISubscribableRuntime) feedRuntime);
+        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, feedRuntime);
+
+        distributeWriter.subscribeFeed(policyEnforcer.getFeedPolicyAccessor(), writer, connectionId);
+    }
+
+    private void reviveOldFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
+        this.fta = new FrameTupleAccessor(recordDesc);
+        this.inputSideHandler = feedRuntime.getInputHandler();
+        this.inputSideHandler.setCoreOperator(coreOperator);
+
+        DistributeFeedFrameWriter distributeWriter = new DistributeFeedFrameWriter(ctx, connectionId.getFeedId(),
+                writer, runtimeType, partition, new FrameTupleAccessor(recordDesc), feedManager);
+        coreOperator.setOutputFrameWriter(0, distributeWriter, recordDesc);
+        distributeWriter.subscribeFeed(policyEnforcer.getFeedPolicyAccessor(), writer, connectionId);
+
+        inputSideHandler.reset(nPartitions);
+        feedRuntime.setMode(Mode.PROCESS);
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        try {
+            inputSideHandler.nextFrame(buffer);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Core Op:" + coreOperator.getDisplayName() + " fail ");
+        }
+        feedRuntime.setMode(Mode.FAIL);
+        coreOperator.fail();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        boolean stalled = inputSideHandler.getMode().equals(Mode.STALL);
+        boolean end = inputSideHandler.getMode().equals(Mode.END);
+        try {
+            if (inputSideHandler != null) {
+                if (!(stalled || end)) {
+                    inputSideHandler.nextFrame(null); // signal end of data
+                    while (!inputSideHandler.isFinished()) {
+                        synchronized (coreOperator) {
+                            coreOperator.wait();
+                        }
+                    }
+                } else {
+                    inputSideHandler.setFinished(true);
+                }
+            }
+            coreOperator.close();
+            System.out.println("CLOSED " + coreOperator + " STALLED ?" + stalled + " ENDED " + end);
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            if (!stalled) {
+                deregister();
+                System.out.println("DEREGISTERING " + this.feedRuntime.getRuntimeId());
+            } else {
+                System.out.println("NOT DEREGISTERING " + this.feedRuntime.getRuntimeId());
+            }
+            if (inputSideHandler != null) {
+                inputSideHandler.close();
+            }
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ending Operator  " + this.feedRuntime.getRuntimeId());
+            }
+        }
+    }
+
+    private void deregister() {
+        if (feedRuntime != null) {
+            // deregister from subscription manager
+            SubscribableFeedRuntimeId runtimeId = (SubscribableFeedRuntimeId) feedRuntime.getRuntimeId();
+            feedManager.getFeedSubscriptionManager().deregisterFeedSubscribableRuntime(runtimeId);
+
+            // deregister from connection manager
+            feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId, feedRuntime.getRuntimeId());
+        }
+    }
+
+}
\ No newline at end of file
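
The close() method above ends the data flow by pushing a null frame into the input-side handler and then waiting on the core operator's monitor until the handler reports that it has finished. Below is a small, self-contained sketch of that handshake under assumed names; it is not the AsterixDB implementation, which routes the signal through FeedRuntimeInputHandler, and it uses an explicit sentinel object instead of null because BlockingQueue rejects null elements.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class EndOfDataHandshake {

    private static final byte[] END_OF_DATA = new byte[0]; // sentinel frame (the real code passes null)

    private final BlockingQueue<byte[]> frames = new ArrayBlockingQueue<>(16);
    private final Object coreOperator = new Object(); // stands in for the wrapped core operator
    private volatile boolean finished = false;

    /** Drains frames on a worker thread and notifies waiters once the sentinel is seen. */
    public void start() {
        new Thread(() -> {
            try {
                byte[] frame;
                while ((frame = frames.take()) != END_OF_DATA) {
                    process(frame);
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } finally {
                synchronized (coreOperator) {
                    finished = true;
                    coreOperator.notifyAll();
                }
            }
        }).start();
    }

    public void nextFrame(byte[] frame) throws InterruptedException {
        frames.put(frame);
    }

    /** Mirrors close() above: push the end-of-data marker, then wait until it is drained. */
    public void close() throws InterruptedException {
        frames.put(END_OF_DATA);
        synchronized (coreOperator) {
            while (!finished) {
                coreOperator.wait();
            }
        }
    }

    private void process(byte[] frame) {
        // the core operator's work on one frame would happen here
    }

    public static void main(String[] args) throws InterruptedException {
        EndOfDataHandshake h = new EndOfDataHandshake();
        h.start();
        h.nextFrame(new byte[] { 1, 2, 3 });
        h.close(); // returns only after the worker has drained everything before the sentinel
    }
}

The property mirrored here is that close() returns only after every frame pushed before the sentinel has been processed.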

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaNodePushable.java
new file mode 100644
index 0000000..4dae72d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaNodePushable.java
@@ -0,0 +1,184 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyEnforcer;
+import org.apache.asterix.external.feed.runtime.FeedRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IActivity;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+
+public class FeedMetaNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMetaNodePushable.class.getName());
+
+    /** Runtime node pushable corresponding to the core feed operator **/
+    private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
+
+    /**
+     * A policy enforcer that ensures dynamic decisions for a feed are taken
+     * in accordance with the associated ingestion policy
+     **/
+    private FeedPolicyEnforcer policyEnforcer;
+
+    /**
+     * The Feed Runtime instance associated with the operator. Feed Runtime
+     * captures the state of the operator while the feed is active.
+     */
+    private FeedRuntime feedRuntime;
+
+    /**
+     * A unique identifier for the feed instance. A feed instance represents
+     * the flow of data from a feed to a dataset.
+     **/
+    private FeedConnectionId connectionId;
+
+    /**
+     * Denotes the i'th operator instance in a setting where K operator
+     * instances are scheduled to run in parallel
+     **/
+    private int partition;
+
+    /** Total number of partitions available **/
+    private int nPartitions;
+
+    /** Type associated with the core feed operator **/
+    private final FeedRuntimeType runtimeType = FeedRuntimeType.OTHER;
+
+    /** The (singleton) instance of IFeedManager **/
+    private IFeedManager feedManager;
+
+    private FrameTupleAccessor fta;
+
+    private final IHyracksTaskContext ctx;
+
+    private final String operandId;
+
+    /** The pre-processor associated with this runtime **/
+    private FeedRuntimeInputHandler inputSideHandler;
+
+    public FeedMetaNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition,
+            int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
+            Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
+        this.ctx = ctx;
+        this.coreOperator = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
+                .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+        this.policyEnforcer = new FeedPolicyEnforcer(feedConnectionId, feedPolicyProperties);
+        this.partition = partition;
+        this.nPartitions = nPartitions;
+        this.connectionId = feedConnectionId;
+        this.feedManager = (IFeedManager) ((IAsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()
+                .getApplicationObject()).getFeedManager();
+        this.operandId = operationId;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
+        try {
+            feedRuntime = feedManager.getFeedConnectionManager().getFeedRuntime(connectionId, runtimeId);
+            if (feedRuntime == null) {
+                initializeNewFeedRuntime(runtimeId);
+            } else {
+                reviveOldFeedRuntime(runtimeId);
+            }
+            coreOperator.open();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void initializeNewFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
+        this.fta = new FrameTupleAccessor(recordDesc);
+        this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, coreOperator,
+                policyEnforcer.getFeedPolicyAccessor(), false, fta, recordDesc, feedManager, nPartitions);
+
+        setupBasicRuntime(inputSideHandler);
+    }
+
+    private void reviveOldFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
+        this.inputSideHandler = feedRuntime.getInputHandler();
+        this.fta = new FrameTupleAccessor(recordDesc);
+        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
+        feedRuntime.setMode(Mode.PROCESS);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Retrieved state from the zombie instance " + runtimeType + " node.");
+        }
+    }
+
+    private void setupBasicRuntime(FeedRuntimeInputHandler inputHandler) throws Exception {
+        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
+        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
+        feedRuntime = new FeedRuntime(runtimeId, inputHandler, writer);
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        try {
+            inputSideHandler.nextFrame(buffer);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Core Op:" + coreOperator.getDisplayName() + " fail ");
+        }
+        feedRuntime.setMode(Mode.FAIL);
+        coreOperator.fail();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            coreOperator.close();
+        } catch (Exception e) {
+            e.printStackTrace();
+            // ignore
+        } finally {
+            if (inputSideHandler != null) {
+                inputSideHandler.close();
+            }
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ending Operator  " + this.feedRuntime.getRuntimeId());
+            }
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java
new file mode 100644
index 0000000..9eb6c78
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.util.Map;
+
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * FeedMetaOperatorDescriptor is a wrapper operator that provides a sandbox-like
+ * environment for a Hyracks operator that is part of a feed ingestion
+ * pipeline. The MetaFeed operator provides an interface identical to that
+ * offered by the underlying wrapped operator, hereafter referred to as the core
+ * operator. As seen by Hyracks, the altered pipeline is identical to the
+ * earlier version formed from core operators. The MetaFeed operator enhances
+ * each core operator by providing functionality for handling runtime
+ * exceptions, saving any state for future retrieval, and measuring/reporting
+ * performance characteristics. This added functionality contributes to
+ * providing fault tolerance.
+ */
+
+public class FeedMetaOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * The actual (Hyracks) operator that is wrapped around by the MetaFeed
+     * operator.
+     **/
+    private IOperatorDescriptor coreOperator;
+
+    /**
+     * A unique identifier for the feed instance. A feed instance represents the
+     * flow of data from a feed to a dataset.
+     **/
+    private final FeedConnectionId feedConnectionId;
+
+    /**
+     * The policy associated with the feed instance.
+     **/
+    private final Map<String, String> feedPolicyProperties;
+
+    /**
+     * Type of the feed runtime associated with the operator.
+     * Possible values: COMPUTE, STORE, OTHER.
+     **/
+    private final FeedRuntimeType runtimeType;
+
+    private final String operandId;
+
+    public FeedMetaOperatorDescriptor(JobSpecification spec, FeedConnectionId feedConnectionId,
+            IOperatorDescriptor coreOperatorDescriptor, Map<String, String> feedPolicyProperties,
+            FeedRuntimeType runtimeType, boolean enableSubscriptionMode, String operandId) {
+        super(spec, coreOperatorDescriptor.getInputArity(), coreOperatorDescriptor.getOutputArity());
+        this.feedConnectionId = feedConnectionId;
+        this.feedPolicyProperties = feedPolicyProperties;
+        if (coreOperatorDescriptor.getOutputRecordDescriptors().length == 1) {
+            recordDescriptors[0] = coreOperatorDescriptor.getOutputRecordDescriptors()[0];
+        }
+        this.coreOperator = coreOperatorDescriptor;
+        this.runtimeType = runtimeType;
+        this.operandId = operandId;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        IOperatorNodePushable nodePushable = null;
+        switch (runtimeType) {
+            case COMPUTE:
+                nodePushable = new FeedMetaComputeNodePushable(ctx, recordDescProvider, partition, nPartitions,
+                        coreOperator, feedConnectionId, feedPolicyProperties, operandId);
+                break;
+            case STORE:
+                nodePushable = new FeedMetaStoreNodePushable(ctx, recordDescProvider, partition, nPartitions,
+                        coreOperator, feedConnectionId, feedPolicyProperties, operandId);
+                break;
+            case OTHER:
+                nodePushable = new FeedMetaNodePushable(ctx, recordDescProvider, partition, nPartitions, coreOperator,
+                        feedConnectionId, feedPolicyProperties, operandId);
+                break;
+            case ETS:
+                nodePushable = ((AlgebricksMetaOperatorDescriptor) coreOperator).createPushRuntime(ctx,
+                        recordDescProvider, partition, nPartitions);
+                break;
+            case JOIN:
+                break;
+            default:
+                throw new HyracksDataException(new IllegalArgumentException("Invalid feed runtime: " + runtimeType));
+        }
+        return nodePushable;
+    }
+
+    @Override
+    public String toString() {
+        return "FeedMeta [" + coreOperator + " ]";
+    }
+
+    public IOperatorDescriptor getCoreOperator() {
+        return coreOperator;
+    }
+
+    public FeedRuntimeType getRuntimeType() {
+        return runtimeType;
+    }
+
+}
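
The class comment above describes FeedMetaOperatorDescriptor as a wrapper that sandboxes the core operator. A rough sketch of that idea follows, with hypothetical types that merely stand in for the Hyracks push-operator interface; it is not the project's API.

interface PushOperator {
    void open() throws Exception;
    void nextFrame(byte[] frame) throws Exception;
    void fail() throws Exception;
    void close() throws Exception;
}

final class MetaOperator implements PushOperator {

    private final PushOperator core;              // the wrapped "core" operator
    private final boolean continueOnSoftFailure;  // stands in for the ingestion policy

    MetaOperator(PushOperator core, boolean continueOnSoftFailure) {
        this.core = core;
        this.continueOnSoftFailure = continueOnSoftFailure;
    }

    @Override
    public void open() throws Exception {
        core.open();
    }

    @Override
    public void nextFrame(byte[] frame) throws Exception {
        try {
            core.nextFrame(frame); // delegate to the core operator
        } catch (Exception e) {
            if (!continueOnSoftFailure) {
                throw e; // policy says: surface the failure
            }
            // otherwise record the failure and keep the pipeline alive
        }
    }

    @Override
    public void fail() throws Exception {
        core.fail();
    }

    @Override
    public void close() throws Exception {
        core.close();
    }
}

The real node pushables above additionally route frames through FeedRuntimeInputHandler and register/deregister runtimes with the feed manager; the sketch only shows the delegation-plus-policy shape.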

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
new file mode 100644
index 0000000..f75b3eb
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.common.dataflow.AsterixLSMInsertDeleteOperatorNodePushable;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyEnforcer;
+import org.apache.asterix.external.feed.runtime.FeedRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IActivity;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+
+public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMetaStoreNodePushable.class.getName());
+
+    /** Runtime node pushable corresponding to the core feed operator **/
+    private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
+
+    /**
+     * A policy enforcer that ensures dynamic decisions for a feed are taken
+     * in accordance with the associated ingestion policy
+     **/
+    private FeedPolicyEnforcer policyEnforcer;
+
+    /**
+     * The Feed Runtime instance associated with the operator. Feed Runtime
+     * captures the state of the operator while the feed is active.
+     */
+    private FeedRuntime feedRuntime;
+
+    /**
+     * A unique identifier for the feed instance. A feed instance represents
+     * the flow of data from a feed to a dataset.
+     **/
+    private FeedConnectionId connectionId;
+
+    /**
+     * Denotes the i'th operator instance in a setting where K operator
+     * instances are scheduled to run in parallel
+     **/
+    private int partition;
+
+    private int nPartitions;
+
+    /** Type associated with the core feed operator **/
+    private final FeedRuntimeType runtimeType = FeedRuntimeType.STORE;
+
+    /** The (singleton) instance of IFeedManager **/
+    private IFeedManager feedManager;
+
+    private FrameTupleAccessor fta;
+
+    private final IHyracksTaskContext ctx;
+
+    private final String operandId;
+
+    private FeedRuntimeInputHandler inputSideHandler;
+
+    public FeedMetaStoreNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
+            int partition, int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
+            Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
+        this.ctx = ctx;
+        this.coreOperator = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
+                .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
+        this.policyEnforcer = new FeedPolicyEnforcer(feedConnectionId, feedPolicyProperties);
+        this.partition = partition;
+        this.nPartitions = nPartitions;
+        this.connectionId = feedConnectionId;
+        this.feedManager = (IFeedManager) ((IAsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()
+                .getApplicationObject()).getFeedManager();
+        this.operandId = operationId;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
+        try {
+            feedRuntime = feedManager.getFeedConnectionManager().getFeedRuntime(connectionId, runtimeId);
+            if (feedRuntime == null) {
+                initializeNewFeedRuntime(runtimeId);
+            } else {
+                reviveOldFeedRuntime(runtimeId);
+            }
+
+            coreOperator.open();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void initializeNewFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Runtime not found for  " + runtimeId + " connection id " + connectionId);
+        }
+        this.fta = new FrameTupleAccessor(recordDesc);
+        this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, coreOperator,
+                policyEnforcer.getFeedPolicyAccessor(), policyEnforcer.getFeedPolicyAccessor().bufferingEnabled(), fta,
+                recordDesc, feedManager, nPartitions);
+        if (coreOperator instanceof AsterixLSMInsertDeleteOperatorNodePushable) {
+            AsterixLSMInsertDeleteOperatorNodePushable indexOp = (AsterixLSMInsertDeleteOperatorNodePushable) coreOperator;
+            if (!indexOp.isPrimary()) {
+                inputSideHandler.setBufferingEnabled(false);
+            }
+        }
+        setupBasicRuntime(inputSideHandler);
+    }
+
+    private void reviveOldFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
+        this.inputSideHandler = feedRuntime.getInputHandler();
+        this.fta = new FrameTupleAccessor(recordDesc);
+        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
+        this.inputSideHandler.reset(nPartitions);
+        this.inputSideHandler.setCoreOperator(coreOperator);
+        feedRuntime.setMode(Mode.PROCESS);
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning(
+                    "Retrieved state from the zombie instance from previous execution for " + runtimeType + " node.");
+        }
+    }
+
+    private void setupBasicRuntime(FeedRuntimeInputHandler inputHandler) throws Exception {
+        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
+        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
+        feedRuntime = new FeedRuntime(runtimeId, inputHandler, writer);
+        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, feedRuntime);
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        try {
+            inputSideHandler.nextFrame(buffer);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Core Op:" + coreOperator.getDisplayName() + " fail ");
+        }
+        feedRuntime.setMode(Mode.FAIL);
+        coreOperator.fail();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        System.out.println("CLOSE CALLED FOR " + this.feedRuntime.getRuntimeId());
+        boolean stalled = inputSideHandler.getMode().equals(Mode.STALL);
+        try {
+            if (!stalled) {
+                System.out.println("SIGNALLING END OF DATA for " + this.feedRuntime.getRuntimeId() + " mode is "
+                        + inputSideHandler.getMode() + " WAITING ON " + coreOperator);
+                inputSideHandler.nextFrame(null); // signal end of data
+                while (!inputSideHandler.isFinished()) {
+                    synchronized (coreOperator) {
+                        coreOperator.wait();
+                    }
+                }
+                System.out.println("ABOUT TO CLOSE OPERATOR  " + coreOperator);
+            }
+            coreOperator.close();
+        } catch (Exception e) {
+            e.printStackTrace();
+            // ignore
+        } finally {
+            if (!stalled) {
+                deregister();
+                System.out.println("DEREGISTERING " + this.feedRuntime.getRuntimeId());
+            } else {
+                System.out.println("NOT DEREGISTERING " + this.feedRuntime.getRuntimeId());
+            }
+            inputSideHandler.close();
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ending Operator  " + this.feedRuntime.getRuntimeId());
+            }
+        }
+    }
+
+    private void deregister() {
+        if (feedRuntime != null) {
+            feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId, feedRuntime.getRuntimeId());
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
index 860d35f..129b62f 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.parser;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.util.BitSet;
 import java.util.List;
 import java.util.Map;
@@ -1145,4 +1146,10 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         recordBuilderPool.reset();
         abvsBuilderPool.reset();
     }
+
+    @Override
+    public boolean reset(InputStream in) throws IOException {
+        admLexer.reInit(new InputStreamReader(in));
+        return true;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
index 146064a..6c399c3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
@@ -198,11 +198,17 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
     }
 
     @Override
-    public void setInputStream(InputStream in) throws Exception {
+    public void setInputStream(InputStream in) throws IOException {
         cursor = new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
         if (in != null && hasHeader) {
             cursor.nextRecord();
             while (cursor.nextField());
         }
     }
+
+    @Override
+    public boolean reset(InputStream in) throws IOException {
+        cursor = new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
+        return true;
+    }
 }
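
Both parser diffs above add a reset(InputStream) that simply rebuilds the reader or cursor on the new stream, so the same parser instance can keep going after a feed's input is re-established. A minimal sketch of that contract, with a hypothetical interface and class (the real interface in asterix-external-data is IStreamDataParser):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

interface ResettableParser {
    boolean reset(InputStream in) throws IOException;
    String next() throws IOException;
}

final class LineParser implements ResettableParser {

    private BufferedReader reader;

    @Override
    public boolean reset(InputStream in) throws IOException {
        // Re-initialize internal state against the new stream, just as the ADM
        // lexer and the delimited-data field cursor are re-initialized above.
        reader = new BufferedReader(new InputStreamReader(in));
        return true;
    }

    @Override
    public String next() throws IOException {
        return reader.readLine();
    }
}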

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
index 649ca43..c5b39df 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
@@ -30,10 +30,6 @@ import org.apache.asterix.external.api.IIndexingAdapterFactory;
 import org.apache.asterix.external.dataset.adapter.GenericAdapter;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.library.ExternalLibraryManager;
-import org.apache.asterix.external.runtime.GenericSocketFeedAdapter;
-import org.apache.asterix.external.runtime.GenericSocketFeedAdapterFactory;
-import org.apache.asterix.external.runtime.SocketClientAdapter;
-import org.apache.asterix.external.runtime.SocketClientAdapterFactory;
 import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.om.types.ARecordType;
@@ -47,16 +43,16 @@ public class AdapterFactoryProvider {
         Map<String, Class<? extends IAdapterFactory>> adapterFactories = new HashMap<String, Class<? extends IAdapterFactory>>();
         // Class names
         adapterFactories.put(GenericAdapter.class.getName(), GenericAdapterFactory.class);
-        adapterFactories.put(GenericSocketFeedAdapter.class.getName(), GenericSocketFeedAdapterFactory.class);
-        adapterFactories.put(SocketClientAdapter.class.getName(), SocketClientAdapterFactory.class);
-
         // Aliases
         adapterFactories.put(ExternalDataConstants.ALIAS_GENERIC_ADAPTER, GenericAdapterFactory.class);
         adapterFactories.put(ExternalDataConstants.ALIAS_HDFS_ADAPTER, GenericAdapterFactory.class);
         adapterFactories.put(ExternalDataConstants.ALIAS_LOCALFS_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_SOCKET_ADAPTER, GenericSocketFeedAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_SOCKET_CLIENT_ADAPTER, SocketClientAdapterFactory.class);
+        adapterFactories.put(ExternalDataConstants.ALIAS_SOCKET_ADAPTER, GenericAdapterFactory.class);
+        adapterFactories.put(ExternalDataConstants.ALIAS_SOCKET_CLIENT_ADAPTER, GenericAdapterFactory.class);
         adapterFactories.put(ExternalDataConstants.ALIAS_FILE_FEED_ADAPTER, GenericAdapterFactory.class);
+        adapterFactories.put(ExternalDataConstants.ALIAS_TWITTER_PULL_ADAPTER, GenericAdapterFactory.class);
+        adapterFactories.put(ExternalDataConstants.ALIAS_TWITTER_PUSH_ADAPTER, GenericAdapterFactory.class);
+        adapterFactories.put(ExternalDataConstants.ALIAS_LOCALFS_PUSH_ADAPTER, GenericAdapterFactory.class);
 
         // Compatability
         adapterFactories.put(ExternalDataConstants.ADAPTER_HDFS_CLASSNAME, GenericAdapterFactory.class);
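
The change above routes several adapter aliases to the single GenericAdapterFactory through one lookup table. A minimal sketch of such an alias-to-factory registry follows; the alias strings and types are invented for illustration, and the real keys come from ExternalDataConstants.

import java.util.HashMap;
import java.util.Map;

public class AdapterRegistryExample {

    interface AdapterFactory {
        String name();
    }

    static class GenericFactory implements AdapterFactory {
        @Override
        public String name() {
            return "generic";
        }
    }

    private static final Map<String, Class<? extends AdapterFactory>> FACTORIES = new HashMap<>();
    static {
        // Several aliases may point at the same factory class, as in the patch above.
        FACTORIES.put("socket_adapter", GenericFactory.class);
        FACTORIES.put("localfs", GenericFactory.class);
    }

    static AdapterFactory create(String alias) throws ReflectiveOperationException {
        Class<? extends AdapterFactory> clazz = FACTORIES.get(alias);
        if (clazz == null) {
            throw new IllegalArgumentException("unknown adapter alias: " + alias);
        }
        return clazz.getDeclaredConstructor().newInstance();
    }

    public static void main(String[] args) throws ReflectiveOperationException {
        System.out.println(create("socket_adapter").name()); // prints "generic"
    }
}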

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
index 68a3942..dfe7aed 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
@@ -28,14 +28,19 @@ import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
+import org.apache.asterix.external.api.IRecordFlowController;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.api.IStreamDataParser;
 import org.apache.asterix.external.api.IStreamDataParserFactory;
+import org.apache.asterix.external.api.IStreamFlowController;
+import org.apache.asterix.external.dataflow.FeedRecordDataFlowController;
+import org.apache.asterix.external.dataflow.FeedStreamDataFlowController;
 import org.apache.asterix.external.dataflow.IndexingDataFlowController;
 import org.apache.asterix.external.dataflow.RecordDataFlowController;
 import org.apache.asterix.external.dataflow.StreamDataFlowController;
 import org.apache.asterix.external.util.DataflowUtils;
+import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 
@@ -60,9 +65,11 @@ public class DataflowControllerProvider {
             Map<String, String> configuration, boolean indexingOp) throws Exception {
         switch (dataSourceFactory.getDataSourceType()) {
             case RECORDS:
-                RecordDataFlowController recordDataFlowController;
+                IRecordFlowController recordDataFlowController = null;
                 if (indexingOp) {
                     recordDataFlowController = new IndexingDataFlowController();
+                } else if (ExternalDataUtils.isFeed(configuration)) {
+                    recordDataFlowController = new FeedRecordDataFlowController();
                 } else {
                     recordDataFlowController = new RecordDataFlowController();
                 }
@@ -77,7 +84,12 @@ public class DataflowControllerProvider {
                 recordDataFlowController.setRecordParser(dataParser);
                 return recordDataFlowController;
             case STREAM:
-                StreamDataFlowController streamDataFlowController = new StreamDataFlowController();
+                IStreamFlowController streamDataFlowController = null;
+                if (ExternalDataUtils.isFeed(configuration)) {
+                    streamDataFlowController = new FeedStreamDataFlowController();
+                } else {
+                    streamDataFlowController = new StreamDataFlowController();
+                }
                 streamDataFlowController.configure(configuration, ctx);
                 streamDataFlowController.setTupleForwarder(DataflowUtils.getTupleForwarder(configuration));
                 IInputStreamProviderFactory streamProviderFactory = (IInputStreamProviderFactory) dataSourceFactory;
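
For readers skimming the hunk above: the RECORDS branch now has a three-way dispatch (indexing job, feed job, plain batch read), and the STREAM branch gains a parallel feed/non-feed split. The stand-alone sketch below (not part of the patch; stand-in types, and the "is-feed" marker property is an assumption, since the hunk does not show what ExternalDataUtils.isFeed inspects) mirrors that dispatch:

    import java.util.Collections;
    import java.util.Map;

    // Stand-alone sketch of the controller selection performed in the RECORDS branch above.
    public class ControllerDispatchSketch {

        // Stand-ins for the real controllers in org.apache.asterix.external.dataflow.
        interface FlowController {}
        static class IndexingController implements FlowController {}
        static class FeedRecordController implements FlowController {}
        static class RecordController implements FlowController {}

        // Mirrors the new ordering: indexing jobs first, then feed jobs, then plain batch reads.
        static FlowController forRecords(Map<String, String> configuration, boolean indexingOp) {
            if (indexingOp) {
                return new IndexingController();
            }
            if (isFeed(configuration)) {
                return new FeedRecordController();
            }
            return new RecordController();
        }

        // Assumption: a feed job is flagged by a marker property in its configuration;
        // the exact property name used by ExternalDataUtils.isFeed is not shown here.
        static boolean isFeed(Map<String, String> configuration) {
            return Boolean.parseBoolean(configuration.getOrDefault("is-feed", "false"));
        }

        public static void main(String[] args) {
            Map<String, String> conf = Collections.singletonMap("is-feed", "true");
            System.out.println(forRecords(conf, false).getClass().getSimpleName());
        }
    }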

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
index c69e12c..a7ab062 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
@@ -27,6 +27,7 @@ import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.input.HDFSDataSourceFactory;
 import org.apache.asterix.external.input.record.reader.factory.LineRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.factory.SemiStructuredRecordReaderFactory;
+import org.apache.asterix.external.input.record.reader.factory.TwitterRecordReaderFactory;
 import org.apache.asterix.external.input.stream.factory.LocalFSInputStreamProviderFactory;
 import org.apache.asterix.external.input.stream.factory.SocketInputStreamProviderFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
@@ -92,8 +93,12 @@ public class DatasourceFactoryProvider {
                             .setInputStreamFactoryProvider(DatasourceFactoryProvider.getInputStreamFactory(
                                     ExternalDataUtils.getRecordReaderStreamName(configuration), configuration));;
                     break;
+                case ExternalDataConstants.READER_TWITTER_PULL:
+                case ExternalDataConstants.READER_TWITTER_PUSH:
+                    readerFactory = new TwitterRecordReaderFactory();
+                    break;
                 default:
-                    throw new AsterixException("unknown input stream factory");
+                    throw new AsterixException("unknown record reader factory");
             }
         }
         return readerFactory;
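
The change above extends the reader-name lookup so that both Twitter reader variants resolve to one factory. A stand-alone sketch of that lookup follows (the reader-name strings are illustrative only; the real values are constants in ExternalDataConstants):

    import java.util.Locale;

    // Stand-alone sketch of the extended lookup: "twitter_pull" and "twitter_push" fall
    // through to a single Twitter factory, and anything unrecognized is rejected with the
    // corrected "unknown record reader factory" message.
    public class RecordReaderFactorySketch {

        interface RecordReaderFactory {}
        static class LineRecordReaderFactory implements RecordReaderFactory {}
        static class TwitterRecordReaderFactory implements RecordReaderFactory {}

        static RecordReaderFactory forReader(String readerName) {
            switch (readerName.trim().toLowerCase(Locale.ROOT)) {
                case "line-delimited":
                    return new LineRecordReaderFactory();
                case "twitter_pull":   // pull and push share one factory,
                case "twitter_push":   // mirroring the fall-through added above
                    return new TwitterRecordReaderFactory();
                default:
                    throw new IllegalArgumentException("unknown record reader factory: " + readerName);
            }
        }

        public static void main(String[] args) {
            System.out.println(forReader("twitter_push").getClass().getSimpleName());
        }
    }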


[02/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
new file mode 100644
index 0000000..5b5f19f
--- /dev/null
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -0,0 +1,583 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.metadata.feeds;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.config.MetadataConstants;
+import org.apache.asterix.common.dataflow.AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor;
+import org.apache.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.library.ExternalLibraryManager;
+import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
+import org.apache.asterix.external.provider.AdapterFactoryProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.metadata.MetadataException;
+import org.apache.asterix.metadata.MetadataManager;
+import org.apache.asterix.metadata.MetadataTransactionContext;
+import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.entities.DatasourceAdapter;
+import org.apache.asterix.metadata.entities.Datatype;
+import org.apache.asterix.metadata.entities.Feed;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
+import org.apache.asterix.metadata.entities.Function;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.IAType;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
+import org.apache.hyracks.algebricks.common.utils.Triple;
+import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
+import org.apache.hyracks.api.constraints.Constraint;
+import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
+import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
+import org.apache.hyracks.api.constraints.expressions.ConstraintExpression;
+import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
+import org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression;
+import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
+import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
+import org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.common.data.partition.RandomPartitionComputerFactory;
+import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import org.apache.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
+
+/**
+ * A utility class for providing helper functions for feeds
+ * TODO: Refactor this class.
+ */
+public class FeedMetadataUtil {
+
+    private static Logger LOGGER = Logger.getLogger(FeedMetadataUtil.class.getName());
+
+    private static class LocationConstraint {
+        int partition;
+        String location;
+    }
+
+    public static Dataset validateIfDatasetExists(String dataverse, String datasetName, MetadataTransactionContext ctx)
+            throws AsterixException {
+        Dataset dataset = MetadataManager.INSTANCE.getDataset(ctx, dataverse, datasetName);
+        if (dataset == null) {
+            throw new AsterixException("Unknown target dataset :" + datasetName);
+        }
+
+        if (!dataset.getDatasetType().equals(DatasetType.INTERNAL)) {
+            throw new AsterixException("Statement not applicable. Dataset " + datasetName + " is not of required type "
+                    + DatasetType.INTERNAL);
+        }
+        return dataset;
+    }
+
+    public static Feed validateIfFeedExists(String dataverse, String feedName, MetadataTransactionContext ctx)
+            throws MetadataException, AsterixException {
+        Feed feed = MetadataManager.INSTANCE.getFeed(ctx, dataverse, feedName);
+        if (feed == null) {
+            throw new AsterixException("Unknown source feed: " + feedName);
+        }
+        return feed;
+    }
+
+    public static FeedPolicyEntity validateIfPolicyExists(String dataverse, String policyName,
+            MetadataTransactionContext ctx) throws AsterixException {
+        FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, dataverse, policyName);
+        if (feedPolicy == null) {
+            feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, MetadataConstants.METADATA_DATAVERSE_NAME,
+                    policyName);
+            if (feedPolicy == null) {
+                throw new AsterixException("Unknown feed policy" + policyName);
+            }
+        }
+        return feedPolicy;
+    }
+
+    public static JobSpecification alterJobSpecificationForFeed(JobSpecification spec,
+            FeedConnectionId feedConnectionId, Map<String, String> feedPolicyProperties) {
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Original Job Spec:" + spec);
+        }
+
+        JobSpecification altered = new JobSpecification(spec.getFrameSize());
+        Map<OperatorDescriptorId, IOperatorDescriptor> operatorMap = spec.getOperatorMap();
+        boolean preProcessingRequired = preProcessingRequired(feedConnectionId);
+        // copy operators
+        String operandId = null;
+        Map<OperatorDescriptorId, OperatorDescriptorId> oldNewOID = new HashMap<OperatorDescriptorId, OperatorDescriptorId>();
+        FeedMetaOperatorDescriptor metaOp = null;
+        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operatorMap.entrySet()) {
+            operandId = FeedRuntimeId.DEFAULT_OPERAND_ID;
+            IOperatorDescriptor opDesc = entry.getValue();
+            if (opDesc instanceof FeedCollectOperatorDescriptor) {
+                FeedCollectOperatorDescriptor orig = (FeedCollectOperatorDescriptor) opDesc;
+                FeedCollectOperatorDescriptor fiop = new FeedCollectOperatorDescriptor(altered,
+                        orig.getFeedConnectionId(), orig.getSourceFeedId(), (ARecordType) orig.getOutputType(),
+                        orig.getRecordDescriptor(), orig.getFeedPolicyProperties(), orig.getSubscriptionLocation());
+                oldNewOID.put(opDesc.getOperatorId(), fiop.getOperatorId());
+            } else if (opDesc instanceof AsterixLSMTreeInsertDeleteOperatorDescriptor) {
+                operandId = ((AsterixLSMTreeInsertDeleteOperatorDescriptor) opDesc).getIndexName();
+                metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
+                        FeedRuntimeType.STORE, false, operandId);
+                oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
+            } else if (opDesc instanceof AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor) {
+                operandId = ((AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor) opDesc).getIndexName();
+                metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
+                        FeedRuntimeType.STORE, false, operandId);
+                oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
+
+            } else {
+                FeedRuntimeType runtimeType = null;
+                boolean enableSubscriptionMode = false;
+                boolean createMetaOp = true;
+                OperatorDescriptorId opId = null;
+                if (opDesc instanceof AlgebricksMetaOperatorDescriptor) {
+                    IPushRuntimeFactory runtimeFactory = ((AlgebricksMetaOperatorDescriptor) opDesc).getPipeline()
+                            .getRuntimeFactories()[0];
+                    if (runtimeFactory instanceof AssignRuntimeFactory) {
+                        IConnectorDescriptor connectorDesc = spec.getOperatorInputMap().get(opDesc.getOperatorId())
+                                .get(0);
+                        IOperatorDescriptor sourceOp = spec.getProducer(connectorDesc);
+                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
+                            runtimeType = preProcessingRequired ? FeedRuntimeType.COMPUTE : FeedRuntimeType.OTHER;
+                            enableSubscriptionMode = preProcessingRequired;
+                        } else {
+                            runtimeType = FeedRuntimeType.OTHER;
+                        }
+                    } else if (runtimeFactory instanceof EmptyTupleSourceRuntimeFactory) {
+                        runtimeType = FeedRuntimeType.ETS;
+                    } else {
+                        runtimeType = FeedRuntimeType.OTHER;
+                    }
+                } else {
+                    if (opDesc instanceof AbstractSingleActivityOperatorDescriptor) {
+                        runtimeType = FeedRuntimeType.OTHER;
+                    } else {
+                        opId = altered.createOperatorDescriptorId(opDesc);
+                        createMetaOp = false;
+                    }
+                }
+                if (createMetaOp) {
+                    metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
+                            runtimeType, enableSubscriptionMode, operandId);
+                    opId = metaOp.getOperatorId();
+                }
+                oldNewOID.put(opDesc.getOperatorId(), opId);
+            }
+        }
+
+        // copy connectors
+        Map<ConnectorDescriptorId, ConnectorDescriptorId> connectorMapping = new HashMap<ConnectorDescriptorId, ConnectorDescriptorId>();
+        for (Entry<ConnectorDescriptorId, IConnectorDescriptor> entry : spec.getConnectorMap().entrySet()) {
+            IConnectorDescriptor connDesc = entry.getValue();
+            ConnectorDescriptorId newConnId = altered.createConnectorDescriptor(connDesc);
+            connectorMapping.put(entry.getKey(), newConnId);
+        }
+
+        // make connections between operators
+        for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : spec
+                .getConnectorOperatorMap().entrySet()) {
+            IConnectorDescriptor connDesc = altered.getConnectorMap().get(connectorMapping.get(entry.getKey()));
+            Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft();
+            Pair<IOperatorDescriptor, Integer> rightOp = entry.getValue().getRight();
+
+            IOperatorDescriptor leftOpDesc = altered.getOperatorMap()
+                    .get(oldNewOID.get(leftOp.getLeft().getOperatorId()));
+            IOperatorDescriptor rightOpDesc = altered.getOperatorMap()
+                    .get(oldNewOID.get(rightOp.getLeft().getOperatorId()));
+
+            altered.connect(connDesc, leftOpDesc, leftOp.getRight(), rightOpDesc, rightOp.getRight());
+        }
+
+        // prepare for setting partition constraints
+        Map<OperatorDescriptorId, List<LocationConstraint>> operatorLocations = new HashMap<OperatorDescriptorId, List<LocationConstraint>>();
+        Map<OperatorDescriptorId, Integer> operatorCounts = new HashMap<OperatorDescriptorId, Integer>();
+
+        for (Constraint constraint : spec.getUserConstraints()) {
+            LValueConstraintExpression lexpr = constraint.getLValue();
+            ConstraintExpression cexpr = constraint.getRValue();
+            OperatorDescriptorId opId;
+            switch (lexpr.getTag()) {
+                case PARTITION_COUNT:
+                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
+                    operatorCounts.put(opId, (int) ((ConstantExpression) cexpr).getValue());
+                    break;
+                case PARTITION_LOCATION:
+                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
+
+                    IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(opId));
+                    List<LocationConstraint> locations = operatorLocations.get(opDesc.getOperatorId());
+                    if (locations == null) {
+                        locations = new ArrayList<>();
+                        operatorLocations.put(opDesc.getOperatorId(), locations);
+                    }
+                    String location = (String) ((ConstantExpression) cexpr).getValue();
+                    LocationConstraint lc = new LocationConstraint();
+                    lc.location = location;
+                    lc.partition = ((PartitionLocationExpression) lexpr).getPartition();
+                    locations.add(lc);
+                    break;
+                default:
+                    break;
+            }
+        }
+
+        // set absolute location constraints
+        for (Entry<OperatorDescriptorId, List<LocationConstraint>> entry : operatorLocations.entrySet()) {
+            IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(entry.getKey()));
+            Collections.sort(entry.getValue(), new Comparator<LocationConstraint>() {
+
+                @Override
+                public int compare(LocationConstraint o1, LocationConstraint o2) {
+                    return o1.partition - o2.partition;
+                }
+            });
+            String[] locations = new String[entry.getValue().size()];
+            for (int i = 0; i < locations.length; ++i) {
+                locations[i] = entry.getValue().get(i).location;
+            }
+            PartitionConstraintHelper.addAbsoluteLocationConstraint(altered, opDesc, locations);
+        }
+
+        // set count constraints
+        for (Entry<OperatorDescriptorId, Integer> entry : operatorCounts.entrySet()) {
+            IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(entry.getKey()));
+            if (!operatorLocations.keySet().contains(entry.getKey())) {
+                PartitionConstraintHelper.addPartitionCountConstraint(altered, opDesc, entry.getValue());
+            }
+        }
+
+        // useConnectorSchedulingPolicy
+        altered.setUseConnectorPolicyForScheduling(spec.isUseConnectorPolicyForScheduling());
+
+        // connectorAssignmentPolicy
+        altered.setConnectorPolicyAssignmentPolicy(spec.getConnectorPolicyAssignmentPolicy());
+
+        // roots
+        for (OperatorDescriptorId root : spec.getRoots()) {
+            altered.addRoot(altered.getOperatorMap().get(oldNewOID.get(root)));
+        }
+
+        // jobEventListenerFactory
+        altered.setJobletEventListenerFactory(spec.getJobletEventListenerFactory());
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("New Job Spec:" + altered);
+        }
+
+        return altered;
+
+    }
+
+    public static void increaseCardinality(JobSpecification spec, FeedRuntimeType compute, int requiredCardinality,
+            List<String> newLocations) throws AsterixException {
+        IOperatorDescriptor changingOpDesc = alterJobSpecForComputeCardinality(spec, requiredCardinality);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, changingOpDesc,
+                nChooseK(requiredCardinality, newLocations));
+
+    }
+
+    public static void decreaseComputeCardinality(JobSpecification spec, FeedRuntimeType compute,
+            int requiredCardinality, List<String> currentLocations) throws AsterixException {
+        IOperatorDescriptor changingOpDesc = alterJobSpecForComputeCardinality(spec, requiredCardinality);
+        String[] chosenLocations = nChooseK(requiredCardinality, currentLocations);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, changingOpDesc, chosenLocations);
+    }
+
+    private static IOperatorDescriptor alterJobSpecForComputeCardinality(JobSpecification spec, int requiredCardinality)
+            throws AsterixException {
+        Map<ConnectorDescriptorId, IConnectorDescriptor> connectors = spec.getConnectorMap();
+        Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> connectorOpMap = spec
+                .getConnectorOperatorMap();
+
+        IOperatorDescriptor sourceOp = null;
+        IOperatorDescriptor targetOp = null;
+        IConnectorDescriptor connDesc = null;
+        for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : connectorOpMap
+                .entrySet()) {
+            ConnectorDescriptorId cid = entry.getKey();
+            sourceOp = entry.getValue().getKey().getKey();
+            if (sourceOp instanceof FeedCollectOperatorDescriptor) {
+                targetOp = entry.getValue().getValue().getKey();
+                if (targetOp instanceof FeedMetaOperatorDescriptor
+                        && (((FeedMetaOperatorDescriptor) targetOp).getRuntimeType().equals(FeedRuntimeType.COMPUTE))) {
+                    connDesc = connectors.get(cid);
+                    break;
+                } else {
+                    throw new AsterixException("Incorrect manipulation, feed does not have a compute stage");
+                }
+            }
+        }
+
+        Map<OperatorDescriptorId, List<IConnectorDescriptor>> operatorInputMap = spec.getOperatorInputMap();
+        boolean removed = operatorInputMap.get(targetOp.getOperatorId()).remove(connDesc);
+        if (!removed) {
+            throw new AsterixException("Connector desc not found");
+        }
+        Map<OperatorDescriptorId, List<IConnectorDescriptor>> operatorOutputMap = spec.getOperatorOutputMap();
+        removed = operatorOutputMap.get(sourceOp.getOperatorId()).remove(connDesc);
+        if (!removed) {
+            throw new AsterixException("Connector desc not found");
+        }
+        spec.getConnectorMap().remove(connDesc.getConnectorId());
+        connectorOpMap.remove(connDesc.getConnectorId());
+
+        ITuplePartitionComputerFactory tpcf = new RandomPartitionComputerFactory(requiredCardinality);
+        MToNPartitioningConnectorDescriptor newConnector = new MToNPartitioningConnectorDescriptor(spec, tpcf);
+        spec.getConnectorMap().put(newConnector.getConnectorId(), newConnector);
+        spec.connect(newConnector, sourceOp, 0, targetOp, 0);
+
+        // ==============================================================================
+        Set<Constraint> userConstraints = spec.getUserConstraints();
+        Constraint countConstraint = null;
+        Constraint locationConstraint = null;
+        List<LocationConstraint> locations = new ArrayList<LocationConstraint>();
+        IOperatorDescriptor changingOpDesc = null;
+
+        for (Constraint constraint : userConstraints) {
+            LValueConstraintExpression lexpr = constraint.getLValue();
+            ConstraintExpression cexpr = constraint.getRValue();
+            OperatorDescriptorId opId;
+            switch (lexpr.getTag()) {
+                case PARTITION_COUNT: {
+                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
+                    IOperatorDescriptor opDesc = spec.getOperatorMap().get(opId);
+                    if (opDesc instanceof FeedMetaOperatorDescriptor) {
+                        FeedRuntimeType runtimeType = ((FeedMetaOperatorDescriptor) opDesc).getRuntimeType();
+                        if (runtimeType.equals(FeedRuntimeType.COMPUTE)) {
+                            countConstraint = constraint;
+                            changingOpDesc = opDesc;
+                        }
+                    }
+                    break;
+                }
+                case PARTITION_LOCATION:
+                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
+                    IOperatorDescriptor opDesc = spec.getOperatorMap().get(opId);
+                    if (opDesc instanceof FeedMetaOperatorDescriptor) {
+                        FeedRuntimeType runtimeType = ((FeedMetaOperatorDescriptor) opDesc).getRuntimeType();
+                        if (runtimeType.equals(FeedRuntimeType.COMPUTE)) {
+                            locationConstraint = constraint;
+                            changingOpDesc = opDesc;
+                            String location = (String) ((ConstantExpression) cexpr).getValue();
+                            LocationConstraint lc = new LocationConstraint();
+                            lc.location = location;
+                            lc.partition = ((PartitionLocationExpression) lexpr).getPartition();
+                            locations.add(lc);
+                        }
+                    }
+                    break;
+                default:
+                    break;
+            }
+        }
+
+        userConstraints.remove(countConstraint);
+        if (locationConstraint != null) {
+            userConstraints.remove(locationConstraint);
+        }
+
+        return changingOpDesc;
+    }
+
+    private static String[] nChooseK(int k, List<String> locations) {
+        String[] result = new String[k];
+        for (int i = 0; i < k; i++) {
+            result[i] = locations.get(i);
+        }
+        return result;
+    }
+
+    private static boolean preProcessingRequired(FeedConnectionId connectionId) {
+        MetadataTransactionContext ctx = null;
+        Feed feed = null;
+        boolean preProcessingRequired = false;
+        try {
+            MetadataManager.INSTANCE.acquireReadLatch();
+            ctx = MetadataManager.INSTANCE.beginTransaction();
+            feed = MetadataManager.INSTANCE.getFeed(ctx, connectionId.getFeedId().getDataverse(),
+                    connectionId.getFeedId().getFeedName());
+            preProcessingRequired = feed.getAppliedFunction() != null;
+            MetadataManager.INSTANCE.commitTransaction(ctx);
+        } catch (Exception e) {
+            if (ctx != null) {
+                try {
+                    MetadataManager.INSTANCE.abortTransaction(ctx);
+                } catch (Exception abortException) {
+                    e.addSuppressed(abortException);
+                    throw new IllegalStateException(e);
+                }
+            }
+        } finally {
+            MetadataManager.INSTANCE.releaseReadLatch();
+        }
+        return preProcessingRequired;
+    }
+
+    public static Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> getPrimaryFeedFactoryAndOutput(
+            Feed feed, FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx)
+                    throws AlgebricksException {
+        // This method needs to be re-visited
+        String adapterName = null;
+        DatasourceAdapter adapterEntity = null;
+        String adapterFactoryClassname = null;
+        IAdapterFactory adapterFactory = null;
+        ARecordType adapterOutputType = null;
+        Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> feedProps = null;
+        IDataSourceAdapter.AdapterType adapterType = null;
+        try {
+            adapterName = feed.getAdapterName();
+            Map<String, String> configuration = feed.getAdapterConfiguration();
+            configuration.putAll(policyAccessor.getFeedPolicy());
+            adapterOutputType = getOutputType(feed, configuration);
+            ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
+            // Get adapter from metadata dataset <Metadata dataverse>
+            adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
+                    adapterName);
+            // Get adapter from metadata dataset <The feed dataverse>
+            if (adapterEntity == null) {
+                adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
+            }
+
+            if (adapterEntity != null) {
+                adapterType = adapterEntity.getType();
+                adapterFactoryClassname = adapterEntity.getClassname();
+                switch (adapterType) {
+                    case INTERNAL:
+                        adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+                        break;
+                    case EXTERNAL:
+                        String[] anameComponents = adapterName.split("#");
+                        String libraryName = anameComponents[0];
+                        ClassLoader cl = ExternalLibraryManager.getLibraryClassLoader(feed.getDataverseName(),
+                                libraryName);
+                        adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
+                        break;
+                }
+                adapterFactory.configure(configuration, adapterOutputType);
+            } else {
+                adapterFactory = AdapterFactoryProvider.getAdapterFactory(adapterName, configuration,
+                        adapterOutputType);
+                adapterType = IDataSourceAdapter.AdapterType.INTERNAL;
+            }
+            feedProps = new Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType>(adapterFactory,
+                    adapterOutputType, adapterType);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException("unable to create adapter " + e);
+        }
+        return feedProps;
+    }
+
+    private static ARecordType getOutputType(Feed feed, Map<String, String> configuration) throws Exception {
+        ARecordType outputType = null;
+        String fqOutputType = configuration.get(ExternalDataConstants.KEY_TYPE_NAME);
+
+        if (fqOutputType == null) {
+            throw new IllegalArgumentException("No output type specified");
+        }
+        String[] dataverseAndType = fqOutputType.split("[.]");
+        String dataverseName;
+        String datatypeName;
+
+        if (dataverseAndType.length == 1) {
+            datatypeName = dataverseAndType[0];
+            dataverseName = feed.getDataverseName();
+        } else if (dataverseAndType.length == 2) {
+            dataverseName = dataverseAndType[0];
+            datatypeName = dataverseAndType[1];
+        } else
+            throw new IllegalArgumentException(
+                    "Invalid value for the parameter " + ExternalDataConstants.KEY_TYPE_NAME);
+
+        MetadataTransactionContext ctx = null;
+        MetadataManager.INSTANCE.acquireReadLatch();
+        try {
+            ctx = MetadataManager.INSTANCE.beginTransaction();
+            Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, dataverseName, datatypeName);
+            IAType type = t.getDatatype();
+            if (type.getTypeTag() != ATypeTag.RECORD) {
+                throw new IllegalStateException();
+            }
+            outputType = (ARecordType) t.getDatatype();
+            MetadataManager.INSTANCE.commitTransaction(ctx);
+        } catch (Exception e) {
+            if (ctx != null) {
+                MetadataManager.INSTANCE.abortTransaction(ctx);
+            }
+            throw e;
+        } finally {
+            MetadataManager.INSTANCE.releaseReadLatch();
+        }
+        return outputType;
+    }
+
+    public static String getSecondaryFeedOutput(Feed feed, FeedPolicyAccessor policyAccessor,
+            MetadataTransactionContext mdTxnCtx) throws AlgebricksException, MetadataException {
+        String outputType = null;
+        String primaryFeedName = feed.getSourceFeedName();
+        Feed primaryFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, feed.getDataverseName(), primaryFeedName);
+        FunctionSignature appliedFunction = primaryFeed.getAppliedFunction();
+        if (appliedFunction == null) {
+            Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> result = getPrimaryFeedFactoryAndOutput(
+                    primaryFeed, policyAccessor, mdTxnCtx);
+            outputType = result.second.getTypeName();
+        } else {
+            Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
+            if (function != null) {
+                if (function.getLanguage().equals(Function.LANGUAGE_AQL)) {
+                    throw new NotImplementedException(
+                            "Secondary feeds derived from a source feed that has an applied AQL function are not supported yet.");
+                } else {
+                    outputType = function.getReturnType();
+                }
+            } else {
+                throw new IllegalArgumentException(
+                        "Function " + appliedFunction + " associated with source feed not found in Metadata.");
+            }
+        }
+        return outputType;
+    }
+
+}
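
One detail of the new class that is easy to miss is how getOutputType(...) resolves the feed's declared output type: a value of the form "Dataverse.Type" names both the dataverse and the datatype, a bare "Type" inherits the feed's own dataverse, and anything else is rejected. A stand-alone sketch of just that name resolution (sample names in main are illustrative only):

    // Stand-alone sketch of the output-type name resolution done in getOutputType above.
    public class OutputTypeNameSketch {

        /** Returns { dataverseName, datatypeName }. */
        static String[] resolve(String fqOutputType, String feedDataverse) {
            if (fqOutputType == null) {
                throw new IllegalArgumentException("No output type specified");
            }
            String[] parts = fqOutputType.split("[.]");
            if (parts.length == 1) {
                return new String[] { feedDataverse, parts[0] };
            }
            if (parts.length == 2) {
                return new String[] { parts[0], parts[1] };
            }
            throw new IllegalArgumentException("Invalid value for the output type parameter: " + fqOutputType);
        }

        public static void main(String[] args) {
            String[] bare = resolve("TweetType", "feeds");            // -> feeds.TweetType
            String[] qualified = resolve("Metadata.TweetType", "feeds"); // -> Metadata.TweetType
            System.out.println(bare[0] + "." + bare[1]);
            System.out.println(qualified[0] + "." + qualified[1]);
        }
    }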

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedSubscriptionManager.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedSubscriptionManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedSubscriptionManager.java
deleted file mode 100644
index b928e55..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedSubscriptionManager.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedSubscriptionManager;
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-
-public class FeedSubscriptionManager implements IFeedSubscriptionManager {
-
-    private static Logger LOGGER = Logger.getLogger(FeedSubscriptionManager.class.getName());
-
-    private final String nodeId;
-
-    private final Map<SubscribableFeedRuntimeId, ISubscribableRuntime> subscribableRuntimes;
-
-    public FeedSubscriptionManager(String nodeId) {
-        this.nodeId = nodeId;
-        this.subscribableRuntimes = new HashMap<SubscribableFeedRuntimeId, ISubscribableRuntime>();
-    }
-
-    @Override
-    public void registerFeedSubscribableRuntime(ISubscribableRuntime subscribableRuntime) {
-        SubscribableFeedRuntimeId sid = (SubscribableFeedRuntimeId) subscribableRuntime.getRuntimeId();
-        if (!subscribableRuntimes.containsKey(sid)) {
-            subscribableRuntimes.put(sid, subscribableRuntime);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered feed subscribable runtime " + subscribableRuntime);
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Feed ingestion runtime " + subscribableRuntime + " already registered.");
-            }
-        }
-    }
-
-    @Override
-    public ISubscribableRuntime getSubscribableRuntime(SubscribableFeedRuntimeId subscribableFeedRuntimeId) {
-        return subscribableRuntimes.get(subscribableFeedRuntimeId);
-    }
-
-    @Override
-    public void deregisterFeedSubscribableRuntime(SubscribableFeedRuntimeId ingestionId) {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("De-registered feed subscribable runtime " + ingestionId);
-        }
-        subscribableRuntimes.remove(ingestionId);
-    }
-
-    @Override
-    public String toString() {
-        return "IngestionManager [" + nodeId + "]";
-    }
-
-}
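
For reference, the class removed above amounts to a node-local, map-backed registry of subscribable runtimes: register once, warn on duplicates, look up by id, deregister. A condensed stand-alone sketch of that pattern, with generic stand-in type parameters in place of SubscribableFeedRuntimeId and ISubscribableRuntime:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.logging.Logger;

    // Condensed sketch of the registry pattern the deleted class implemented.
    public class RuntimeRegistrySketch<K, V> {

        private static final Logger LOGGER = Logger.getLogger(RuntimeRegistrySketch.class.getName());
        private final Map<K, V> runtimes = new HashMap<>();

        public void register(K id, V runtime) {
            if (runtimes.containsKey(id)) {
                LOGGER.warning("Runtime " + id + " already registered.");
                return;
            }
            runtimes.put(id, runtime);
        }

        public V get(K id) {
            return runtimes.get(id);
        }

        public void deregister(K id) {
            runtimes.remove(id);
        }

        public static void main(String[] args) {
            RuntimeRegistrySketch<String, Object> registry = new RuntimeRegistrySketch<>();
            registry.register("ingest-0", new Object());
            System.out.println(registry.get("ingest-0") != null);
        }
    }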

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedUtil.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedUtil.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedUtil.java
deleted file mode 100644
index 5ed2876..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedUtil.java
+++ /dev/null
@@ -1,590 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.config.DatasetConfig.DatasetType;
-import org.apache.asterix.common.dataflow.AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor;
-import org.apache.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.library.ExternalLibraryManager;
-import org.apache.asterix.external.provider.AdapterFactoryProvider;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.metadata.MetadataException;
-import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
-import org.apache.asterix.metadata.entities.Dataset;
-import org.apache.asterix.metadata.entities.DatasourceAdapter;
-import org.apache.asterix.metadata.entities.DatasourceAdapter.AdapterType;
-import org.apache.asterix.metadata.entities.Datatype;
-import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
-import org.apache.asterix.metadata.entities.Function;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
-import org.apache.asterix.metadata.entities.SecondaryFeed;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.ATypeTag;
-import org.apache.asterix.om.types.IAType;
-import org.apache.commons.lang3.tuple.Pair;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
-import org.apache.hyracks.algebricks.common.utils.Triple;
-import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
-import org.apache.hyracks.api.constraints.Constraint;
-import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
-import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
-import org.apache.hyracks.api.constraints.expressions.ConstraintExpression;
-import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
-import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
-import org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression;
-import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
-import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
-import org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.common.data.partition.RandomPartitionComputerFactory;
-import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import org.apache.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
-
-/**
- * A utility class for providing helper functions for feeds
- */
-public class FeedUtil {
-
-    private static Logger LOGGER = Logger.getLogger(FeedUtil.class.getName());
-
-    public static String getFeedPointKeyRep(Feed feed, List<String> appliedFunctions) {
-        StringBuilder builder = new StringBuilder();
-        builder.append(feed.getDataverseName() + ":");
-        builder.append(feed.getFeedName() + ":");
-        if (appliedFunctions != null && !appliedFunctions.isEmpty()) {
-            for (String function : appliedFunctions) {
-                builder.append(function + ":");
-            }
-            builder.deleteCharAt(builder.length() - 1);
-        }
-        return builder.toString();
-    }
-
-    private static class LocationConstraint {
-        int partition;
-        String location;
-    }
-
-    public static Dataset validateIfDatasetExists(String dataverse, String datasetName, MetadataTransactionContext ctx)
-            throws AsterixException {
-        Dataset dataset = MetadataManager.INSTANCE.getDataset(ctx, dataverse, datasetName);
-        if (dataset == null) {
-            throw new AsterixException("Unknown target dataset :" + datasetName);
-        }
-
-        if (!dataset.getDatasetType().equals(DatasetType.INTERNAL)) {
-            throw new AsterixException("Statement not applicable. Dataset " + datasetName + " is not of required type "
-                    + DatasetType.INTERNAL);
-        }
-        return dataset;
-    }
-
-    public static Feed validateIfFeedExists(String dataverse, String feedName, MetadataTransactionContext ctx)
-            throws MetadataException, AsterixException {
-        Feed feed = MetadataManager.INSTANCE.getFeed(ctx, dataverse, feedName);
-        if (feed == null) {
-            throw new AsterixException("Unknown source feed: " + feedName);
-        }
-        return feed;
-    }
-
-    public static FeedPolicy validateIfPolicyExists(String dataverse, String policyName, MetadataTransactionContext ctx)
-            throws AsterixException {
-        FeedPolicy feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, dataverse, policyName);
-        if (feedPolicy == null) {
-            feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(ctx, MetadataConstants.METADATA_DATAVERSE_NAME,
-                    policyName);
-            if (feedPolicy == null) {
-                throw new AsterixException("Unknown feed policy" + policyName);
-            }
-        }
-        return feedPolicy;
-    }
-
-    public static JobSpecification alterJobSpecificationForFeed(JobSpecification spec,
-            FeedConnectionId feedConnectionId, Map<String, String> feedPolicyProperties) {
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Original Job Spec:" + spec);
-        }
-
-        JobSpecification altered = new JobSpecification(spec.getFrameSize());
-        Map<OperatorDescriptorId, IOperatorDescriptor> operatorMap = spec.getOperatorMap();
-        boolean preProcessingRequired = preProcessingRequired(feedConnectionId);
-        // copy operators
-        String operandId = null;
-        Map<OperatorDescriptorId, OperatorDescriptorId> oldNewOID = new HashMap<OperatorDescriptorId, OperatorDescriptorId>();
-        FeedMetaOperatorDescriptor metaOp = null;
-        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operatorMap.entrySet()) {
-            operandId = FeedRuntimeId.DEFAULT_OPERAND_ID;
-            IOperatorDescriptor opDesc = entry.getValue();
-            if (opDesc instanceof FeedCollectOperatorDescriptor) {
-                FeedCollectOperatorDescriptor orig = (FeedCollectOperatorDescriptor) opDesc;
-                FeedCollectOperatorDescriptor fiop = new FeedCollectOperatorDescriptor(altered,
-                        orig.getFeedConnectionId(), orig.getSourceFeedId(), (ARecordType) orig.getOutputType(),
-                        orig.getRecordDescriptor(), orig.getFeedPolicyProperties(), orig.getSubscriptionLocation());
-                oldNewOID.put(opDesc.getOperatorId(), fiop.getOperatorId());
-            } else if (opDesc instanceof AsterixLSMTreeInsertDeleteOperatorDescriptor) {
-                operandId = ((AsterixLSMTreeInsertDeleteOperatorDescriptor) opDesc).getIndexName();
-                metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
-                        FeedRuntimeType.STORE, false, operandId);
-                oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
-            } else if (opDesc instanceof AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor) {
-                operandId = ((AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor) opDesc).getIndexName();
-                metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
-                        FeedRuntimeType.STORE, false, operandId);
-                oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
-
-            } else {
-                FeedRuntimeType runtimeType = null;
-                boolean enableSubscriptionMode = false;
-                boolean createMetaOp = true;
-                OperatorDescriptorId opId = null;
-                if (opDesc instanceof AlgebricksMetaOperatorDescriptor) {
-                    IPushRuntimeFactory runtimeFactory = ((AlgebricksMetaOperatorDescriptor) opDesc).getPipeline()
-                            .getRuntimeFactories()[0];
-                    if (runtimeFactory instanceof AssignRuntimeFactory) {
-                        IConnectorDescriptor connectorDesc = spec.getOperatorInputMap().get(opDesc.getOperatorId())
-                                .get(0);
-                        IOperatorDescriptor sourceOp = spec.getProducer(connectorDesc);
-                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
-                            runtimeType = preProcessingRequired ? FeedRuntimeType.COMPUTE : FeedRuntimeType.OTHER;
-                            enableSubscriptionMode = preProcessingRequired;
-                        } else {
-                            runtimeType = FeedRuntimeType.OTHER;
-                        }
-                    } else if (runtimeFactory instanceof EmptyTupleSourceRuntimeFactory) {
-                        runtimeType = FeedRuntimeType.ETS;
-                    } else {
-                        runtimeType = FeedRuntimeType.OTHER;
-                    }
-                } else {
-                    if (opDesc instanceof AbstractSingleActivityOperatorDescriptor) {
-                        runtimeType = FeedRuntimeType.OTHER;
-                    } else {
-                        opId = altered.createOperatorDescriptorId(opDesc);
-                        createMetaOp = false;
-                    }
-                }
-                if (createMetaOp) {
-                    metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
-                            runtimeType, enableSubscriptionMode, operandId);
-                    opId = metaOp.getOperatorId();
-                }
-                oldNewOID.put(opDesc.getOperatorId(), opId);
-            }
-        }
-
-        // copy connectors
-        Map<ConnectorDescriptorId, ConnectorDescriptorId> connectorMapping = new HashMap<ConnectorDescriptorId, ConnectorDescriptorId>();
-        for (Entry<ConnectorDescriptorId, IConnectorDescriptor> entry : spec.getConnectorMap().entrySet()) {
-            IConnectorDescriptor connDesc = entry.getValue();
-            ConnectorDescriptorId newConnId = altered.createConnectorDescriptor(connDesc);
-            connectorMapping.put(entry.getKey(), newConnId);
-        }
-
-        // make connections between operators
-        for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : spec
-                .getConnectorOperatorMap().entrySet()) {
-            IConnectorDescriptor connDesc = altered.getConnectorMap().get(connectorMapping.get(entry.getKey()));
-            Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft();
-            Pair<IOperatorDescriptor, Integer> rightOp = entry.getValue().getRight();
-
-            IOperatorDescriptor leftOpDesc = altered.getOperatorMap()
-                    .get(oldNewOID.get(leftOp.getLeft().getOperatorId()));
-            IOperatorDescriptor rightOpDesc = altered.getOperatorMap()
-                    .get(oldNewOID.get(rightOp.getLeft().getOperatorId()));
-
-            altered.connect(connDesc, leftOpDesc, leftOp.getRight(), rightOpDesc, rightOp.getRight());
-        }
-
-        // prepare for setting partition constraints
-        Map<OperatorDescriptorId, List<LocationConstraint>> operatorLocations = new HashMap<OperatorDescriptorId, List<LocationConstraint>>();
-        Map<OperatorDescriptorId, Integer> operatorCounts = new HashMap<OperatorDescriptorId, Integer>();
-
-        for (Constraint constraint : spec.getUserConstraints()) {
-            LValueConstraintExpression lexpr = constraint.getLValue();
-            ConstraintExpression cexpr = constraint.getRValue();
-            OperatorDescriptorId opId;
-            switch (lexpr.getTag()) {
-                case PARTITION_COUNT:
-                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
-                    operatorCounts.put(opId, (int) ((ConstantExpression) cexpr).getValue());
-                    break;
-                case PARTITION_LOCATION:
-                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
-
-                    IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(opId));
-                    List<LocationConstraint> locations = operatorLocations.get(opDesc.getOperatorId());
-                    if (locations == null) {
-                        locations = new ArrayList<>();
-                        operatorLocations.put(opDesc.getOperatorId(), locations);
-                    }
-                    String location = (String) ((ConstantExpression) cexpr).getValue();
-                    LocationConstraint lc = new LocationConstraint();
-                    lc.location = location;
-                    lc.partition = ((PartitionLocationExpression) lexpr).getPartition();
-                    locations.add(lc);
-                    break;
-                default:
-                    break;
-            }
-        }
-
-        // set absolute location constraints
-        for (Entry<OperatorDescriptorId, List<LocationConstraint>> entry : operatorLocations.entrySet()) {
-            IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(entry.getKey()));
-            Collections.sort(entry.getValue(), new Comparator<LocationConstraint>() {
-
-                @Override
-                public int compare(LocationConstraint o1, LocationConstraint o2) {
-                    return o1.partition - o2.partition;
-                }
-            });
-            String[] locations = new String[entry.getValue().size()];
-            for (int i = 0; i < locations.length; ++i) {
-                locations[i] = entry.getValue().get(i).location;
-            }
-            PartitionConstraintHelper.addAbsoluteLocationConstraint(altered, opDesc, locations);
-        }
-
-        // set count constraints
-        for (Entry<OperatorDescriptorId, Integer> entry : operatorCounts.entrySet()) {
-            IOperatorDescriptor opDesc = altered.getOperatorMap().get(oldNewOID.get(entry.getKey()));
-            if (!operatorLocations.keySet().contains(entry.getKey())) {
-                PartitionConstraintHelper.addPartitionCountConstraint(altered, opDesc, entry.getValue());
-            }
-        }
-
-        // useConnectorSchedulingPolicy
-        altered.setUseConnectorPolicyForScheduling(spec.isUseConnectorPolicyForScheduling());
-
-        // connectorAssignmentPolicy
-        altered.setConnectorPolicyAssignmentPolicy(spec.getConnectorPolicyAssignmentPolicy());
-
-        // roots
-        for (OperatorDescriptorId root : spec.getRoots()) {
-            altered.addRoot(altered.getOperatorMap().get(oldNewOID.get(root)));
-        }
-
-        // jobEventListenerFactory
-        altered.setJobletEventListenerFactory(spec.getJobletEventListenerFactory());
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("New Job Spec:" + altered);
-        }
-
-        return altered;
-
-    }
-
-    public static void increaseCardinality(JobSpecification spec, FeedRuntimeType compute, int requiredCardinality,
-            List<String> newLocations) throws AsterixException {
-        IOperatorDescriptor changingOpDesc = alterJobSpecForComputeCardinality(spec, requiredCardinality);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, changingOpDesc,
-                nChooseK(requiredCardinality, newLocations));
-
-    }
-
-    public static void decreaseComputeCardinality(JobSpecification spec, FeedRuntimeType compute,
-            int requiredCardinality, List<String> currentLocations) throws AsterixException {
-        IOperatorDescriptor changingOpDesc = alterJobSpecForComputeCardinality(spec, requiredCardinality);
-        String[] chosenLocations = nChooseK(requiredCardinality, currentLocations);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, changingOpDesc, chosenLocations);
-    }
-
-    private static IOperatorDescriptor alterJobSpecForComputeCardinality(JobSpecification spec, int requiredCardinality)
-            throws AsterixException {
-        Map<ConnectorDescriptorId, IConnectorDescriptor> connectors = spec.getConnectorMap();
-        Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> connectorOpMap = spec
-                .getConnectorOperatorMap();
-
-        IOperatorDescriptor sourceOp = null;
-        IOperatorDescriptor targetOp = null;
-        IConnectorDescriptor connDesc = null;
-        for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : connectorOpMap
-                .entrySet()) {
-            ConnectorDescriptorId cid = entry.getKey();
-            sourceOp = entry.getValue().getKey().getKey();
-            if (sourceOp instanceof FeedCollectOperatorDescriptor) {
-                targetOp = entry.getValue().getValue().getKey();
-                if (targetOp instanceof FeedMetaOperatorDescriptor
-                        && (((FeedMetaOperatorDescriptor) targetOp).getRuntimeType().equals(FeedRuntimeType.COMPUTE))) {
-                    connDesc = connectors.get(cid);
-                    break;
-                } else {
-                    throw new AsterixException("Incorrect manipulation, feed does not have a compute stage");
-                }
-            }
-        }
-
-        Map<OperatorDescriptorId, List<IConnectorDescriptor>> operatorInputMap = spec.getOperatorInputMap();
-        boolean removed = operatorInputMap.get(targetOp.getOperatorId()).remove(connDesc);
-        if (!removed) {
-            throw new AsterixException("Connector desc not found");
-        }
-        Map<OperatorDescriptorId, List<IConnectorDescriptor>> operatorOutputMap = spec.getOperatorOutputMap();
-        removed = operatorOutputMap.get(sourceOp.getOperatorId()).remove(connDesc);
-        if (!removed) {
-            throw new AsterixException("Connector desc not found");
-        }
-        spec.getConnectorMap().remove(connDesc.getConnectorId());
-        connectorOpMap.remove(connDesc.getConnectorId());
-
-        ITuplePartitionComputerFactory tpcf = new RandomPartitionComputerFactory(requiredCardinality);
-        MToNPartitioningConnectorDescriptor newConnector = new MToNPartitioningConnectorDescriptor(spec, tpcf);
-        spec.getConnectorMap().put(newConnector.getConnectorId(), newConnector);
-        spec.connect(newConnector, sourceOp, 0, targetOp, 0);
-
-        // ==============================================================================
-        Set<Constraint> userConstraints = spec.getUserConstraints();
-        Constraint countConstraint = null;
-        Constraint locationConstraint = null;
-        List<LocationConstraint> locations = new ArrayList<LocationConstraint>();
-        IOperatorDescriptor changingOpDesc = null;
-
-        for (Constraint constraint : userConstraints) {
-            LValueConstraintExpression lexpr = constraint.getLValue();
-            ConstraintExpression cexpr = constraint.getRValue();
-            OperatorDescriptorId opId;
-            switch (lexpr.getTag()) {
-                case PARTITION_COUNT: {
-                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
-                    IOperatorDescriptor opDesc = spec.getOperatorMap().get(opId);
-                    if (opDesc instanceof FeedMetaOperatorDescriptor) {
-                        FeedRuntimeType runtimeType = ((FeedMetaOperatorDescriptor) opDesc).getRuntimeType();
-                        if (runtimeType.equals(FeedRuntimeType.COMPUTE)) {
-                            countConstraint = constraint;
-                            changingOpDesc = opDesc;
-                        }
-                    }
-                    break;
-                }
-                case PARTITION_LOCATION:
-                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
-                    IOperatorDescriptor opDesc = spec.getOperatorMap().get(opId);
-                    if (opDesc instanceof FeedMetaOperatorDescriptor) {
-                        FeedRuntimeType runtimeType = ((FeedMetaOperatorDescriptor) opDesc).getRuntimeType();
-                        if (runtimeType.equals(FeedRuntimeType.COMPUTE)) {
-                            locationConstraint = constraint;
-                            changingOpDesc = opDesc;
-                            String location = (String) ((ConstantExpression) cexpr).getValue();
-                            LocationConstraint lc = new LocationConstraint();
-                            lc.location = location;
-                            lc.partition = ((PartitionLocationExpression) lexpr).getPartition();
-                            locations.add(lc);
-                        }
-                    }
-                    break;
-                default:
-                    break;
-            }
-        }
-
-        userConstraints.remove(countConstraint);
-        if (locationConstraint != null) {
-            userConstraints.remove(locationConstraint);
-        }
-
-        return changingOpDesc;
-    }
-
-    private static String[] nChooseK(int k, List<String> locations) {
-        String[] result = new String[k];
-        for (int i = 0; i < k; i++) {
-            result[i] = locations.get(i);
-        }
-        return result;
-    }
-
-    private static boolean preProcessingRequired(FeedConnectionId connectionId) {
-        MetadataTransactionContext ctx = null;
-        Feed feed = null;
-        boolean preProcessingRequired = false;
-        try {
-            MetadataManager.INSTANCE.acquireReadLatch();
-            ctx = MetadataManager.INSTANCE.beginTransaction();
-            feed = MetadataManager.INSTANCE.getFeed(ctx, connectionId.getFeedId().getDataverse(),
-                    connectionId.getFeedId().getFeedName());
-            preProcessingRequired = feed.getAppliedFunction() != null;
-            MetadataManager.INSTANCE.commitTransaction(ctx);
-        } catch (Exception e) {
-            if (ctx != null) {
-                try {
-                    MetadataManager.INSTANCE.abortTransaction(ctx);
-                } catch (Exception abortException) {
-                    e.addSuppressed(abortException);
-                    throw new IllegalStateException(e);
-                }
-            }
-        } finally {
-            MetadataManager.INSTANCE.releaseReadLatch();
-        }
-        return preProcessingRequired;
-    }
-
-    public static Triple<IAdapterFactory, ARecordType, AdapterType> getPrimaryFeedFactoryAndOutput(PrimaryFeed feed,
-            FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
-
-        String adapterName = null;
-        DatasourceAdapter adapterEntity = null;
-        String adapterFactoryClassname = null;
-        IAdapterFactory adapterFactory = null;
-        ARecordType adapterOutputType = null;
-        Triple<IAdapterFactory, ARecordType, AdapterType> feedProps = null;
-        AdapterType adapterType = null;
-        try {
-            adapterName = feed.getAdaptorName();
-            Map<String, String> configuration = feed.getAdaptorConfiguration();
-            configuration.putAll(policyAccessor.getFeedPolicy());
-            adapterOutputType = getOutputType(feed, configuration);
-            adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
-                    adapterName);
-            if (adapterEntity == null) {
-                adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
-            }
-            if (adapterEntity != null) {
-                adapterType = adapterEntity.getType();
-                adapterFactoryClassname = adapterEntity.getClassname();
-                switch (adapterType) {
-                    case INTERNAL:
-                        adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
-                        break;
-                    case EXTERNAL:
-                        String[] anameComponents = adapterName.split("#");
-                        String libraryName = anameComponents[0];
-                        ClassLoader cl = ExternalLibraryManager.getLibraryClassLoader(feed.getDataverseName(),
-                                libraryName);
-                        adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
-                        break;
-                }
-                adapterFactory.configure(configuration, adapterOutputType);
-            } else {
-                configuration.put(ExternalDataConstants.KEY_DATAVERSE, feed.getDataverseName());
-                adapterFactory = AdapterFactoryProvider.getAdapterFactory(adapterName, configuration,
-                        adapterOutputType);
-                adapterType = AdapterType.INTERNAL;
-            }
-            feedProps = new Triple<IAdapterFactory, ARecordType, AdapterType>(adapterFactory, adapterOutputType,
-                    adapterType);
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new AlgebricksException("unable to create adapter " + e);
-        }
-        return feedProps;
-    }
-
-    private static ARecordType getOutputType(PrimaryFeed feed, Map<String, String> configuration) throws Exception {
-        ARecordType outputType = null;
-        String fqOutputType = configuration.get(ExternalDataConstants.KEY_TYPE_NAME);
-
-        if (fqOutputType == null) {
-            throw new IllegalArgumentException("No output type specified");
-        }
-        String[] dataverseAndType = fqOutputType.split("[.]");
-        String dataverseName;
-        String datatypeName;
-
-        if (dataverseAndType.length == 1) {
-            datatypeName = dataverseAndType[0];
-            dataverseName = feed.getDataverseName();
-        } else if (dataverseAndType.length == 2) {
-            dataverseName = dataverseAndType[0];
-            datatypeName = dataverseAndType[1];
-        } else
-            throw new IllegalArgumentException(
-                    "Invalid value for the parameter " + ExternalDataConstants.KEY_TYPE_NAME);
-
-        MetadataTransactionContext ctx = null;
-        MetadataManager.INSTANCE.acquireReadLatch();
-        try {
-            ctx = MetadataManager.INSTANCE.beginTransaction();
-            Datatype t = MetadataManager.INSTANCE.getDatatype(ctx, dataverseName, datatypeName);
-            IAType type = t.getDatatype();
-            if (type.getTypeTag() != ATypeTag.RECORD) {
-                throw new IllegalStateException();
-            }
-            outputType = (ARecordType) t.getDatatype();
-            MetadataManager.INSTANCE.commitTransaction(ctx);
-        } catch (Exception e) {
-            if (ctx != null) {
-                MetadataManager.INSTANCE.abortTransaction(ctx);
-            }
-            throw e;
-        } finally {
-            MetadataManager.INSTANCE.releaseReadLatch();
-        }
-        return outputType;
-    }
-
-    public static String getSecondaryFeedOutput(SecondaryFeed feed, FeedPolicyAccessor policyAccessor,
-            MetadataTransactionContext mdTxnCtx) throws AlgebricksException, MetadataException {
-        String outputType = null;
-        String primaryFeedName = feed.getSourceFeedName();
-        Feed primaryFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, feed.getDataverseName(), primaryFeedName);
-        FunctionSignature appliedFunction = primaryFeed.getAppliedFunction();
-        if (appliedFunction == null) {
-            Triple<IAdapterFactory, ARecordType, AdapterType> result = getPrimaryFeedFactoryAndOutput(
-                    (PrimaryFeed) primaryFeed, policyAccessor, mdTxnCtx);
-            outputType = result.second.getTypeName();
-        } else {
-            Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
-            if (function != null) {
-                if (function.getLanguage().equals(Function.LANGUAGE_AQL)) {
-                    throw new NotImplementedException(
-                            "Secondary feeds derived from a source feed that has an applied AQL function are not supported yet.");
-                } else {
-                    outputType = function.getReturnType();
-                }
-            } else {
-                throw new IllegalArgumentException(
-                        "Function " + appliedFunction + " associated with source feed not found in Metadata.");
-            }
-        }
-        return outputType;
-    }
-
-}
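
The helper removed above re-derives absolute location constraints by sorting the collected LocationConstraint entries by partition and, when changing the compute cardinality, picks target nodes by simply taking the first k candidates (nChooseK). A minimal, self-contained sketch of that selection logic, using only JDK types; the LocationConstraint fields mirror the code above, while the class name and node names are made up for illustration:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class LocationSelectionSketch {

    // Mirrors the LocationConstraint used above: a (partition, location) pair.
    static class LocationConstraint {
        final int partition;
        final String location;

        LocationConstraint(int partition, String location) {
            this.partition = partition;
            this.location = location;
        }
    }

    // Sort by partition and project out the node names, as done before
    // re-applying absolute location constraints to the altered job spec.
    static String[] toOrderedLocations(List<LocationConstraint> constraints) {
        constraints.sort(Comparator.comparingInt(c -> c.partition));
        String[] locations = new String[constraints.size()];
        for (int i = 0; i < locations.length; i++) {
            locations[i] = constraints.get(i).location;
        }
        return locations;
    }

    // Despite its name, nChooseK above simply keeps the first k candidates.
    static String[] firstK(int k, List<String> candidates) {
        return candidates.subList(0, k).toArray(new String[0]);
    }

    public static void main(String[] args) {
        List<LocationConstraint> lcs = new ArrayList<>(Arrays.asList(
                new LocationConstraint(1, "nc2"), new LocationConstraint(0, "nc1")));
        System.out.println(Arrays.toString(toOrderedLocations(lcs)));                // [nc1, nc2]
        System.out.println(Arrays.toString(firstK(1, Arrays.asList("nc1", "nc2")))); // [nc1]
    }
}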

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedWorkManager.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedWorkManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedWorkManager.java
deleted file mode 100644
index be12ff0..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedWorkManager.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-import org.apache.asterix.common.feeds.api.IFeedWork;
-import org.apache.asterix.common.feeds.api.IFeedWorkEventListener;
-import org.apache.asterix.common.feeds.api.IFeedWorkManager;
-
-/**
- * Handles asynchronous execution of feed management related tasks.
- */
-public class FeedWorkManager implements IFeedWorkManager {
-
-    public static final FeedWorkManager INSTANCE = new FeedWorkManager();
-
-    private final ExecutorService executorService = Executors.newCachedThreadPool();
-
-    private FeedWorkManager() {
-    }
-
-    public void submitWork(IFeedWork work, IFeedWorkEventListener listener) {
-        Runnable runnable = work.getRunnable();
-        try {
-            executorService.execute(runnable);
-            listener.workCompleted(work);
-        } catch (Exception e) {
-            listener.workFailed(work, e);
-        }
-    }
-
-}
\ No newline at end of file
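
The deleted FeedWorkManager hands each IFeedWork to a cached thread pool and invokes workCompleted() as soon as execute() returns, so the listener is notified at submission time rather than at completion. A small sketch, with hypothetical Work and WorkListener stand-ins for the feed interfaces, of the variant where the callback fires only when the task actually finishes or fails:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class WorkManagerSketch {

    // Hypothetical stand-ins for IFeedWork / IFeedWorkEventListener.
    interface Work {
        Runnable getRunnable();
    }

    interface WorkListener {
        void workCompleted(Work work);

        void workFailed(Work work, Exception e);
    }

    private final ExecutorService executor = Executors.newCachedThreadPool();

    // Wrap the runnable so that completion (or failure) is reported after the
    // task runs, not after it is merely handed to the executor.
    public void submitWork(Work work, WorkListener listener) {
        executor.execute(() -> {
            try {
                work.getRunnable().run();
                listener.workCompleted(work);
            } catch (Exception e) {
                listener.workFailed(work, e);
            }
        });
    }
}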

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IAdapterExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IAdapterExecutor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IAdapterExecutor.java
deleted file mode 100644
index ff641af..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IAdapterExecutor.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-
-public interface IAdapterExecutor {
-
-    /**
-     * @throws Exception
-     */
-    public void start() throws Exception;
-
-    /**
-     * @throws Exception
-     */
-    public void stop() throws Exception;
-
-    /**
-     * @return
-     */
-    public FeedConnectionId getFeedId();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IFeedMessage.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IFeedMessage.java
deleted file mode 100644
index 9180671..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/IFeedMessage.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.io.Serializable;
-
-public interface IFeedMessage extends Serializable {
-
-    public enum MessageType {
-        END,
-        SUPER_FEED_MANAGER_ELECT
-    }
-
-    public MessageType getMessageType();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ITypedAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ITypedAdapterFactory.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ITypedAdapterFactory.java
deleted file mode 100644
index f35c21f..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ITypedAdapterFactory.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Map;
-
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.om.types.ARecordType;
-
-public interface ITypedAdapterFactory extends IAdapterFactory {
-
-    public ARecordType getAdapterOutputType();
-
-    public void configure(Map<String, String> configuration) throws Exception;
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/MessageListener.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/MessageListener.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/MessageListener.java
deleted file mode 100644
index 650cb92..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/MessageListener.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.nio.CharBuffer;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-public class MessageListener {
-
-    private static final Logger LOGGER = Logger.getLogger(MessageListener.class.getName());
-
-    private final int port;
-    private final LinkedBlockingQueue<String> outbox;
-
-    private ExecutorService executorService = Executors.newFixedThreadPool(10);
-
-    private MessageListenerServer listenerServer;
-
-    public MessageListener(int port, LinkedBlockingQueue<String> outbox) {
-        this.port = port;
-        this.outbox = outbox;
-    }
-
-    public void stop() {
-        listenerServer.stop();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Stopped message service at " + port);
-        }
-        if (!executorService.isShutdown()) {
-            executorService.shutdownNow();
-        }
-
-    }
-
-    public void start() throws IOException {
-        listenerServer = new MessageListenerServer(port, outbox);
-        executorService.execute(listenerServer);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting message service at " + port);
-        }
-    }
-
-    private static class MessageListenerServer implements Runnable {
-
-        private final int port;
-        private final LinkedBlockingQueue<String> outbox;
-        private ServerSocket server;
-
-        public MessageListenerServer(int port, LinkedBlockingQueue<String> outbox) {
-            this.port = port;
-            this.outbox = outbox;
-        }
-
-        public void stop() {
-            try {
-                server.close();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-
-        @Override
-        public void run() {
-            char EOL = (char) "\n".getBytes()[0];
-            Socket client = null;
-            try {
-                server = new ServerSocket(port);
-                client = server.accept();
-                InputStream in = client.getInputStream();
-                CharBuffer buffer = CharBuffer.allocate(5000);
-                char ch;
-                while (true) {
-                    ch = (char) in.read();
-                    if (((int) ch) == -1) {
-                        break;
-                    }
-                    while (ch != EOL) {
-                        buffer.put(ch);
-                        ch = (char) in.read();
-                    }
-                    buffer.flip();
-                    String s = new String(buffer.array());
-                    synchronized (outbox) {
-                        outbox.add(s + "\n");
-                    }
-                    buffer.position(0);
-                    buffer.limit(5000);
-                }
-
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unable to start Message listener" + server);
-                }
-            } finally {
-                if (server != null) {
-                    try {
-                        server.close();
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-
-        }
-
-    }
-
-    public static interface IMessageAnalyzer {
-
-        public LinkedBlockingQueue<String> getMessageQueue();
-
-    }
-
-}
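
In the reader loop above, each byte returned by InputStream.read() is cast to char before the end-of-stream test, so the comparison is against (char) -1 (0xFFFF) and never matches -1; the same pattern appears again in RemoteSocketMessageListener below. A minimal sketch of consuming newline-delimited messages with BufferedReader, where end of stream is signalled by a null line; the port and outbox queue are placeholders:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.ServerSocket;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.LinkedBlockingQueue;

public class LineListenerSketch {

    // Accept a single client and enqueue each newline-terminated message.
    public static void listen(int port, LinkedBlockingQueue<String> outbox) throws IOException {
        try (ServerSocket server = new ServerSocket(port);
                Socket client = server.accept();
                BufferedReader in = new BufferedReader(
                        new InputStreamReader(client.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            // readLine() returns null at end of stream, so termination is explicit
            // and cannot be masked by a char cast.
            while ((line = in.readLine()) != null) {
                outbox.add(line + "\n");
            }
        }
    }
}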

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/PrepareStallMessage.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/PrepareStallMessage.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/PrepareStallMessage.java
deleted file mode 100644
index 3ca1147..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/PrepareStallMessage.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.message.FeedMessage;
-
-/**
- * A feed control message indicating the need to end the feed. This message is dispatched
- * to all locations that host an operator involved in the feed pipeline.
- */
-public class PrepareStallMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-
-    private final int computePartitionsRetainLimit;
-
-    public PrepareStallMessage(FeedConnectionId connectionId, int computePartitionsRetainLimit) {
-        super(MessageType.PREPARE_STALL);
-        this.connectionId = connectionId;
-        this.computePartitionsRetainLimit = computePartitionsRetainLimit;
-    }
-
-    @Override
-    public String toString() {
-        return MessageType.PREPARE_STALL.name() + "  " + connectionId;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.COMPUTE_PARTITION_RETAIN_LIMIT, computePartitionsRetainLimit);
-        return obj;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public int getComputePartitionsRetainLimit() {
-        return computePartitionsRetainLimit;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/RemoteSocketMessageListener.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/RemoteSocketMessageListener.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/RemoteSocketMessageListener.java
deleted file mode 100644
index 5c5c068..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/RemoteSocketMessageListener.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.Socket;
-import java.nio.CharBuffer;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-public class RemoteSocketMessageListener {
-
-    private static final Logger LOGGER = Logger.getLogger(RemoteSocketMessageListener.class.getName());
-
-    private final String host;
-    private final int port;
-    private final LinkedBlockingQueue<String> outbox;
-    private final ExecutorService executorService = Executors.newFixedThreadPool(10);
-
-    private RemoteMessageListenerServer listenerServer;
-
-    public RemoteSocketMessageListener(String host, int port, LinkedBlockingQueue<String> outbox) {
-        this.host = host;
-        this.port = port;
-        this.outbox = outbox;
-    }
-
-    public void stop() {
-        if (!executorService.isShutdown()) {
-            executorService.shutdownNow();
-        }
-        listenerServer.stop();
-
-    }
-
-    public void start() throws IOException {
-        listenerServer = new RemoteMessageListenerServer(host, port, outbox);
-        executorService.execute(listenerServer);
-    }
-
-    private static class RemoteMessageListenerServer implements Runnable {
-
-        private final String host;
-        private final int port;
-        private final LinkedBlockingQueue<String> outbox;
-        private Socket client;
-
-        public RemoteMessageListenerServer(String host, int port, LinkedBlockingQueue<String> outbox) {
-            this.host = host;
-            this.port = port;
-            this.outbox = outbox;
-        }
-
-        public void stop() {
-            try {
-                client.close();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-
-        @Override
-        public void run() {
-            char EOL = (char) "\n".getBytes()[0];
-            Socket client = null;
-            try {
-                client = new Socket(host, port);
-                InputStream in = client.getInputStream();
-                CharBuffer buffer = CharBuffer.allocate(5000);
-                char ch;
-                while (true) {
-                    ch = (char) in.read();
-                    if ((ch) == -1) {
-                        break;
-                    }
-                    while (ch != EOL) {
-                        buffer.put(ch);
-                        ch = (char) in.read();
-                    }
-                    buffer.flip();
-                    String s = new String(buffer.array());
-                    synchronized (outbox) {
-                        outbox.add(s + "\n");
-                    }
-                    buffer.position(0);
-                    buffer.limit(5000);
-                }
-
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unable to start Remote Message listener" + client);
-                }
-            } finally {
-                if (client != null && !client.isClosed()) {
-                    try {
-                        client.close();
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-        }
-    }
-
-    public static interface IMessageAnalyzer {
-
-        /**
-         * @return
-         */
-        public LinkedBlockingQueue<String> getMessageQueue();
-
-    }
-
-}


[25/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java
new file mode 100644
index 0000000..b42ef1e
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java
@@ -0,0 +1,742 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.api.IFeedJoint.State;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+import org.apache.asterix.external.feed.management.FeedWorkManager;
+import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.JobType;
+import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
+import org.apache.asterix.feed.FeedLifecycleListener.Message;
+import org.apache.asterix.feed.FeedWorkCollection.SubscribeFeedWork;
+import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobInfo;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.api.job.JobStatus;
+import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
+
+public class FeedJobNotificationHandler implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
+
+    private final LinkedBlockingQueue<Message> inbox;
+    private final Map<FeedConnectionId, List<IFeedLifecycleEventSubscriber>> eventSubscribers;
+
+    private final Map<JobId, FeedJobInfo> jobInfos;
+    private final Map<FeedId, FeedIntakeInfo> intakeJobInfos;
+    private final Map<FeedConnectionId, FeedConnectJobInfo> connectJobInfos;
+    private final Map<FeedId, List<IFeedJoint>> feedPipeline;
+    private final Map<FeedConnectionId, Pair<IIntakeProgressTracker, Long>> feedIntakeProgressTrackers;
+
+    public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
+        this.inbox = inbox;
+        this.jobInfos = new HashMap<JobId, FeedJobInfo>();
+        this.intakeJobInfos = new HashMap<FeedId, FeedIntakeInfo>();
+        this.connectJobInfos = new HashMap<FeedConnectionId, FeedConnectJobInfo>();
+        this.feedPipeline = new HashMap<FeedId, List<IFeedJoint>>();
+        this.eventSubscribers = new HashMap<FeedConnectionId, List<IFeedLifecycleEventSubscriber>>();
+        this.feedIntakeProgressTrackers = new HashMap<FeedConnectionId, Pair<IIntakeProgressTracker, Long>>();
+    }
+
+    @Override
+    public void run() {
+        Message mesg;
+        while (true) {
+            try {
+                mesg = inbox.take();
+                switch (mesg.messageKind) {
+                    case JOB_START:
+                        handleJobStartMessage(mesg);
+                        break;
+                    case JOB_FINISH:
+                        handleJobFinishMessage(mesg);
+                        break;
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+
+        }
+    }
+
+    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
+            IIntakeProgressTracker feedIntakeProgressTracker) {
+        if (feedIntakeProgressTrackers.get(connectionId) == null) {
+            this.feedIntakeProgressTrackers.put(connectionId, new Pair<IIntakeProgressTracker, Long>(
+                    feedIntakeProgressTracker, 0L));
+        } else {
+            throw new IllegalStateException(" Progress tracker for connection " + connectionId
+                    + " is already registered");
+        }
+    }
+
+    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
+        this.feedIntakeProgressTrackers.remove(connectionId);
+    }
+
+    public void updateTrackingInformation(StorageReportFeedMessage srm) {
+        Pair<IIntakeProgressTracker, Long> p = feedIntakeProgressTrackers.get(srm.getConnectionId());
+        if (p != null && p.second < srm.getLastPersistedTupleIntakeTimestamp()) {
+            p.second = srm.getLastPersistedTupleIntakeTimestamp();
+            p.first.notifyIngestedTupleTimestamp(p.second);
+        }
+    }
+
+    public Collection<FeedIntakeInfo> getFeedIntakeInfos() {
+        return intakeJobInfos.values();
+    }
+
+    public Collection<FeedConnectJobInfo> getFeedConnectInfos() {
+        return connectJobInfos.values();
+    }
+
+    public void registerFeedJoint(IFeedJoint feedJoint) {
+        List<IFeedJoint> feedJointsOnPipeline = feedPipeline.get(feedJoint.getOwnerFeedId());
+        if (feedJointsOnPipeline == null) {
+            feedJointsOnPipeline = new ArrayList<IFeedJoint>();
+            feedPipeline.put(feedJoint.getOwnerFeedId(), feedJointsOnPipeline);
+            feedJointsOnPipeline.add(feedJoint);
+        } else {
+            if (!feedJointsOnPipeline.contains(feedJoint)) {
+                feedJointsOnPipeline.add(feedJoint);
+            } else {
+                throw new IllegalArgumentException("Feed joint " + feedJoint + " already registered");
+            }
+        }
+    }
+
+    public void registerFeedIntakeJob(FeedId feedId, JobId jobId, JobSpecification jobSpec) throws HyracksDataException {
+        if (jobInfos.get(jobId) != null) {
+            throw new IllegalStateException("Feed job already registered");
+        }
+
+        List<IFeedJoint> joints = feedPipeline.get(feedId);
+        IFeedJoint intakeJoint = null;
+        for (IFeedJoint joint : joints) {
+            if (joint.getType().equals(IFeedJoint.FeedJointType.INTAKE)) {
+                intakeJoint = joint;
+                break;
+            }
+        }
+
+        if (intakeJoint != null) {
+            FeedIntakeInfo intakeJobInfo = new FeedIntakeInfo(jobId, FeedJobState.CREATED, FeedJobInfo.JobType.INTAKE,
+                    feedId, intakeJoint, jobSpec);
+            intakeJobInfos.put(feedId, intakeJobInfo);
+            jobInfos.put(jobId, intakeJobInfo);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered feed intake [" + jobId + "]" + " for feed " + feedId);
+            }
+        } else {
+            throw new HyracksDataException("Could not register feed intake job [" + jobId + "]" + " for feed  "
+                    + feedId);
+        }
+    }
+
+    public void registerFeedCollectionJob(FeedId sourceFeedId, FeedConnectionId connectionId, JobId jobId,
+            JobSpecification jobSpec, Map<String, String> feedPolicy) {
+        if (jobInfos.get(jobId) != null) {
+            throw new IllegalStateException("Feed job already registered");
+        }
+
+        List<IFeedJoint> feedJoints = feedPipeline.get(sourceFeedId);
+        FeedConnectionId cid = null;
+        IFeedJoint sourceFeedJoint = null;
+        for (IFeedJoint joint : feedJoints) {
+            cid = joint.getReceiver(connectionId);
+            if (cid != null) {
+                sourceFeedJoint = joint;
+                break;
+            }
+        }
+
+        if (cid != null) {
+            FeedConnectJobInfo cInfo = new FeedConnectJobInfo(jobId, FeedJobState.CREATED, connectionId,
+                    sourceFeedJoint, null, jobSpec, feedPolicy);
+            jobInfos.put(jobId, cInfo);
+            connectJobInfos.put(connectionId, cInfo);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered feed connection [" + jobId + "]" + " for feed " + connectionId);
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Could not register feed collection job [" + jobId + "]" + " for feed connection "
+                        + connectionId);
+            }
+        }
+
+    }
+
+    public void deregisterFeedIntakeJob(JobId jobId) {
+        if (jobInfos.get(jobId) == null) {
+            throw new IllegalStateException(" Feed Intake job not registered ");
+        }
+
+        FeedIntakeInfo info = (FeedIntakeInfo) jobInfos.get(jobId);
+        jobInfos.remove(jobId);
+        intakeJobInfos.remove(info.getFeedId());
+
+        if (!info.getState().equals(FeedJobState.UNDER_RECOVERY)) {
+            List<IFeedJoint> joints = feedPipeline.get(info.getFeedId());
+            joints.remove(info.getIntakeFeedJoint());
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deregistered feed intake job [" + jobId + "]");
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Not removing feed joint as intake job is in " + FeedJobState.UNDER_RECOVERY + " state.");
+            }
+        }
+
+    }
+
+    private void handleJobStartMessage(Message message) throws Exception {
+        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
+        switch (jobInfo.getJobType()) {
+            case INTAKE:
+                handleIntakeJobStartMessage((FeedIntakeInfo) jobInfo);
+                break;
+            case FEED_CONNECT:
+                handleCollectJobStartMessage((FeedConnectJobInfo) jobInfo);
+                break;
+        }
+
+    }
+
+    private void handleJobFinishMessage(Message message) throws Exception {
+        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
+        switch (jobInfo.getJobType()) {
+            case INTAKE:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Intake Job finished for feed intake " + jobInfo.getJobId());
+                }
+                handleFeedIntakeJobFinishMessage((FeedIntakeInfo) jobInfo, message);
+                break;
+            case FEED_CONNECT:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Collect Job finished for " + (FeedConnectJobInfo) jobInfo);
+                }
+                handleFeedCollectJobFinishMessage((FeedConnectJobInfo) jobInfo);
+                break;
+        }
+
+    }
+
+    private synchronized void handleIntakeJobStartMessage(FeedIntakeInfo intakeJobInfo) throws Exception {
+        List<OperatorDescriptorId> intakeOperatorIds = new ArrayList<OperatorDescriptorId>();
+        Map<OperatorDescriptorId, IOperatorDescriptor> operators = intakeJobInfo.getSpec().getOperatorMap();
+        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
+            IOperatorDescriptor opDesc = entry.getValue();
+            if (opDesc instanceof FeedIntakeOperatorDescriptor) {
+                intakeOperatorIds.add(opDesc.getOperatorId());
+            }
+        }
+
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(intakeJobInfo.getJobId());
+        List<String> intakeLocations = new ArrayList<String>();
+        for (OperatorDescriptorId intakeOperatorId : intakeOperatorIds) {
+            Map<Integer, String> operatorLocations = info.getOperatorLocations().get(intakeOperatorId);
+            int nOperatorInstances = operatorLocations.size();
+            for (int i = 0; i < nOperatorInstances; i++) {
+                intakeLocations.add(operatorLocations.get(i));
+            }
+        }
+        // intakeLocations is an ordered list; element at position i corresponds to location of i'th instance of operator
+        intakeJobInfo.setIntakeLocation(intakeLocations);
+        intakeJobInfo.getIntakeFeedJoint().setState(State.ACTIVE);
+        intakeJobInfo.setState(FeedJobState.ACTIVE);
+
+        // notify event listeners 
+        notifyFeedEventSubscribers(intakeJobInfo, FeedLifecycleEvent.FEED_INTAKE_STARTED);
+    }
+
+    private void handleCollectJobStartMessage(FeedConnectJobInfo cInfo) throws RemoteException, ACIDException {
+        // set locations of feed sub-operations (intake, compute, store)
+        setLocations(cInfo);
+
+        // activate joints
+        List<IFeedJoint> joints = feedPipeline.get(cInfo.getConnectionId().getFeedId());
+        for (IFeedJoint joint : joints) {
+            if (joint.getProvider().equals(cInfo.getConnectionId())) {
+                joint.setState(State.ACTIVE);
+                if (joint.getType().equals(IFeedJoint.FeedJointType.COMPUTE)) {
+                    cInfo.setComputeFeedJoint(joint);
+                }
+            }
+        }
+        cInfo.setState(FeedJobState.ACTIVE);
+
+        // register activity in metadata
+        registerFeedActivity(cInfo);
+        // notify event listeners
+        notifyFeedEventSubscribers(cInfo, FeedLifecycleEvent.FEED_COLLECT_STARTED);
+    }
+
+    private void notifyFeedEventSubscribers(FeedJobInfo jobInfo, FeedLifecycleEvent event) {
+        JobType jobType = jobInfo.getJobType();
+        List<FeedConnectionId> impactedConnections = new ArrayList<FeedConnectionId>();
+        if (jobType.equals(JobType.INTAKE)) {
+            FeedId feedId = ((FeedIntakeInfo) jobInfo).getFeedId();
+            for (FeedConnectionId connId : eventSubscribers.keySet()) {
+                if (connId.getFeedId().equals(feedId)) {
+                    impactedConnections.add(connId);
+                }
+            }
+        } else {
+            impactedConnections.add(((FeedConnectJobInfo) jobInfo).getConnectionId());
+        }
+
+        for (FeedConnectionId connId : impactedConnections) {
+            List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connId);
+            if (subscribers != null && !subscribers.isEmpty()) {
+                for (IFeedLifecycleEventSubscriber subscriber : subscribers) {
+                    subscriber.handleFeedEvent(event);
+                }
+            }
+        }
+    }
+
+    public synchronized void submitFeedConnectionRequest(IFeedJoint feedJoint, final FeedConnectionRequest request)
+            throws Exception {
+        List<String> locations = null;
+        switch (feedJoint.getType()) {
+            case INTAKE:
+                FeedIntakeInfo intakeInfo = intakeJobInfos.get(feedJoint.getOwnerFeedId());
+                locations = intakeInfo.getIntakeLocation();
+                break;
+            case COMPUTE:
+                FeedConnectionId connectionId = feedJoint.getProvider();
+                FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+                locations = cInfo.getComputeLocations();
+                break;
+        }
+
+        SubscribeFeedWork work = new SubscribeFeedWork(locations.toArray(new String[] {}), request);
+        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
+    }
+
+    public IFeedJoint getSourceFeedJoint(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        if (cInfo != null) {
+            return cInfo.getSourceFeedJoint();
+        }
+        return null;
+    }
+
+    public Set<FeedConnectionId> getActiveFeedConnections() {
+        Set<FeedConnectionId> activeConnections = new HashSet<FeedConnectionId>();
+        for (FeedConnectJobInfo cInfo : connectJobInfos.values()) {
+            if (cInfo.getState().equals(FeedJobState.ACTIVE)) {
+                activeConnections.add(cInfo.getConnectionId());
+            }
+        }
+        return activeConnections;
+    }
+
+    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        if (cInfo != null) {
+            return cInfo.getState().equals(FeedJobState.ACTIVE);
+        }
+        return false;
+    }
+
+    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
+        FeedConnectJobInfo connectJobInfo = connectJobInfos.get(connectionId);
+        connectJobInfo.setState(jobState);
+    }
+
+    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getState();
+    }
+
+    private void handleFeedIntakeJobFinishMessage(FeedIntakeInfo intakeInfo, Message message) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(message.jobId);
+        JobStatus status = info.getStatus();
+        FeedLifecycleEvent event;
+        event = status.equals(JobStatus.FAILURE) ? FeedLifecycleEvent.FEED_INTAKE_FAILURE
+                : FeedLifecycleEvent.FEED_ENDED;
+
+        // remove feed joints
+        deregisterFeedIntakeJob(message.jobId);
+
+        // notify event listeners 
+        notifyFeedEventSubscribers(intakeInfo, event);
+
+    }
+
+    private void handleFeedCollectJobFinishMessage(FeedConnectJobInfo cInfo) throws Exception {
+        FeedConnectionId connectionId = cInfo.getConnectionId();
+
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(cInfo.getJobId());
+        JobStatus status = info.getStatus();
+        boolean failure = status != null && status.equals(JobStatus.FAILURE);
+        FeedPolicyAccessor fpa = new FeedPolicyAccessor(cInfo.getFeedPolicy());
+
+        boolean removeJobHistory = !failure;
+        boolean retainSubscription = cInfo.getState().equals(FeedJobState.UNDER_RECOVERY)
+                || (failure && fpa.continueOnHardwareFailure());
+
+        if (!retainSubscription) {
+            IFeedJoint feedJoint = cInfo.getSourceFeedJoint();
+            feedJoint.removeReceiver(connectionId);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Subscription " + cInfo.getConnectionId() + " completed successfully. Removed subscription");
+            }
+            removeFeedJointsPostPipelineTermination(cInfo.getConnectionId());
+        }
+
+        if (removeJobHistory) {
+            connectJobInfos.remove(connectionId);
+            jobInfos.remove(cInfo.getJobId());
+            feedIntakeProgressTrackers.remove(cInfo.getConnectionId());
+        }
+        deregisterFeedActivity(cInfo);
+
+        // notify event listeners 
+        FeedLifecycleEvent event = failure ? FeedLifecycleEvent.FEED_COLLECT_FAILURE : FeedLifecycleEvent.FEED_ENDED;
+        notifyFeedEventSubscribers(cInfo, event);
+    }
+
+    private void registerFeedActivity(FeedConnectJobInfo cInfo) {
+        Map<String, String> feedActivityDetails = new HashMap<String, String>();
+
+        if (cInfo.getCollectLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS,
+                    StringUtils.join(cInfo.getCollectLocations().iterator(), ','));
+        }
+
+        if (cInfo.getComputeLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS,
+                    StringUtils.join(cInfo.getComputeLocations().iterator(), ','));
+        }
+
+        if (cInfo.getStorageLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS,
+                    StringUtils.join(cInfo.getStorageLocations().iterator(), ','));
+        }
+
+        String policyName = cInfo.getFeedPolicy().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
+
+        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, (new Date()).toString());
+        try {
+            FeedActivity feedActivity = new FeedActivity(cInfo.getConnectionId().getFeedId().getDataverse(), cInfo
+                    .getConnectionId().getFeedId().getFeedName(), cInfo.getConnectionId().getDatasetName(),
+                    feedActivityDetails);
+            CentralFeedManager.getInstance().getFeedLoadManager()
+                    .reportFeedActivity(cInfo.getConnectionId(), feedActivity);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to register feed activity for " + cInfo + " " + e.getMessage());
+            }
+
+        }
+
+    }
+
+    public void deregisterFeedActivity(FeedConnectJobInfo cInfo) {
+        try {
+            CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(cInfo.getConnectionId());
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to deregister feed activity for " + cInfo + " " + e.getMessage());
+            }
+        }
+    }
+
+    public void removeFeedJointsPostPipelineTermination(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        List<IFeedJoint> feedJoints = feedPipeline.get(connectionId.getFeedId());
+
+        IFeedJoint sourceJoint = cInfo.getSourceFeedJoint();
+        List<FeedConnectionId> all = sourceJoint.getReceivers();
+        boolean removeSourceJoint = all.size() < 2;
+        if (removeSourceJoint) {
+            feedJoints.remove(sourceJoint);
+        }
+
+        IFeedJoint computeJoint = cInfo.getComputeFeedJoint();
+        if (computeJoint != null && computeJoint.getReceivers().size() < 2) {
+            feedJoints.remove(computeJoint);
+        }
+    }
+
+    public boolean isRegisteredFeedJob(JobId jobId) {
+        return jobInfos.get(jobId) != null;
+    }
+
+    public List<String> getFeedComputeLocations(FeedId feedId) {
+        List<IFeedJoint> feedJoints = feedPipeline.get(feedId);
+        for (IFeedJoint joint : feedJoints) {
+            if (joint.getFeedJointKey().getFeedId().equals(feedId)) {
+                return connectJobInfos.get(joint.getProvider()).getComputeLocations();
+            }
+        }
+        return null;
+    }
+
+    public List<String> getFeedStorageLocations(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getStorageLocations();
+    }
+
+    public List<String> getFeedCollectLocations(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getCollectLocations();
+    }
+
+    public List<String> getFeedIntakeLocations(FeedId feedId) {
+        return intakeJobInfos.get(feedId).getIntakeLocation();
+    }
+
+    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getJobId();
+    }
+
+    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
+        if (subscribers == null) {
+            subscribers = new ArrayList<IFeedLifecycleEventSubscriber>();
+            eventSubscribers.put(connectionId, subscribers);
+        }
+        subscribers.add(subscriber);
+    }
+
+    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
+        if (subscribers != null) {
+            subscribers.remove(subscriber);
+        }
+    }
+
+    //============================
+
+    public boolean isFeedPointAvailable(FeedJointKey feedJointKey) {
+        List<IFeedJoint> joints = feedPipeline.get(feedJointKey.getFeedId());
+        if (joints != null && !joints.isEmpty()) {
+            for (IFeedJoint joint : joints) {
+                if (joint.getFeedJointKey().equals(feedJointKey)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    public Collection<IFeedJoint> getFeedIntakeJoints() {
+        List<IFeedJoint> intakeFeedPoints = new ArrayList<IFeedJoint>();
+        for (FeedIntakeInfo info : intakeJobInfos.values()) {
+            intakeFeedPoints.add(info.getIntakeFeedJoint());
+        }
+        return intakeFeedPoints;
+    }
+
+    public IFeedJoint getFeedJoint(FeedJointKey feedPointKey) {
+        List<IFeedJoint> joints = feedPipeline.get(feedPointKey.getFeedId());
+        if (joints != null && !joints.isEmpty()) {
+            for (IFeedJoint joint : joints) {
+                if (joint.getFeedJointKey().equals(feedPointKey)) {
+                    return joint;
+                }
+            }
+        }
+        return null;
+    }
+
+    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
+        IFeedJoint feedJoint = getFeedJoint(feedJointKey);
+        if (feedJoint != null) {
+            return feedJoint;
+        } else {
+            String jointKeyString = feedJointKey.getStringRep();
+            List<IFeedJoint> jointsOnPipeline = feedPipeline.get(feedJointKey.getFeedId());
+            IFeedJoint candidateJoint = null;
+            if (jointsOnPipeline != null) {
+                for (IFeedJoint joint : jointsOnPipeline) {
+                    if (jointKeyString.contains(joint.getFeedJointKey().getStringRep())) {
+                        if (candidateJoint == null) {
+                            candidateJoint = joint;
+                        } else if (joint.getFeedJointKey().getStringRep()
+                                .contains(candidateJoint.getFeedJointKey().getStringRep())) { // found feed point is a super set of the earlier find
+                            candidateJoint = joint;
+                        }
+                    }
+                }
+            }
+            return candidateJoint;
+        }
+    }
+
+    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getSpec();
+    }
+
+    public IFeedJoint getFeedPoint(FeedId sourceFeedId, IFeedJoint.FeedJointType type) {
+        List<IFeedJoint> joints = feedPipeline.get(sourceFeedId);
+        for (IFeedJoint joint : joints) {
+            if (joint.getType().equals(type)) {
+                return joint;
+            }
+        }
+        return null;
+    }
+
+    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId);
+    }
+
+    private void setLocations(FeedConnectJobInfo cInfo) {
+        JobSpecification jobSpec = cInfo.getSpec();
+
+        List<OperatorDescriptorId> collectOperatorIds = new ArrayList<OperatorDescriptorId>();
+        List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
+        List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
+
+        Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
+        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
+            IOperatorDescriptor opDesc = entry.getValue();
+            IOperatorDescriptor actualOp = null;
+            if (opDesc instanceof FeedMetaOperatorDescriptor) {
+                actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
+            } else {
+                actualOp = opDesc;
+            }
+
+            if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
+                AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
+                IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
+                boolean computeOp = false;
+                for (IPushRuntimeFactory rf : runtimeFactories) {
+                    if (rf instanceof AssignRuntimeFactory) {
+                        IConnectorDescriptor connDesc = jobSpec.getOperatorInputMap().get(op.getOperatorId()).get(0);
+                        IOperatorDescriptor sourceOp = jobSpec.getConnectorOperatorMap().get(connDesc.getConnectorId())
+                                .getLeft().getLeft();
+                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
+                            computeOp = true;
+                            break;
+                        }
+                    }
+                }
+                if (computeOp) {
+                    computeOperatorIds.add(entry.getKey());
+                }
+            } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
+                storageOperatorIds.add(entry.getKey());
+            } else if (actualOp instanceof FeedCollectOperatorDescriptor) {
+                collectOperatorIds.add(entry.getKey());
+            }
+        }
+
+        try {
+            IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+            JobInfo info = hcc.getJobInfo(cInfo.getJobId());
+            List<String> collectLocations = new ArrayList<String>();
+            for (OperatorDescriptorId collectOpId : collectOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(collectOpId);
+                int nOperatorInstances = operatorLocations.size();
+                for (int i = 0; i < nOperatorInstances; i++) {
+                    collectLocations.add(operatorLocations.get(i));
+                }
+            }
+
+            List<String> computeLocations = new ArrayList<String>();
+            for (OperatorDescriptorId computeOpId : computeOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
+                if (operatorLocations != null) {
+                    int nOperatorInstances = operatorLocations.size();
+                    for (int i = 0; i < nOperatorInstances; i++) {
+                        computeLocations.add(operatorLocations.get(i));
+                    }
+                } else {
+                    computeLocations.clear();
+                    computeLocations.addAll(collectLocations);
+                }
+            }
+
+            List<String> storageLocations = new ArrayList<String>();
+            for (OperatorDescriptorId storageOpId : storageOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
+                if (operatorLocations == null) {
+                    continue;
+                }
+                int nOperatorInstances = operatorLocations.size();
+                for (int i = 0; i < nOperatorInstances; i++) {
+                    storageLocations.add(operatorLocations.get(i));
+                }
+            }
+            cInfo.setCollectLocations(collectLocations);
+            cInfo.setComputeLocations(computeLocations);
+            cInfo.setStorageLocations(storageLocations);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+    }
+}
\ No newline at end of file
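
[Editorial note, not part of the patch] The setLocations() method above recovers node locations per operator by flattening the partition-to-node map returned for each operator id into a list ordered by partition index. A minimal, self-contained sketch of just that flattening step, with made-up node names:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class OperatorLocationFlattening {

        // Flatten {partition -> nodeId} into a list ordered by partition index,
        // mirroring what setLocations() does for collect/compute/storage operators.
        static List<String> flatten(Map<Integer, String> operatorLocations) {
            List<String> locations = new ArrayList<String>();
            int nOperatorInstances = operatorLocations.size();
            for (int i = 0; i < nOperatorInstances; i++) {
                locations.add(operatorLocations.get(i));
            }
            return locations;
        }

        public static void main(String[] args) {
            Map<Integer, String> partitionToNode = new HashMap<Integer, String>();
            partitionToNode.put(0, "nc1");
            partitionToNode.put(1, "nc2");
            System.out.println(flatten(partitionToNode)); // prints [nc1, nc2]
        }
    }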

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java
new file mode 100644
index 0000000..43f227d
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+
+public class FeedJoint implements IFeedJoint {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJoint.class.getName());
+
+    /** A unique key associated with the feed point **/
+    private final FeedJointKey key;
+
+    /** The state associated with the FeedJoint **/
+    private State state;
+
+    /** A list of subscribers that receive data from this FeedJoint **/
+    private final List<FeedConnectionId> receivers;
+
+    /** The feedId on which the feedPoint resides **/
+    private final FeedId ownerFeedId;
+
+    /** A list of feed subscription requests submitted for subscribing to the FeedPoint's data **/
+    private final List<FeedConnectionRequest> connectionRequests;
+
+    private final ConnectionLocation connectionLocation;
+
+    private final FeedJointType type;
+
+    private FeedConnectionId provider;
+
+    public FeedJoint(FeedJointKey key, FeedId ownerFeedId, ConnectionLocation subscriptionLocation, FeedJointType type,
+            FeedConnectionId provider) {
+        this.key = key;
+        this.ownerFeedId = ownerFeedId;
+        this.type = type;
+        this.receivers = new ArrayList<FeedConnectionId>();
+        this.state = State.CREATED;
+        this.connectionLocation = subscriptionLocation;
+        this.connectionRequests = new ArrayList<FeedConnectionRequest>();
+        this.provider = provider;
+    }
+
+    @Override
+    public int hashCode() {
+        return key.hashCode();
+    }
+
+    public void addReceiver(FeedConnectionId connectionId) {
+        receivers.add(connectionId);
+    }
+
+    public void removeReceiver(FeedConnectionId connectionId) {
+        receivers.remove(connectionId);
+    }
+
+    public synchronized void addConnectionRequest(FeedConnectionRequest request) {
+        connectionRequests.add(request);
+        if (state.equals(State.ACTIVE)) {
+            handlePendingConnectionRequest();
+        }
+    }
+
+    public synchronized void setState(State state) {
+        if (this.state.equals(state)) {
+            return;
+        }
+        this.state = state;
+        if (this.state.equals(State.ACTIVE)) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Feed joint " + this + " is now " + State.ACTIVE);
+            }
+            handlePendingConnectionRequest();
+        }
+    }
+
+    private void handlePendingConnectionRequest() {
+        for (FeedConnectionRequest connectionRequest : connectionRequests) {
+            FeedConnectionId connectionId = new FeedConnectionId(connectionRequest.getReceivingFeedId(),
+                    connectionRequest.getTargetDataset());
+            try {
+                FeedLifecycleListener.INSTANCE.submitFeedConnectionRequest(this, connectionRequest);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Submitted feed connection request " + connectionRequest + " at feed joint " + this);
+                }
+                addReceiver(connectionId);
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unsuccessful attempt at submitting connection request " + connectionRequest
+                            + " at feed joint " + this + ". Message " + e.getMessage());
+                }
+                e.printStackTrace();
+            }
+        }
+        connectionRequests.clear();
+    }
+
+    public FeedConnectionId getReceiver(FeedConnectionId connectionId) {
+        for (FeedConnectionId cid : receivers) {
+            if (cid.equals(connectionId)) {
+                return cid;
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public String toString() {
+        return key.toString() + " [" + connectionLocation + "]" + "[" + state + "]";
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null) {
+            return false;
+        }
+        if (o == this) {
+            return true;
+        }
+        if (!(o instanceof FeedJoint)) {
+            return false;
+        }
+        return ((FeedJoint) o).getFeedJointKey().equals(this.key);
+    }
+
+    public FeedId getOwnerFeedId() {
+        return ownerFeedId;
+    }
+
+    public List<FeedConnectionRequest> getConnectionRequests() {
+        return connectionRequests;
+    }
+
+    public ConnectionLocation getConnectionLocation() {
+        return connectionLocation;
+    }
+
+    public FeedJointType getType() {
+        return type;
+    }
+
+    @Override
+    public FeedConnectionId getProvider() {
+        return provider;
+    }
+
+    public List<FeedConnectionId> getReceivers() {
+        return receivers;
+    }
+
+    public FeedJointKey getKey() {
+        return key;
+    }
+
+    public synchronized State getState() {
+        return state;
+    }
+
+    @Override
+    public FeedJointKey getFeedJointKey() {
+        return key;
+    }
+
+}
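
[Editorial note, not part of the patch] FeedJoint buffers connection requests that arrive before the joint becomes ACTIVE and flushes them on the state transition. A stripped-down sketch of that buffering pattern using only JDK types; the string payload and the println "submit" step are placeholders, not the real FeedConnectionRequest/FeedLifecycleListener API:

    import java.util.ArrayList;
    import java.util.List;

    public class DeferredRequestBuffer {

        enum State { CREATED, ACTIVE }

        private State state = State.CREATED;
        private final List<String> pendingRequests = new ArrayList<String>();

        // Requests arriving before activation are queued; once ACTIVE they are handled immediately.
        synchronized void addRequest(String request) {
            pendingRequests.add(request);
            if (state == State.ACTIVE) {
                flush();
            }
        }

        synchronized void setState(State newState) {
            if (state == newState) {
                return;
            }
            state = newState;
            if (state == State.ACTIVE) {
                flush();
            }
        }

        private void flush() {
            for (String request : pendingRequests) {
                System.out.println("submitting " + request);
            }
            pendingRequests.clear();
        }

        public static void main(String[] args) {
            DeferredRequestBuffer joint = new DeferredRequestBuffer();
            joint.addRequest("connect feed A to dataset X"); // buffered while CREATED
            joint.setState(State.ACTIVE);                    // flushes the buffered request
        }
    }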

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java
new file mode 100644
index 0000000..aac3675
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java
@@ -0,0 +1,499 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.common.api.IClusterManagementWork;
+import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
+import org.apache.asterix.common.api.IClusterManagementWorkResponse;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener;
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.management.FeedCollectInfo;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
+import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.lang.common.statement.DataverseDecl;
+import org.apache.asterix.lang.common.statement.DisconnectFeedStatement;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.metadata.MetadataManager;
+import org.apache.asterix.metadata.MetadataTransactionContext;
+import org.apache.asterix.metadata.cluster.AddNodeWork;
+import org.apache.asterix.metadata.cluster.ClusterManager;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+/**
+ * A listener that subscribes to events associated with cluster membership
+ * (nodes joining/leaving the cluster) and job lifecycle (start/end of a job).
+ * Subscription to such events allows keeping track of feed ingestion jobs and
+ * take any corrective action that may be required when a node involved in a
+ * feed leaves the cluster.
+ */
+public class FeedLifecycleListener implements IFeedLifecycleListener {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedLifecycleListener.class.getName());
+
+    public static FeedLifecycleListener INSTANCE = new FeedLifecycleListener();
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    private final LinkedBlockingQueue<Message> jobEventInbox;
+    private final LinkedBlockingQueue<IClusterManagementWorkResponse> responseInbox;
+    private final Map<FeedCollectInfo, List<String>> dependentFeeds = new HashMap<FeedCollectInfo, List<String>>();
+    private final Map<FeedConnectionId, LinkedBlockingQueue<String>> feedReportQueue;
+    private final FeedJobNotificationHandler feedJobNotificationHandler;
+    private final FeedWorkRequestResponseHandler feedWorkRequestResponseHandler;
+    private final ExecutorService executorService;
+
+    private ClusterState state;
+
+    private FeedLifecycleListener() {
+        this.jobEventInbox = new LinkedBlockingQueue<Message>();
+        this.feedJobNotificationHandler = new FeedJobNotificationHandler(jobEventInbox);
+        this.responseInbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
+        this.feedWorkRequestResponseHandler = new FeedWorkRequestResponseHandler(responseInbox);
+        this.feedReportQueue = new HashMap<FeedConnectionId, LinkedBlockingQueue<String>>();
+        this.executorService = Executors.newCachedThreadPool();
+        this.executorService.execute(feedJobNotificationHandler);
+        this.executorService.execute(feedWorkRequestResponseHandler);
+        ClusterManager.INSTANCE.registerSubscriber(this);
+        this.state = AsterixClusterProperties.INSTANCE.getState();
+    }
+
+    @Override
+    public void notifyJobStart(JobId jobId) throws HyracksException {
+        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
+            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_START));
+        }
+    }
+
+    @Override
+    public void notifyJobFinish(JobId jobId) throws HyracksException {
+        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
+            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_FINISH));
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("NO NEED TO NOTIFY JOB FINISH!");
+            }
+        }
+    }
+
+    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedConnectJobInfo(connectionId);
+    }
+
+    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
+            IIntakeProgressTracker feedIntakeProgressTracker) {
+        feedJobNotificationHandler.registerFeedIntakeProgressTracker(connectionId, feedIntakeProgressTracker);
+    }
+
+    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
+        feedJobNotificationHandler.deregisterFeedIntakeProgressTracker(connectionId);
+    }
+
+    public void updateTrackingInformation(StorageReportFeedMessage srm) {
+        feedJobNotificationHandler.updateTrackingInformation(srm);
+    }
+
+    /*
+     * Traverse job specification to categorize job as a feed intake job or a feed collection job
+     */
+    @Override
+    public void notifyJobCreation(JobId jobId, IActivityClusterGraphGeneratorFactory acggf) throws HyracksException {
+        JobSpecification spec = acggf.getJobSpecification();
+        FeedConnectionId feedConnectionId = null;
+        Map<String, String> feedPolicy = null;
+        for (IOperatorDescriptor opDesc : spec.getOperatorMap().values()) {
+            if (opDesc instanceof FeedCollectOperatorDescriptor) {
+                feedConnectionId = ((FeedCollectOperatorDescriptor) opDesc).getFeedConnectionId();
+                feedPolicy = ((FeedCollectOperatorDescriptor) opDesc).getFeedPolicyProperties();
+                feedJobNotificationHandler.registerFeedCollectionJob(
+                        ((FeedCollectOperatorDescriptor) opDesc).getSourceFeedId(), feedConnectionId, jobId, spec,
+                        feedPolicy);
+                break;
+            } else if (opDesc instanceof FeedIntakeOperatorDescriptor) {
+                feedJobNotificationHandler.registerFeedIntakeJob(((FeedIntakeOperatorDescriptor) opDesc).getFeedId(),
+                        jobId, spec);
+                break;
+            }
+        }
+    }
+
+    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
+        feedJobNotificationHandler.setJobState(connectionId, jobState);
+    }
+
+    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedJobState(connectionId);
+    }
+
+    public static class Message {
+        public JobId jobId;
+
+        public enum MessageKind {
+            JOB_START,
+            JOB_FINISH
+        }
+
+        public MessageKind messageKind;
+
+        public Message(JobId jobId, MessageKind msgKind) {
+            this.jobId = jobId;
+            this.messageKind = msgKind;
+        }
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
+        Set<IClusterManagementWork> workToBeDone = new HashSet<IClusterManagementWork>();
+
+        Collection<FeedIntakeInfo> intakeInfos = feedJobNotificationHandler.getFeedIntakeInfos();
+        Collection<FeedConnectJobInfo> connectJobInfos = feedJobNotificationHandler.getFeedConnectInfos();
+
+        Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<String, List<FeedJobInfo>>();
+
+        for (String deadNode : deadNodeIds) {
+            for (FeedIntakeInfo intakeInfo : intakeInfos) {
+                if (intakeInfo.getIntakeLocation().contains(deadNode)) {
+                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
+                    if (infos == null) {
+                        infos = new ArrayList<FeedJobInfo>();
+                        impactedJobs.put(deadNode, infos);
+                    }
+                    infos.add(intakeInfo);
+                    intakeInfo.setState(FeedJobState.UNDER_RECOVERY);
+                }
+            }
+
+            for (FeedConnectJobInfo connectInfo : connectJobInfos) {
+                if (connectInfo.getStorageLocations().contains(deadNode)) {
+                    continue;
+                }
+                if (connectInfo.getComputeLocations().contains(deadNode)
+                        || connectInfo.getCollectLocations().contains(deadNode)) {
+                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
+                    if (infos == null) {
+                        infos = new ArrayList<FeedJobInfo>();
+                        impactedJobs.put(deadNode, infos);
+                    }
+                    infos.add(connectInfo);
+                    connectInfo.setState(FeedJobState.UNDER_RECOVERY);
+                    feedJobNotificationHandler.deregisterFeedActivity(connectInfo);
+                }
+            }
+
+        }
+
+        if (impactedJobs.size() > 0) {
+            AddNodeWork addNodeWork = new AddNodeWork(deadNodeIds, deadNodeIds.size(), this);
+            feedWorkRequestResponseHandler.registerFeedWork(addNodeWork.getWorkId(), impactedJobs);
+            workToBeDone.add(addNodeWork);
+        }
+        return workToBeDone;
+
+    }
+
+    public static class FailureReport {
+
+        private final List<Pair<FeedConnectJobInfo, List<String>>> recoverableConnectJobs;
+        private final Map<IFeedJoint, List<String>> recoverableIntakeFeedIds;
+
+        public FailureReport(Map<IFeedJoint, List<String>> recoverableIntakeFeedIds,
+                List<Pair<FeedConnectJobInfo, List<String>>> recoverableSubscribers) {
+            this.recoverableConnectJobs = recoverableSubscribers;
+            this.recoverableIntakeFeedIds = recoverableIntakeFeedIds;
+        }
+
+        public List<Pair<FeedConnectJobInfo, List<String>>> getRecoverableSubscribers() {
+            return recoverableConnectJobs;
+        }
+
+        public Map<IFeedJoint, List<String>> getRecoverableIntakeFeedIds() {
+            return recoverableIntakeFeedIds;
+        }
+
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
+        ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(joinedNodeId + " joined the cluster. " + "Asterix state: " + newState);
+        }
+
+        boolean needToReActivateFeeds = !newState.equals(state) && (newState == ClusterState.ACTIVE);
+        if (needToReActivateFeeds) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(joinedNodeId + " Resuming loser feeds (if any)");
+            }
+            try {
+                FeedsActivator activator = new FeedsActivator();
+                (new Thread(activator)).start();
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Exception in resuming feeds" + e.getMessage());
+                }
+            }
+            state = newState;
+        } else {
+            List<FeedCollectInfo> feedsThatCanBeRevived = new ArrayList<FeedCollectInfo>();
+            for (Entry<FeedCollectInfo, List<String>> entry : dependentFeeds.entrySet()) {
+                List<String> requiredNodeIds = entry.getValue();
+                if (requiredNodeIds.contains(joinedNodeId)) {
+                    requiredNodeIds.remove(joinedNodeId);
+                    if (requiredNodeIds.isEmpty()) {
+                        feedsThatCanBeRevived.add(entry.getKey());
+                    }
+                }
+            }
+            if (!feedsThatCanBeRevived.isEmpty()) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Resuming feeds after rejoining of node " + joinedNodeId);
+                }
+                FeedsActivator activator = new FeedsActivator(feedsThatCanBeRevived);
+                (new Thread(activator)).start();
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
+        try {
+            responseInbox.put(response);
+        } catch (InterruptedException e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Interrupted exception");
+            }
+        }
+    }
+
+    @Override
+    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
+        switch (newState) {
+            case ACTIVE:
+                if (previousState.equals(ClusterState.UNUSABLE)) {
+                    try {
+                        // TODO: Figure out why code was commented
+                        // FeedsActivator activator = new FeedsActivator();
+                        // (new Thread(activator)).start();
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Exception in resuming feeds" + e.getMessage());
+                        }
+                    }
+                }
+                break;
+            default:
+                break;
+        }
+
+    }
+
+    public static class FeedsDeActivator implements Runnable {
+
+        private List<FeedConnectJobInfo> failedConnectjobs;
+
+        public FeedsDeActivator(List<FeedConnectJobInfo> failedConnectjobs) {
+            this.failedConnectjobs = failedConnectjobs;
+        }
+
+        @Override
+        public void run() {
+            for (FeedConnectJobInfo failedConnectJob : failedConnectjobs) {
+                endFeed(failedConnectJob);
+            }
+        }
+
+        private void endFeed(FeedConnectJobInfo cInfo) {
+            MetadataTransactionContext ctx = null;
+            PrintWriter writer = new PrintWriter(System.out, true);
+            SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+
+            try {
+                ctx = MetadataManager.INSTANCE.beginTransaction();
+                FeedId feedId = cInfo.getConnectionId().getFeedId();
+                DisconnectFeedStatement stmt = new DisconnectFeedStatement(new Identifier(feedId.getDataverse()),
+                        new Identifier(feedId.getFeedName()), new Identifier(cInfo.getConnectionId().getDatasetName()));
+                List<Statement> statements = new ArrayList<Statement>();
+                DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(feedId.getDataverse()));
+                statements.add(dataverseDecl);
+                statements.add(stmt);
+                QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+                translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                        QueryTranslator.ResultDelivery.SYNC);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("End irrecoverable feed: " + cInfo.getConnectionId());
+                }
+                MetadataManager.INSTANCE.commitTransaction(ctx);
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Exception in ending loser feed: " + cInfo.getConnectionId() + " Exception "
+                            + e.getMessage());
+                }
+                e.printStackTrace();
+                try {
+                    MetadataManager.INSTANCE.abortTransaction(ctx);
+                } catch (Exception e2) {
+                    e2.addSuppressed(e);
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Exception in aborting transaction! System is in inconsistent state");
+                    }
+                }
+
+            }
+
+        }
+    }
+
+    public void submitFeedConnectionRequest(IFeedJoint feedPoint, FeedConnectionRequest subscriptionRequest)
+            throws Exception {
+        feedJobNotificationHandler.submitFeedConnectionRequest(feedPoint, subscriptionRequest);
+    }
+
+    @Override
+    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId) {
+        List<FeedConnectionId> connections = new ArrayList<FeedConnectionId>();
+        Collection<FeedConnectionId> activeConnections = feedJobNotificationHandler.getActiveFeedConnections();
+        if (feedId != null) {
+            for (FeedConnectionId connectionId : activeConnections) {
+                if (connectionId.getFeedId().equals(feedId)) {
+                    connections.add(connectionId);
+                }
+            }
+        } else {
+            connections.addAll(activeConnections);
+        }
+        return connections;
+    }
+
+    @Override
+    public List<String> getComputeLocations(FeedId feedId) {
+        return feedJobNotificationHandler.getFeedComputeLocations(feedId);
+    }
+
+    @Override
+    public List<String> getIntakeLocations(FeedId feedId) {
+        return feedJobNotificationHandler.getFeedIntakeLocations(feedId);
+    }
+
+    @Override
+    public List<String> getStoreLocations(FeedConnectionId feedConnectionId) {
+        return feedJobNotificationHandler.getFeedStorageLocations(feedConnectionId);
+    }
+
+    @Override
+    public List<String> getCollectLocations(FeedConnectionId feedConnectionId) {
+        return feedJobNotificationHandler.getFeedCollectLocations(feedConnectionId);
+    }
+
+    @Override
+    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.isFeedConnectionActive(connectionId);
+    }
+
+    public void reportPartialDisconnection(FeedConnectionId connectionId) {
+        feedJobNotificationHandler.removeFeedJointsPostPipelineTermination(connectionId);
+    }
+
+    public void registerFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+        feedReportQueue.put(feedId, queue);
+    }
+
+    public void deregisterFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+        feedReportQueue.remove(feedId);
+    }
+
+    public LinkedBlockingQueue<String> getFeedReportQueue(FeedConnectionId feedId) {
+        return feedReportQueue.get(feedId);
+    }
+
+    @Override
+    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.getAvailableFeedJoint(feedJointKey);
+    }
+
+    @Override
+    public boolean isFeedJointAvailable(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.isFeedPointAvailable(feedJointKey);
+    }
+
+    public void registerFeedJoint(IFeedJoint feedJoint) {
+        feedJobNotificationHandler.registerFeedJoint(feedJoint);
+    }
+
+    public IFeedJoint getFeedJoint(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.getFeedJoint(feedJointKey);
+    }
+
+    @Override
+    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        feedJobNotificationHandler.registerFeedEventSubscriber(connectionId, subscriber);
+    }
+
+    @Override
+    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        feedJobNotificationHandler.deregisterFeedEventSubscriber(connectionId, subscriber);
+
+    }
+
+    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getCollectJobSpecification(connectionId);
+    }
+
+    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedCollectJobId(connectionId);
+    }
+
+}
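
[Editorial note, not part of the patch] FeedLifecycleListener decouples Hyracks callbacks from feed bookkeeping by dropping Message objects into a LinkedBlockingQueue that a handler drains on an ExecutorService. A minimal sketch of that inbox pattern using only java.util.concurrent; the event strings stand in for the real Message class:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.LinkedBlockingQueue;

    public class JobEventInbox {

        public static void main(String[] args) throws InterruptedException {
            final LinkedBlockingQueue<String> inbox = new LinkedBlockingQueue<String>();
            ExecutorService executor = Executors.newCachedThreadPool();

            // Handler thread: blocks on the queue and processes events as they arrive.
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        while (true) {
                            String event = inbox.take();
                            System.out.println("handling " + event);
                        }
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // shutdownNow() interrupts the take()
                    }
                }
            });

            // Producer side: notifyJobStart/notifyJobFinish analogues simply enqueue and return.
            inbox.put("JOB_START:JID:1");
            inbox.put("JOB_FINISH:JID:1");

            Thread.sleep(100); // demo only: give the handler a moment before shutting down
            executor.shutdownNow();
        }
    }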

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java
new file mode 100644
index 0000000..ee4da11
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java
@@ -0,0 +1,302 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.api.IFeedLoadManager;
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedCongestionMessage;
+import org.apache.asterix.external.feed.message.FeedReportMessage;
+import org.apache.asterix.external.feed.message.PrepareStallMessage;
+import org.apache.asterix.external.feed.message.ScaleInReportMessage;
+import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.feed.watch.NodeLoadReport;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
+import org.apache.asterix.file.FeedOperations;
+import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedLoadManager implements IFeedLoadManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedLoadManager.class.getName());
+
+    private static final long MIN_MODIFICATION_INTERVAL = 180000; // 3 minutes
+    private final TreeSet<NodeLoadReport> nodeReports;
+    private final Map<FeedConnectionId, FeedActivity> feedActivities;
+    private final Map<String, Pair<Integer, Integer>> feedMetrics;
+
+    private FeedConnectionId lastModified;
+    private long lastModifiedTimestamp;
+
+    private static final int UNKNOWN = -1;
+
+    public FeedLoadManager() {
+        this.nodeReports = new TreeSet<NodeLoadReport>();
+        this.feedActivities = new HashMap<FeedConnectionId, FeedActivity>();
+        this.feedMetrics = new HashMap<String, Pair<Integer, Integer>>();
+    }
+
+    @Override
+    public void submitNodeLoadReport(NodeLoadReport report) {
+        nodeReports.remove(report);
+        nodeReports.add(report);
+    }
+
+    @Override
+    public void reportCongestion(FeedCongestionMessage message) throws AsterixException {
+        FeedRuntimeId runtimeId = message.getRuntimeId();
+        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
+        if (jobState == null
+                || (jobState.equals(FeedJobState.UNDER_RECOVERY))
+                || (message.getConnectionId().equals(lastModified) && System.currentTimeMillis()
+                        - lastModifiedTimestamp < MIN_MODIFICATION_INTERVAL)) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring congestion report from " + runtimeId);
+            }
+            return;
+        } else {
+            try {
+                FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
+                int inflowRate = message.getInflowRate();
+                int outflowRate = message.getOutflowRate();
+                List<String> currentComputeLocations = new ArrayList<String>();
+                currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message
+                        .getConnectionId().getFeedId()));
+                int computeCardinality = currentComputeLocations.size();
+                int requiredCardinality = (int) Math
+                        .ceil((double) ((computeCardinality * inflowRate) / (double) outflowRate)) + 5;
+                int additionalComputeNodes = requiredCardinality - computeCardinality;
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("INCREASING COMPUTE CARDINALITY from " + computeCardinality + " by "
+                            + additionalComputeNodes);
+                }
+
+                List<String> helperComputeNodes = getNodeForSubstitution(additionalComputeNodes);
+
+                // Step 1) Alter the original feed job to adjust the cardinality
+                JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
+                        .getConnectionId());
+                helperComputeNodes.addAll(currentComputeLocations);
+                List<String> newLocations = new ArrayList<String>();
+                newLocations.addAll(currentComputeLocations);
+                newLocations.addAll(helperComputeNodes);
+                FeedMetadataUtil.increaseCardinality(jobSpec, FeedRuntimeType.COMPUTE, requiredCardinality, newLocations);
+
+                // Step 2) gracefully terminate the data flow (prepare-to-stall, then terminate)
+                gracefullyTerminateDataFlow(message.getConnectionId(), Integer.MAX_VALUE);
+
+                // Step 3) run the altered job specification 
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("New Job after adjusting to the workload " + jobSpec);
+                }
+
+                Thread.sleep(10000);
+                runJob(jobSpec, false);
+                lastModified = message.getConnectionId();
+                lastModifiedTimestamp = System.currentTimeMillis();
+
+            } catch (Exception e) {
+                e.printStackTrace();
+                if (LOGGER.isLoggable(Level.SEVERE)) {
+                    LOGGER.severe("Unable to form the required job for scaling in/out: " + e.getMessage());
+                }
+                throw new AsterixException(e);
+            }
+        }
+    }
+
+    @Override
+    public void submitScaleInPossibleReport(ScaleInReportMessage message) throws Exception {
+        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
+        if (jobState == null || (jobState.equals(FeedJobState.UNDER_RECOVERY))) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("JobState information for job " + "[" + message.getConnectionId() + "]" + " not found ");
+            }
+            return;
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Processing scale-in message " + message);
+            }
+            FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
+            JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
+                    .getConnectionId());
+            int reducedCardinality = message.getReducedCardinaliy();
+            List<String> currentComputeLocations = new ArrayList<String>();
+            currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message.getConnectionId()
+                    .getFeedId()));
+            FeedMetadataUtil.decreaseComputeCardinality(jobSpec, FeedRuntimeType.COMPUTE, reducedCardinality,
+                    currentComputeLocations);
+
+            gracefullyTerminateDataFlow(message.getConnectionId(), reducedCardinality - 1);
+            Thread.sleep(3000);
+            JobId newJobId = runJob(jobSpec, false);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Launch modified job" + "[" + newJobId + "]" + "for scale-in \n" + jobSpec);
+            }
+
+        }
+    }
+
+    private void gracefullyTerminateDataFlow(FeedConnectionId connectionId, int computePartitionRetainLimit)
+            throws Exception {
+        // Step 1) send prepare-to-stall message
+        PrepareStallMessage stallMessage = new PrepareStallMessage(connectionId, computePartitionRetainLimit);
+        List<String> intakeLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+        List<String> computeLocations = FeedLifecycleListener.INSTANCE.getComputeLocations(connectionId.getFeedId());
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+
+        Set<String> operatorLocations = new HashSet<String>();
+
+        operatorLocations.addAll(intakeLocations);
+        operatorLocations.addAll(computeLocations);
+        operatorLocations.addAll(storageLocations);
+
+        JobSpecification messageJobSpec = FeedOperations.buildPrepareStallMessageJob(stallMessage, operatorLocations);
+        runJob(messageJobSpec, true);
+
+        // Step 2)
+        TerminateDataFlowMessage terminateMesg = new TerminateDataFlowMessage(connectionId);
+        messageJobSpec = FeedOperations.buildTerminateFlowMessageJob(terminateMesg, intakeLocations);
+        runJob(messageJobSpec, true);
+    }
+
+    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobId jobId = hcc.startJob(spec);
+        if (waitForCompletion) {
+            hcc.waitForCompletion(jobId);
+        }
+        return jobId;
+    }
+
+    @Override
+    public void submitFeedRuntimeReport(FeedReportMessage report) {
+        String key = "" + report.getConnectionId() + ":" + report.getRuntimeId().getFeedRuntimeType();
+        Pair<Integer, Integer> value = feedMetrics.get(key);
+        if (value == null) {
+            value = new Pair<Integer, Integer>(report.getValue(), 1);
+            feedMetrics.put(key, value);
+        } else {
+            value.first = value.first + report.getValue();
+            value.second = value.second + 1;
+        }
+    }
+
+    @Override
+    public int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType) {
+        int rVal;
+        String key = "" + connectionId + ":" + runtimeType;
+        Pair<Integer, Integer> value = feedMetrics.get(key);
+        if (value == null) {
+            rVal = UNKNOWN;
+        } else {
+            rVal = value.first / value.second;
+        }
+        return rVal;
+    }
+
+    private List<String> getNodeForSubstitution(int nRequired) {
+        List<String> nodeIds = new ArrayList<String>();
+        Iterator<NodeLoadReport> it = null;
+        int nAdded = 0;
+        while (nAdded < nRequired) {
+            it = nodeReports.iterator();
+            while (it.hasNext() && nAdded < nRequired) {
+                nodeIds.add(it.next().getNodeId());
+                nAdded++;
+            }
+        }
+        return nodeIds;
+    }
+
+    @Override
+    public synchronized List<String> getNodes(int required) {
+        Iterator<NodeLoadReport> it;
+        List<String> allocated = new ArrayList<String>();
+        while (allocated.size() < required) {
+            it = nodeReports.iterator();
+            while (it.hasNext() && allocated.size() < required) {
+                allocated.add(it.next().getNodeId());
+            }
+        }
+        return allocated;
+    }
+
+    @Override
+    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception {
+        System.out.println("Throttling Enabled for " + mesg.getConnectionId() + " " + mesg.getFeedRuntimeId());
+        FeedConnectionId connectionId = mesg.getConnectionId();
+        List<String> destinationLocations = new ArrayList<String>();
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+
+        destinationLocations.addAll(storageLocations);
+        destinationLocations.addAll(collectLocations);
+        JobSpecification messageJobSpec = FeedOperations.buildNotifyThrottlingEnabledMessageJob(mesg,
+                destinationLocations);
+        runJob(messageJobSpec, true);
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Acking disabled for " + mesg.getConnectionId() + " in view of activated throttling");
+        }
+        IFeedTrackingManager trackingManager = CentralFeedManager.getInstance().getFeedTrackingManager();
+        trackingManager.disableAcking(connectionId);
+    }
+
+    @Override
+    public void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity) {
+        feedActivities.put(connectionId, activity);
+    }
+
+    @Override
+    public FeedActivity getFeedActivity(FeedConnectionId connectionId) {
+        return feedActivities.get(connectionId);
+    }
+
+    @Override
+    public Collection<FeedActivity> getFeedActivities() {
+        return feedActivities.values();
+    }
+
+    @Override
+    public void removeFeedActivity(FeedConnectionId connectionId) {
+        feedActivities.remove(connectionId);
+    }
+}
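
[Editorial note, not part of the patch] The congestion path in FeedLoadManager sizes the rebuilt compute stage as ceil(computeCardinality * inflowRate / outflowRate) plus a fixed headroom of 5 nodes. A small worked example of that arithmetic with made-up rates:

    public class ComputeCardinalitySizing {

        // Mirrors the sizing arithmetic in reportCongestion(): scale the compute stage by the
        // inflow/outflow ratio and add a fixed headroom of 5 nodes.
        static int requiredCardinality(int computeCardinality, int inflowRate, int outflowRate) {
            return (int) Math.ceil((double) (computeCardinality * inflowRate) / (double) outflowRate) + 5;
        }

        public static void main(String[] args) {
            int current = 4;      // current compute partitions (made up)
            int inflow = 300;     // records/sec arriving at the stage (made up)
            int outflow = 200;    // records/sec the stage can absorb (made up)
            int required = requiredCardinality(current, inflow, outflow); // ceil(6.0) + 5 = 11
            System.out.println("additional compute nodes needed: " + (required - current)); // 7
        }
    }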

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java
new file mode 100644
index 0000000..66eca0c
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.util.logging.Level;
+
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.api.IFeedLoadManager;
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.api.IFeedMessage.MessageType;
+import org.apache.asterix.external.feed.message.FeedCongestionMessage;
+import org.apache.asterix.external.feed.message.FeedReportMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
+import org.apache.asterix.external.feed.message.MessageReceiver;
+import org.apache.asterix.external.feed.message.ScaleInReportMessage;
+import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.watch.NodeLoadReport;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.feed.CentralFeedManager.AQLExecutor;
+import org.apache.asterix.hyracks.bootstrap.FeedBootstrap;
+
+public class FeedMessageReceiver extends MessageReceiver<String> {
+
+    private static boolean initialized;
+
+    private final IFeedLoadManager feedLoadManager;
+    private final IFeedTrackingManager feedTrackingManager;
+
+    public FeedMessageReceiver(CentralFeedManager centralFeedManager) {
+        this.feedLoadManager = centralFeedManager.getFeedLoadManager();
+        this.feedTrackingManager = centralFeedManager.getFeedTrackingManager();
+    }
+
+    @Override
+    public void processMessage(String message) throws Exception {
+        JSONObject obj = new JSONObject(message);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Received message " + obj);
+        }
+        MessageType messageType = MessageType.valueOf(obj.getString(FeedConstants.MessageConstants.MESSAGE_TYPE));
+        switch (messageType) {
+            case XAQL:
+                if (!initialized) {
+                    FeedBootstrap.setUpInitialArtifacts();
+                    initialized = true;
+                }
+                AQLExecutor.executeAQL(obj.getString(FeedConstants.MessageConstants.AQL));
+                break;
+            case CONGESTION:
+                feedLoadManager.reportCongestion(FeedCongestionMessage.read(obj));
+                break;
+            case FEED_REPORT:
+                feedLoadManager.submitFeedRuntimeReport(FeedReportMessage.read(obj));
+                break;
+            case NODE_REPORT:
+                feedLoadManager.submitNodeLoadReport(NodeLoadReport.read(obj));
+                break;
+            case SCALE_IN_REQUEST:
+                feedLoadManager.submitScaleInPossibleReport(ScaleInReportMessage.read(obj));
+                break;
+            case STORAGE_REPORT:
+                FeedLifecycleListener.INSTANCE.updateTrackingInformation(StorageReportFeedMessage.read(obj));
+                break;
+            case COMMIT_ACK:
+                feedTrackingManager.submitAckReport(FeedTupleCommitAckMessage.read(obj));
+                break;
+            case THROTTLING_ENABLED:
+                feedLoadManager.reportThrottlingEnabled(ThrottlingEnabledFeedMessage.read(obj));
+                break;
+            default:
+                break;
+        }
+
+    }
+}
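
processMessage above is a parse-then-dispatch loop over a JSON control message. A self-contained sketch of the same shape follows; the "message-type" key and the trimmed-down enum are illustrative stand-ins, not the actual FeedConstants values or the full MessageType set:

    import org.json.JSONException;
    import org.json.JSONObject;

    final class MessageDispatchSketch {

        // Illustrative subset of message kinds.
        enum MessageType {
            NODE_REPORT,
            CONGESTION,
            COMMIT_ACK
        }

        static void dispatch(String payload) throws JSONException {
            JSONObject obj = new JSONObject(payload);
            // "message-type" is a hypothetical key, not necessarily the literal behind
            // FeedConstants.MessageConstants.MESSAGE_TYPE.
            MessageType type = MessageType.valueOf(obj.getString("message-type"));
            switch (type) {
                case NODE_REPORT:
                    System.out.println("node load report: " + obj);
                    break;
                case CONGESTION:
                    System.out.println("congestion report: " + obj);
                    break;
                case COMMIT_ACK:
                    System.out.println("commit ack: " + obj);
                    break;
                default:
                    break;
            }
        }

        public static void main(String[] args) throws JSONException {
            JSONObject msg = new JSONObject();
            msg.put("message-type", "NODE_REPORT");
            msg.put("cpu-load", 0.42);
            dispatch(msg.toString());
        }
    }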


[18/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedJoint.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedJoint.java
deleted file mode 100644
index 59d807a..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedJoint.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.util.List;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedJointKey;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
-
-public interface IFeedJoint {
-
-    public enum FeedJointType {
-        /** Feed Joint is located at the intake stage of a primary feed **/
-        INTAKE,
-
-        /** Feed Joint is located at the compute stage of a primary/secondary feed **/
-        COMPUTE
-    }
-
-    public enum State {
-        /** Initial state of a feed joint post creation but prior to scheduling of corresponding Hyracks job. **/
-        CREATED,
-
-        /** State acquired post creation of Hyracks job and known physical locations of the joint **/
-        INITIALIZED,
-
-        /** State acquired post starting of Hyracks job at which point, data begins to flow through the joint **/
-        ACTIVE
-    }
-
-    /**
-     * @return the {@link State} associated with the FeedJoint
-     */
-    public State getState();
-
-    /**
-     * @return the {@link FeedJointType} associated with the FeedJoint
-     */
-    public FeedJointType getType();
-
-    /**
-     * @return the list of data receivers that are
-     *         receiving the data flowing through this FeedJoint
-     */
-    public List<FeedConnectionId> getReceivers();
-
-    /**
-     * @return the list of pending subscription requests ({@link FeedConnectionRequest}) submitted for data flowing through the FeedJoint
-     */
-    public List<FeedConnectionRequest> getConnectionRequests();
-
-    /**
-     * @return the subscription location {@link ConnectionLocation} associated with the FeedJoint
-     */
-    public ConnectionLocation getConnectionLocation();
-
-    /**
-     * @return the unique {@link FeedJointKey} associated with the FeedJoint
-     */
-    public FeedJointKey getFeedJointKey();
-
-    /**
-     * Returns the receiver {@link FeedConnectionId} registered under a given feed connection id.
-     *
-     * @param feedConnectionId
-     *            the unique id of a feed connection
-     * @return the registered {@link FeedConnectionId} instance
-     */
-    public FeedConnectionId getReceiver(FeedConnectionId feedConnectionId);
-
-    /**
-     * @param active
-     */
-    public void setState(State active);
-
-    /**
-     * Remove the subscriber from the set of registered subscribers to the FeedJoint
-     * 
-     * @param connectionId
-     *            the connectionId that needs to be removed
-     */
-    public void removeReceiver(FeedConnectionId connectionId);
-
-    public FeedId getOwnerFeedId();
-
-    /**
-     * Add a feed connectionId to the set of registered subscribers
-     * 
-     * @param connectionId
-     */
-    public void addReceiver(FeedConnectionId connectionId);
-
-    /**
-     * Add a feed subscription request {@link FeedConnectionRequest} for the FeedJoint
-     * 
-     * @param request
-     */
-    public void addConnectionRequest(FeedConnectionRequest request);
-
-    public FeedConnectionId getProvider();
-
-}
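
The interface removed above documents a joint that moves through CREATED, INITIALIZED and ACTIVE while keeping a registry of receivers. A small sketch of that lifecycle, using illustrative names and assuming transitions only move forward (the interface itself does not enforce this):

    import java.util.ArrayList;
    import java.util.List;

    final class FeedJointSketch {

        enum State { CREATED, INITIALIZED, ACTIVE }

        private State state = State.CREATED;
        private final List<String> receivers = new ArrayList<>();

        // Assumption: a joint never moves back to an earlier state.
        void setState(State next) {
            if (next.ordinal() < state.ordinal()) {
                throw new IllegalStateException("cannot move from " + state + " back to " + next);
            }
            state = next;
        }

        void addReceiver(String connectionId) {
            receivers.add(connectionId);
        }

        void removeReceiver(String connectionId) {
            receivers.remove(connectionId);
        }

        State getState() {
            return state;
        }

        List<String> getReceivers() {
            return receivers;
        }
    }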

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleEventSubscriber.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleEventSubscriber.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleEventSubscriber.java
deleted file mode 100644
index 94af74b..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleEventSubscriber.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-
-public interface IFeedLifecycleEventSubscriber {
-
-    public enum FeedLifecycleEvent {
-        FEED_INTAKE_STARTED,
-        FEED_COLLECT_STARTED,
-        FEED_INTAKE_FAILURE,
-        FEED_COLLECT_FAILURE,
-        FEED_ENDED
-    }
-
-    public void assertEvent(FeedLifecycleEvent event) throws AsterixException, InterruptedException;
-
-    public void handleFeedEvent(FeedLifecycleEvent event);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleIntakeEventSubscriber.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleIntakeEventSubscriber.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleIntakeEventSubscriber.java
deleted file mode 100644
index bff2589..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleIntakeEventSubscriber.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedIntakeInfo;
-
-public interface IFeedLifecycleIntakeEventSubscriber extends IFeedLifecycleEventSubscriber {
-
-    public void handleFeedEvent(FeedIntakeInfo iInfo, FeedLifecycleEvent event) throws AsterixException;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleListener.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleListener.java
deleted file mode 100644
index 0ae5f56..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLifecycleListener.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.util.List;
-
-import org.apache.asterix.common.api.IClusterEventsSubscriber;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedJointKey;
-import org.apache.hyracks.api.job.IJobLifecycleListener;
-
-public interface IFeedLifecycleListener extends IJobLifecycleListener, IClusterEventsSubscriber {
-
-    public enum ConnectionLocation {
-        SOURCE_FEED_INTAKE_STAGE,
-        SOURCE_FEED_COMPUTE_STAGE
-    }
-
-    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJoinKey);
-
-    public boolean isFeedJointAvailable(FeedJointKey feedJoinKey);
-
-    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId);
-
-    public List<String> getComputeLocations(FeedId feedId);
-
-    public List<String> getIntakeLocations(FeedId feedId);
-
-    public List<String> getStoreLocations(FeedConnectionId feedId);
-
-    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber);
-
-    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber);
-
-    public List<String> getCollectLocations(FeedConnectionId feedConnectionId);
-
-    boolean isFeedConnectionActive(FeedConnectionId connectionId);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLoadManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLoadManager.java
deleted file mode 100644
index 7baa229..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedLoadManager.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.util.Collection;
-import java.util.List;
-
-import org.json.JSONException;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedActivity;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.NodeLoadReport;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.message.FeedCongestionMessage;
-import org.apache.asterix.common.feeds.message.FeedReportMessage;
-import org.apache.asterix.common.feeds.message.ScaleInReportMessage;
-import org.apache.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-
-public interface IFeedLoadManager {
-
-    public void submitNodeLoadReport(NodeLoadReport report);
-
-    public void reportCongestion(FeedCongestionMessage message) throws JSONException, AsterixException;
-
-    public void submitFeedRuntimeReport(FeedReportMessage message);
-
-    public void submitScaleInPossibleReport(ScaleInReportMessage sm) throws AsterixException, Exception;
-
-    public List<String> getNodes(int required);
-
-    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception;
-
-    int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType);
-
-    void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity);
-
-    void removeFeedActivity(FeedConnectionId connectionId);
-    
-    public FeedActivity getFeedActivity(FeedConnectionId connectionId);
-
-    public Collection<FeedActivity> getFeedActivities();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedManager.java
deleted file mode 100644
index 768b11f..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedManager.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.config.AsterixFeedProperties;
-import org.apache.asterix.common.feeds.api.IFeedConnectionManager;
-import org.apache.asterix.common.feeds.api.IFeedMemoryManager;
-import org.apache.asterix.common.feeds.api.IFeedMessageService;
-import org.apache.asterix.common.feeds.api.IFeedMetadataManager;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-import org.apache.asterix.common.feeds.api.IFeedSubscriptionManager;
-
-/**
- * Provides access to services related to feed management within a node controller
- */
-public interface IFeedManager {
-
-    /**
-     * gets the nodeId associated with the host node controller
-     * 
-     * @return the nodeId associated with the host node controller
-     */
-    public String getNodeId();
-
-    /**
-     * gets the handle to the singleton instance of subscription manager
-     * 
-     * @return the singleton instance of subscription manager
-     * @see IFeedSubscriptionManager
-     */
-    public IFeedSubscriptionManager getFeedSubscriptionManager();
-
-    /**
-     * gets the handle to the singleton instance of connection manager
-     * 
-     * @return the singleton instance of connection manager
-     * @see IFeedConnectionManager
-     */
-    public IFeedConnectionManager getFeedConnectionManager();
-
-    /**
-     * gets the handle to the singleton instance of memory manager
-     * 
-     * @return the singleton instance of memory manager
-     * @see IFeedMemoryManager
-     */
-    public IFeedMemoryManager getFeedMemoryManager();
-
-    /**
-     * gets the handle to the singleton instance of feed metadata manager
-     * 
-     * @return the singleton instance of feed metadata manager
-     * @see IFeedMetadataManager
-     */
-    public IFeedMetadataManager getFeedMetadataManager();
-
-    /**
-     * gets the handle to the singleton instance of feed metric collector
-     * 
-     * @return the singleton instance of feed metric collector
-     * @see IFeedMetricCollector
-     */
-    public IFeedMetricCollector getFeedMetricCollector();
-
-    /**
-     * gets the handle to the singleton instance of feed message service
-     * 
-     * @return the singleton instance of feed message service
-     * @see IFeedMessageService
-     */
-    public IFeedMessageService getFeedMessageService();
-
-    /**
-     * gets the asterix configuration
-     * 
-     * @return asterix configuration
-     * @see AsterixFeedProperties
-     */
-    public AsterixFeedProperties getAsterixFeedProperties();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryComponent.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryComponent.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryComponent.java
deleted file mode 100644
index 817b750..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryComponent.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-/**
- * Represents an in-memory component required for storing frames that contain feed tuples.
- * The component's memory footprint is measured and regulated by the {@link IFeedMemoryManager}.
- * Any expansion in size is accounted and can be restricted by the {@link IFeedMemoryManager}
- **/
-public interface IFeedMemoryComponent {
-
-    public enum Type {
-
-        /** A pool of reusable frames **/
-        POOL,
-
-        /** An ordered list of frames **/
-        COLLECTION
-    }
-
-    /** Gets the unique id associated with the memory component **/
-    public int getComponentId();
-
-    /** Gets the type associated with the component. **/
-    public Type getType();
-
-    /** Gets the current size (number of allocated frames) of the component. **/
-    public int getTotalAllocation();
-
-    /**
-     * Expands this memory component by the specified number of frames
-     * 
-     * @param delta
-     *            the amount (measured in number of frames) by which this memory component
-     *            should be expanded
-     */
-    public void expand(int delta);
-
-    /** Clears the allocated frames as a step to reclaim the memory **/
-    public void reset();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryManager.java
deleted file mode 100644
index 4902606..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMemoryManager.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.api.IFeedMemoryComponent.Type;
-
-/**
- * Provides management of memory allocated for handling feed data flow through the node controller
- */
-public interface IFeedMemoryManager {
-
-    public static final int START_COLLECTION_SIZE = 20;
-    public static final int START_POOL_SIZE = 10;
-
-    /**
-     * Gets a memory component allocated from the feed memory budget
-     * 
-     * @param type
-     *            the kind of memory component that needs to be allocated
-     * @return
-     * @see Type
-     */
-    public IFeedMemoryComponent getMemoryComponent(Type type);
-
-    /**
-     * Expand a memory component by the default increment
-     * 
-     * @param memoryComponent
-     * @return true if the expansion succeeded
-     *         false if the requested expansion violates the configured budget
-     */
-    public boolean expandMemoryComponent(IFeedMemoryComponent memoryComponent);
-
-    /**
-     * Releases the given memory component to reclaim the memory allocated for the component
-     * 
-     * @param memoryComponent
-     *            the memory component that is being reclaimed/released
-     */
-    public void releaseMemoryComponent(IFeedMemoryComponent memoryComponent);
-
-}
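
The memory-manager contract removed above hands out components, grows them only while a budget permits (expandMemoryComponent returns false otherwise), and reclaims frames on release. A compact sketch of that budget check, with an illustrative frame budget rather than the configured AsterixDB value:

    final class FrameBudgetSketch {

        static final class Component {
            private int allocated;

            Component(int startFrames) {
                this.allocated = startFrames;
            }

            int getTotalAllocation() {
                return allocated;
            }

            void expand(int delta) {
                allocated += delta;
            }

            void reset() {
                allocated = 0;
            }
        }

        private final int budgetInFrames;
        private int usedFrames;

        FrameBudgetSketch(int budgetInFrames) {
            this.budgetInFrames = budgetInFrames;
        }

        synchronized Component getComponent(int startFrames) {
            usedFrames += startFrames;
            return new Component(startFrames);
        }

        // Mirrors expandMemoryComponent: refuse the request if it would exceed the budget.
        synchronized boolean expand(Component component, int delta) {
            if (usedFrames + delta > budgetInFrames) {
                return false;
            }
            usedFrames += delta;
            component.expand(delta);
            return true;
        }

        // Mirrors releaseMemoryComponent: return the frames and reset the component.
        synchronized void release(Component component) {
            usedFrames -= component.getTotalAllocation();
            component.reset();
        }
    }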

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessage.java
deleted file mode 100644
index 14b8e0a..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessage.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.io.Serializable;
-
-import org.apache.hyracks.api.dataflow.value.JSONSerializable;
-
-/**
- * A control message exchanged between {@link IFeedManager} and {@link CentralFeedManager} that requests an action or reports an event
- */
-public interface IFeedMessage extends Serializable, JSONSerializable {
-
-    public enum MessageType {
-        END,
-        XAQL,
-        FEED_REPORT,
-        NODE_REPORT,
-        STORAGE_REPORT,
-        CONGESTION,
-        PREPARE_STALL,
-        TERMINATE_FLOW,
-        SCALE_IN_REQUEST,
-        COMMIT_ACK,
-        COMMIT_ACK_RESPONSE,
-        THROTTLING_ENABLED
-    }
-
-    /**
-     * Gets the type associated with this message
-     * 
-     * @return MessageType type associated with this message
-     */
-    public MessageType getMessageType();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessageService.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessageService.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessageService.java
deleted file mode 100644
index 12f53be..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMessageService.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-/**
- * Provides the functionality of sending a message ({@code IFeedMessage}) to the {@code CentralFeedManager}
- */
-public interface IFeedMessageService extends IFeedService {
-
-    /**
-     * Sends a message ({@code IFeedMessage}) to the {@code CentralFeedManager} running at the CC
-     * The message is sent asynchronously.
-     * 
-     * @param message
-     *            the message to be sent
-     */
-    public void sendMessage(IFeedMessage message);
-}
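
The javadoc of the removed service promises that sendMessage returns immediately and delivery happens asynchronously. One common way to get that behaviour is a queue drained by a single worker thread; the sketch below uses a stubbed transport, since the real service's wire format and endpoint are not shown here:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.LinkedBlockingQueue;

    final class AsyncSenderSketch {

        private final BlockingQueue<String> outbox = new LinkedBlockingQueue<>();
        private final ExecutorService worker = Executors.newSingleThreadExecutor();

        void start() {
            worker.execute(() -> {
                try {
                    while (!Thread.currentThread().isInterrupted()) {
                        String message = outbox.take();
                        // A real service would write to the CentralFeedManager's socket here.
                        System.out.println("sending: " + message);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
        }

        // Non-blocking for the caller; the worker delivers in arrival order.
        void sendMessage(String message) {
            outbox.offer(message);
        }

        void stop() {
            worker.shutdownNow();
        }
    }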

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetadataManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetadataManager.java
deleted file mode 100644
index 127b97c..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetadataManager.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-
-public interface IFeedMetadataManager {
-
-    /**
-     * @param feedConnectionId
-     *            connection id corresponding to the feed connection
-     * @param tuple
-     *            the erroneous tuple that raised an exception
-     * @param message
-     *            the message corresponding to the exception being raised
-     * @param feedManager
-     * @throws AsterixException
-     */
-    public void logTuple(FeedConnectionId feedConnectionId, String tuple, String message, IFeedManager feedManager)
-            throws AsterixException;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetricCollector.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetricCollector.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetricCollector.java
deleted file mode 100644
index b78d81e..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedMetricCollector.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-
-public interface IFeedMetricCollector {
-
-    public enum ValueType {
-        CPU_USAGE,
-        INFLOW_RATE,
-        OUTFLOW_RATE
-    }
-
-    public enum MetricType {
-        AVG,
-        RATE
-    }
-
-    public boolean sendReport(int senderId, int value);
-
-    public int getMetric(int senderId);
-
-    public int getMetric(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType);
-
-    int createReportSender(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType,
-            MetricType metricType);
-
-    public void removeReportSender(int senderId);
-
-    public void resetReportSender(int senderId);
-
-}
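
The collector removed above distinguishes AVG and RATE metrics fed by sendReport calls. A sketch of both computations follows; the windowing policy is an assumption, since the collector's implementation is not part of this diff:

    final class MetricSketch {

        private long sum;
        private long count;
        private long windowStartMillis = System.currentTimeMillis();
        private long windowValue;

        void report(int value) {
            sum += value;
            count++;
            windowValue += value;
        }

        // AVG: running mean of all reported values.
        int average() {
            return count == 0 ? 0 : (int) (sum / count);
        }

        // RATE: reported volume per second since the window started, then reset the window.
        int rateAndReset() {
            long now = System.currentTimeMillis();
            long elapsedMillis = Math.max(1, now - windowStartMillis);
            int rate = (int) ((windowValue * 1000) / elapsedMillis);
            windowStartMillis = now;
            windowValue = 0;
            return rate;
        }
    }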

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedOperatorOutputSideHandler.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedOperatorOutputSideHandler.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedOperatorOutputSideHandler.java
deleted file mode 100644
index 5dec7e3..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedOperatorOutputSideHandler.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.hyracks.api.comm.IFrameWriter;
-
-public interface IFeedOperatorOutputSideHandler extends IFrameWriter {
-
-    public enum Type {
-        BASIC_FEED_OUTPUT_HANDLER,
-        DISTRIBUTE_FEED_OUTPUT_HANDLER,
-        COLLECT_TRANSFORM_FEED_OUTPUT_HANDLER
-    }
-
-    public FeedId getFeedId();
-
-    public Type getType();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedProvider.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedProvider.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedProvider.java
deleted file mode 100644
index 7565004..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedProvider.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.FeedId;
-
-public interface IFeedProvider {
-
-    public void subscribeFeed(FeedId sourceFeedId);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedRuntime.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedRuntime.java
deleted file mode 100644
index 4fcd631..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedRuntime.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeInputHandler;
-import org.apache.hyracks.api.comm.IFrameWriter;
-
-public interface IFeedRuntime {
-
-    public enum FeedRuntimeType {
-        INTAKE,
-        COLLECT,
-        COMPUTE_COLLECT,
-        COMPUTE,
-        STORE,
-        OTHER,
-        ETS,
-        JOIN
-    }
-
-    public enum Mode {
-        PROCESS,
-        SPILL,
-        PROCESS_SPILL,
-        DISCARD,
-        POST_SPILL_DISCARD,
-        PROCESS_BACKLOG,
-        STALL,
-        FAIL,
-        END
-    }
-
-    /**
-     * @return the unique runtime id associated with the feedRuntime
-     */
-    public FeedRuntimeId getRuntimeId();
-
-    /**
-     * @return the frame writer associated with the feed runtime.
-     */
-    public IFrameWriter getFeedFrameWriter();
-
-    public FeedRuntimeInputHandler getInputHandler();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedService.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedService.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedService.java
deleted file mode 100644
index ac51c95..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedService.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-public interface IFeedService {
-
-    public void start() throws Exception;
-
-    public void stop();
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedSubscriptionManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedSubscriptionManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedSubscriptionManager.java
deleted file mode 100644
index 91ac4c4..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedSubscriptionManager.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-
-public interface IFeedSubscriptionManager {
-
-    /**
-     * @param subscribableRuntime
-     */
-    public void registerFeedSubscribableRuntime(ISubscribableRuntime subscribableRuntime);
-
-    /**
-     * @param subscribableRuntimeId
-     */
-    public void deregisterFeedSubscribableRuntime(SubscribableFeedRuntimeId subscribableRuntimeId);
-
-    /**
-     * @param subscribableRuntimeId
-     * @return
-     */
-    public ISubscribableRuntime getSubscribableRuntime(SubscribableFeedRuntimeId subscribableRuntimeId);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedTrackingManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedTrackingManager.java
deleted file mode 100644
index 280c4d9..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedTrackingManager.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedTupleCommitAckMessage;
-
-public interface IFeedTrackingManager {
-
-    public void submitAckReport(FeedTupleCommitAckMessage ackMessage);
-
-    public void disableAcking(FeedConnectionId connectionId);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWork.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWork.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWork.java
deleted file mode 100644
index 06fcb76..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWork.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-/**
- * Represents a feed management task. The task is executed asynchronously.
- */
-public interface IFeedWork {
-
-    public Runnable getRunnable();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkEventListener.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkEventListener.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkEventListener.java
deleted file mode 100644
index 59b46d1..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkEventListener.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-/**
- * Provides a callback mechanism that is invoked for events related to
- * the execution of a feed management task.
- */
-public interface IFeedWorkEventListener {
-
-    /**
-     * A call back that is invoked after successful completion of a feed
-     * management task.
-     */
-    public void workCompleted(IFeedWork work);
-
-    /**
-     * A call back that is invoked after a failed execution of a feed
-     * management task.
-     * 
-     * @param e
-     *            exception encountered during execution of the task.
-     */
-    public void workFailed(IFeedWork work, Exception e);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkManager.java
deleted file mode 100644
index 31506ca..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedWorkManager.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-public interface IFeedWorkManager {
-
-    public void submitWork(IFeedWork work, IFeedWorkEventListener listener);
-
-}
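
Taken together, the removed IFeedWork, IFeedWorkEventListener and IFeedWorkManager describe asynchronous tasks with completion and failure callbacks. A sketch of that contract with simplified stand-in interfaces:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    final class WorkManagerSketch {

        interface Work {
            Runnable getRunnable();
        }

        interface WorkEventListener {
            void workCompleted(Work work);

            void workFailed(Work work, Exception e);
        }

        private final ExecutorService executor = Executors.newCachedThreadPool();

        // Run the task off-thread and report the outcome through the listener.
        void submitWork(Work work, WorkEventListener listener) {
            executor.execute(() -> {
                try {
                    work.getRunnable().run();
                    listener.workCompleted(work);
                } catch (Exception e) {
                    listener.workFailed(work, e);
                }
            });
        }

        void shutdown() {
            executor.shutdownNow();
        }
    }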

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFrameEventCallback.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFrameEventCallback.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFrameEventCallback.java
deleted file mode 100644
index 13a0af0..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFrameEventCallback.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-public interface IFrameEventCallback {
-
-    public enum FrameEvent {
-        FINISHED_PROCESSING,
-        PENDING_WORK_THRESHOLD_REACHED,
-        PENDING_WORK_DONE,
-        NO_OP,
-        FINISHED_PROCESSING_SPILLAGE
-    }
-
-    public void frameEvent(FrameEvent frameEvent);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePostProcessor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePostProcessor.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePostProcessor.java
deleted file mode 100644
index ed74037..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePostProcessor.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.nio.ByteBuffer;
-
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public interface IFramePostProcessor {
-
-    public void postProcessFrame(ByteBuffer frame, FrameTupleAccessor frameAccessor);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePreprocessor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePreprocessor.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePreprocessor.java
deleted file mode 100644
index 59a6c97..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFramePreprocessor.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.nio.ByteBuffer;
-
-public interface IFramePreprocessor {
-
-    public void preProcess(ByteBuffer frame) throws Exception;
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IIntakeProgressTracker.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IIntakeProgressTracker.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IIntakeProgressTracker.java
deleted file mode 100644
index f2c9f63..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IIntakeProgressTracker.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.util.Map;
-
-public interface IIntakeProgressTracker {
-
-    public void configure(Map<String, String> configuration);
-
-    public void notifyIngestedTupleTimestamp(long timestamp);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IMessageReceiver.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IMessageReceiver.java
deleted file mode 100644
index bc86e86..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IMessageReceiver.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-public interface IMessageReceiver<T> {
-
-    public void sendMessage(T message);
-
-    public void close(boolean processPending);
-
-    public void start();
-}

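A minimal sketch of a receiver implementation that simply buffers incoming messages in a queue (illustrative only, not part of this patch):

    import java.util.concurrent.LinkedBlockingQueue;

    import org.apache.asterix.common.feeds.api.IMessageReceiver;

    // Illustrative receiver that buffers incoming messages until a consumer takes them.
    public class QueueingMessageReceiver implements IMessageReceiver<String> {

        private final LinkedBlockingQueue<String> inbox = new LinkedBlockingQueue<>();
        private volatile boolean open;

        @Override
        public void start() {
            open = true;
        }

        @Override
        public void sendMessage(String message) {
            if (open) {
                inbox.add(message);
            }
        }

        @Override
        public void close(boolean processPending) {
            open = false;
            if (!processPending) {
                inbox.clear();   // drop anything still queued when asked not to process pending messages
            }
        }

        public String take() throws InterruptedException {
            return inbox.take();
        }
    }
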
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscribableRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscribableRuntime.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscribableRuntime.java
deleted file mode 100644
index 14b89f7..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscribableRuntime.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.util.List;
-
-import org.apache.asterix.common.feeds.CollectionRuntime;
-import org.apache.asterix.common.feeds.DistributeFeedFrameWriter;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-
-/**
- * Represent a feed runtime whose output can be routed along other parallel path(s).
- */
-public interface ISubscribableRuntime extends IFeedRuntime {
-
-    /**
-     * @param collectionRuntime
-     * @throws Exception
-     */
-    public void subscribeFeed(FeedPolicyAccessor fpa, CollectionRuntime collectionRuntime) throws Exception;
-
-    /**
-     * @param collectionRuntime
-     * @throws Exception
-     */
-    public void unsubscribeFeed(CollectionRuntime collectionRuntime) throws Exception;
-
-    /**
-     * @return
-     * @throws Exception
-     */
-    public List<ISubscriberRuntime> getSubscribers();
-
-    /**
-     * @return
-     */
-    public DistributeFeedFrameWriter getFeedFrameWriter();
-
-    /**
-     * @return
-     */
-    public RecordDescriptor getRecordDescriptor();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriberRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriberRuntime.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriberRuntime.java
deleted file mode 100644
index 3b2157f..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriberRuntime.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.util.Map;
-
-import org.apache.asterix.common.feeds.FeedFrameCollector;
-
-public interface ISubscriberRuntime {
-
-    public Map<String, String> getFeedPolicy();
-
-    public FeedFrameCollector getFrameCollector();
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriptionProvider.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriptionProvider.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriptionProvider.java
deleted file mode 100644
index b9264b8..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ISubscriptionProvider.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.FeedId;
-
-public interface ISubscriptionProvider {
-
-    public void subscribeFeed(FeedId sourceFeedId, FeedId recipientFeedId);
-
-    public void unsubscribeFeed(FeedId sourceFeedId, FeedId recipientFeedId);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ITupleTrackingFeedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ITupleTrackingFeedAdapter.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ITupleTrackingFeedAdapter.java
deleted file mode 100644
index 4067508..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ITupleTrackingFeedAdapter.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-public interface ITupleTrackingFeedAdapter extends IDataSourceAdapter {
-
-    public void tuplePersistedTimeCallback(long timestamp);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/EndFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/EndFeedMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/EndFeedMessage.java
deleted file mode 100644
index 84db620..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/EndFeedMessage.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.message;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-
-/**
- * A feed control message indicating the need to end the feed. This message is dispatched
- * to all locations that host an operator involved in the feed pipeline.
- */
-public class EndFeedMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedId sourceFeedId;
-
-    private final FeedConnectionId connectionId;
-
-    private final FeedRuntimeType sourceRuntimeType;
-
-    private final boolean completeDisconnection;
-
-    private final EndMessageType endMessageType;
-
-    public enum EndMessageType {
-        DISCONNECT_FEED,
-        DISCONTINUE_SOURCE
-    }
-
-    public EndFeedMessage(FeedConnectionId connectionId, FeedRuntimeType sourceRuntimeType, FeedId sourceFeedId,
-            boolean completeDisconnection, EndMessageType endMessageType) {
-        super(MessageType.END);
-        this.connectionId = connectionId;
-        this.sourceRuntimeType = sourceRuntimeType;
-        this.sourceFeedId = sourceFeedId;
-        this.completeDisconnection = completeDisconnection;
-        this.endMessageType = endMessageType;
-    }
-
-    @Override
-    public String toString() {
-        return MessageType.END.name() + "  " + connectionId + " [" + sourceRuntimeType + "] ";
-    }
-
-    public FeedRuntimeType getSourceRuntimeType() {
-        return sourceRuntimeType;
-    }
-
-    public FeedId getSourceFeedId() {
-        return sourceFeedId;
-    }
-
-    public boolean isCompleteDisconnection() {
-        return completeDisconnection;
-    }
-
-    public EndMessageType getEndMessageType() {
-        return endMessageType;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        return obj;
-    }
-
-    public FeedConnectionId getFeedConnectionId() {
-        return connectionId;
-    }
-
-}

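A hypothetical construction of this message (illustrative only, not part of this patch; the dataverse, feed and dataset names are made up, and FeedRuntimeType.COMPUTE is assumed to be one of the enum's constants):

    import org.apache.asterix.common.feeds.FeedConnectionId;
    import org.apache.asterix.common.feeds.FeedId;
    import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
    import org.apache.asterix.common.feeds.message.EndFeedMessage;
    import org.json.JSONObject;

    public class EndFeedMessageExample {
        public static void main(String[] args) throws Exception {
            FeedId sourceFeedId = new FeedId("MyDataverse", "TwitterFeed");
            FeedConnectionId connectionId = new FeedConnectionId(sourceFeedId, "Tweets");
            // COMPUTE is an assumed FeedRuntimeType constant; substitute the stage being ended.
            EndFeedMessage end = new EndFeedMessage(connectionId, FeedRuntimeType.COMPUTE, sourceFeedId,
                    true, EndFeedMessage.EndMessageType.DISCONNECT_FEED);
            JSONObject json = end.toJSON();   // carries the dataverse, feed and dataset names
            System.out.println(json);
        }
    }
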
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedCongestionMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedCongestionMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedCongestionMessage.java
deleted file mode 100644
index c9e0fbd..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedCongestionMessage.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.message;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedConstants.MessageConstants;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-
-public class FeedCongestionMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-    private final FeedRuntimeId runtimeId;
-    private int inflowRate;
-    private int outflowRate;
-    private Mode mode;
-
-    public FeedCongestionMessage(FeedConnectionId connectionId, FeedRuntimeId runtimeId, int inflowRate,
-            int outflowRate, Mode mode) {
-        super(MessageType.CONGESTION);
-        this.connectionId = connectionId;
-        this.runtimeId = runtimeId;
-        this.inflowRate = inflowRate;
-        this.outflowRate = outflowRate;
-        this.mode = mode;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeId.getFeedRuntimeType());
-        obj.put(FeedConstants.MessageConstants.OPERAND_ID, runtimeId.getOperandId());
-        obj.put(FeedConstants.MessageConstants.PARTITION, runtimeId.getPartition());
-        obj.put(FeedConstants.MessageConstants.INFLOW_RATE, inflowRate);
-        obj.put(FeedConstants.MessageConstants.OUTFLOW_RATE, outflowRate);
-        obj.put(FeedConstants.MessageConstants.MODE, mode);
-        return obj;
-    }
-
-    public FeedRuntimeId getRuntimeId() {
-        return runtimeId;
-    }
-
-    public int getInflowRate() {
-        return inflowRate;
-    }
-
-    public int getOutflowRate() {
-        return outflowRate;
-    }
-
-    public static FeedCongestionMessage read(JSONObject obj) throws JSONException {
-        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
-                obj.getString(FeedConstants.MessageConstants.FEED));
-        FeedConnectionId connectionId = new FeedConnectionId(feedId,
-                obj.getString(FeedConstants.MessageConstants.DATASET));
-        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.valueOf(obj
-                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE)),
-                obj.getInt(FeedConstants.MessageConstants.PARTITION),
-                obj.getString(FeedConstants.MessageConstants.OPERAND_ID));
-        Mode mode = Mode.valueOf(obj.getString(MessageConstants.MODE));
-        return new FeedCongestionMessage(connectionId, runtimeId,
-                obj.getInt(FeedConstants.MessageConstants.INFLOW_RATE),
-                obj.getInt(FeedConstants.MessageConstants.OUTFLOW_RATE), mode);
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public Mode getMode() {
-        return mode;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedMessage.java
deleted file mode 100644
index 11b1839..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedMessage.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.message;
-
-import org.apache.asterix.common.feeds.api.IFeedMessage;
-import org.apache.hyracks.api.dataflow.value.JSONSerializable;
-
-/**
- * A control message that can be sent to the runtime instance of a
- * feed's adapter.
- */
-public abstract class FeedMessage implements IFeedMessage, JSONSerializable {
-
-    private static final long serialVersionUID = 1L;
-
-    protected final MessageType messageType;
-
-    public FeedMessage(MessageType messageType) {
-        this.messageType = messageType;
-    }
-
-    public MessageType getMessageType() {
-        return messageType;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedReportMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedReportMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedReportMessage.java
deleted file mode 100644
index 1b14855..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/FeedReportMessage.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.common.feeds.message;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedConstants.MessageConstants;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector.ValueType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-
-public class FeedReportMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-    private final FeedRuntimeId runtimeId;
-    private final ValueType valueType;
-    private int value;
-
-    public FeedReportMessage(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType, int value) {
-        super(MessageType.FEED_REPORT);
-        this.connectionId = connectionId;
-        this.runtimeId = runtimeId;
-        this.valueType = valueType;
-        this.value = value;
-    }
-
-    public void reset(int value) {
-        this.value = value;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeId.getFeedRuntimeType());
-        obj.put(FeedConstants.MessageConstants.PARTITION, runtimeId.getPartition());
-        obj.put(FeedConstants.MessageConstants.VALUE_TYPE, valueType);
-        obj.put(FeedConstants.MessageConstants.VALUE, value);
-        return obj;
-    }
-
-    public static FeedReportMessage read(JSONObject obj) throws JSONException {
-        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
-                obj.getString(FeedConstants.MessageConstants.FEED));
-        FeedConnectionId connectionId = new FeedConnectionId(feedId,
-                obj.getString(FeedConstants.MessageConstants.DATASET));
-        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.valueOf(obj
-                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE)),
-                obj.getInt(FeedConstants.MessageConstants.PARTITION), FeedConstants.MessageConstants.NOT_APPLICABLE);
-        ValueType type = ValueType.valueOf(obj.getString(MessageConstants.VALUE_TYPE));
-        int value = Integer.parseInt(obj.getString(MessageConstants.VALUE));
-        return new FeedReportMessage(connectionId, runtimeId, type, value);
-    }
-
-    public int getValue() {
-        return value;
-    }
-
-    public void setValue(int value) {
-        this.value = value;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public FeedRuntimeId getRuntimeId() {
-        return runtimeId;
-    }
-
-    public ValueType getValueType() {
-        return valueType;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/NodeReportMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/NodeReportMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/NodeReportMessage.java
deleted file mode 100644
index b45a966..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/NodeReportMessage.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.message;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.api.IFeedMessage;
-
-public class NodeReportMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private double cpuLoad;
-    private double usedHeap;
-    private int nRuntimes;
-
-    public NodeReportMessage(float cpuLoad, long usedHeap, int nRuntimes) {
-        super(IFeedMessage.MessageType.NODE_REPORT);
-        this.usedHeap = usedHeap;
-        this.cpuLoad = cpuLoad;
-        this.nRuntimes = nRuntimes;
-    }
-
-    public void reset(double cpuLoad, double usedHeap, int nRuntimes) {
-        this.cpuLoad = cpuLoad;
-        this.usedHeap = usedHeap;
-        this.nRuntimes = nRuntimes;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.CPU_LOAD, cpuLoad);
-        obj.put(FeedConstants.MessageConstants.HEAP_USAGE, usedHeap);
-        obj.put(FeedConstants.MessageConstants.N_RUNTIMES, nRuntimes);
-        return obj;
-    }
-
-    public double getCpuLoad() {
-        return cpuLoad;
-    }
-
-    public double getUsedHeap() {
-        return usedHeap;
-    }
-
-    public int getnRuntimes() {
-        return nRuntimes;
-    }
-
-}

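A small sketch showing how a node report might be built and serialized (illustrative only, not part of this patch; the load figures are made up, whereas in the runtime they would come from the node's own monitoring):

    import org.apache.asterix.common.feeds.message.NodeReportMessage;
    import org.json.JSONObject;

    public class NodeReportExample {
        public static void main(String[] args) throws Exception {
            // 75% CPU load, 512 MB used heap, 8 active feed runtimes (sample values).
            NodeReportMessage report = new NodeReportMessage(0.75f, 512L * 1024 * 1024, 8);
            JSONObject json = report.toJSON();   // CPU_LOAD, HEAP_USAGE and N_RUNTIMES keys
            System.out.println(json);
            // The same instance can be reused for the next reporting cycle.
            report.reset(0.40, 256L * 1024 * 1024, 8);
        }
    }
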
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ScaleInReportMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ScaleInReportMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ScaleInReportMessage.java
deleted file mode 100644
index 6204704..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ScaleInReportMessage.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.message;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-
-/**
- * A feed control message indicating the need to scale in a stage of the feed ingestion pipeline.
- * Currently, scaling-in of the compute stage is supported.
- **/
-public class ScaleInReportMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-
-    private final FeedRuntimeType runtimeType;
-
-    private int currentCardinality;
-
-    private int reducedCardinaliy;
-
-    public ScaleInReportMessage(FeedConnectionId connectionId, FeedRuntimeType runtimeType, int currentCardinality,
-            int reducedCardinaliy) {
-        super(MessageType.SCALE_IN_REQUEST);
-        this.connectionId = connectionId;
-        this.runtimeType = runtimeType;
-        this.currentCardinality = currentCardinality;
-        this.reducedCardinaliy = reducedCardinaliy;
-    }
-
-    @Override
-    public String toString() {
-        return MessageType.SCALE_IN_REQUEST.name() + "  " + connectionId + " [" + runtimeType + "] "
-                + " currentCardinality " + currentCardinality + " reducedCardinality " + reducedCardinaliy;
-    }
-
-    public FeedRuntimeType getRuntimeType() {
-        return runtimeType;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeType);
-        obj.put(FeedConstants.MessageConstants.CURRENT_CARDINALITY, currentCardinality);
-        obj.put(FeedConstants.MessageConstants.REDUCED_CARDINALITY, reducedCardinaliy);
-        return obj;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public static ScaleInReportMessage read(JSONObject obj) throws JSONException {
-        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
-                obj.getString(FeedConstants.MessageConstants.FEED));
-        FeedConnectionId connectionId = new FeedConnectionId(feedId,
-                obj.getString(FeedConstants.MessageConstants.DATASET));
-        FeedRuntimeType runtimeType = FeedRuntimeType.valueOf(obj
-                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE));
-        return new ScaleInReportMessage(connectionId, runtimeType,
-                obj.getInt(FeedConstants.MessageConstants.CURRENT_CARDINALITY),
-                obj.getInt(FeedConstants.MessageConstants.REDUCED_CARDINALITY));
-    }
-
-    public void reset(int currentCardinality, int reducedCardinaliy) {
-        this.currentCardinality = currentCardinality;
-        this.reducedCardinaliy = reducedCardinaliy;
-    }
-
-    public int getCurrentCardinality() {
-        return currentCardinality;
-    }
-
-    public void setCurrentCardinality(int currentCardinality) {
-        this.currentCardinality = currentCardinality;
-    }
-
-    public int getReducedCardinaliy() {
-        return reducedCardinaliy;
-    }
-
-    public void setReducedCardinaliy(int reducedCardinaliy) {
-        this.reducedCardinaliy = reducedCardinaliy;
-    }
-
-}



[13/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/RemoteSocketMessageListener.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/RemoteSocketMessageListener.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/RemoteSocketMessageListener.java
new file mode 100644
index 0000000..0749f82
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/RemoteSocketMessageListener.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public class RemoteSocketMessageListener {
+
+    private static final Logger LOGGER = Logger.getLogger(RemoteSocketMessageListener.class.getName());
+
+    private final String host;
+    private final int port;
+    private final LinkedBlockingQueue<String> outbox;
+    private final ExecutorService executorService = Executors.newFixedThreadPool(10);
+
+    private RemoteMessageListenerServer listenerServer;
+
+    public RemoteSocketMessageListener(String host, int port, LinkedBlockingQueue<String> outbox) {
+        this.host = host;
+        this.port = port;
+        this.outbox = outbox;
+    }
+
+    public void stop() {
+        if (!executorService.isShutdown()) {
+            executorService.shutdownNow();
+        }
+        listenerServer.stop();
+
+    }
+
+    public void start() throws IOException {
+        listenerServer = new RemoteMessageListenerServer(host, port, outbox);
+        executorService.execute(listenerServer);
+    }
+
+    private static class RemoteMessageListenerServer implements Runnable {
+
+        private final String host;
+        private final int port;
+        private final LinkedBlockingQueue<String> outbox;
+        private Socket client;
+
+        public RemoteMessageListenerServer(String host, int port, LinkedBlockingQueue<String> outbox) {
+            this.host = host;
+            this.port = port;
+            this.outbox = outbox;
+        }
+
+        public void stop() {
+            try {
+                client.close();
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+
+        @Override
+        public void run() {
+            char EOL = (char) "\n".getBytes()[0];
+            Socket client = null;
+            try {
+                client = new Socket(host, port);
+                InputStream in = client.getInputStream();
+                CharBuffer buffer = CharBuffer.allocate(5000);
+                char ch;
+                while (true) {
+                    ch = (char) in.read();
+                    if ((ch) == -1) {
+                        break;
+                    }
+                    while (ch != EOL) {
+                        buffer.put(ch);
+                        ch = (char) in.read();
+                    }
+                    buffer.flip();
+                    String s = new String(buffer.array());
+                    synchronized (outbox) {
+                        outbox.add(s + "\n");
+                    }
+                    buffer.position(0);
+                    buffer.limit(5000);
+                }
+
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unable to start Remote Message listener" + client);
+                }
+            } finally {
+                if (client != null && !client.isClosed()) {
+                    try {
+                        client.close();
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }
+    }
+
+    public static interface IMessageAnalyzer {
+
+        /**
+         * @return
+         */
+        public LinkedBlockingQueue<String> getMessageQueue();
+
+    }
+
+}

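A minimal usage sketch for the listener above (illustrative only, not part of this patch; the host, port and timeout are placeholders):

    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.TimeUnit;

    import org.apache.asterix.external.feed.message.RemoteSocketMessageListener;

    public class RemoteListenerExample {
        public static void main(String[] args) throws Exception {
            LinkedBlockingQueue<String> outbox = new LinkedBlockingQueue<>();
            // Host/port of the remote process that writes newline-terminated messages.
            RemoteSocketMessageListener listener = new RemoteSocketMessageListener("localhost", 9090, outbox);
            listener.start();                                    // connects and begins reading on a pooled thread
            String message = outbox.poll(10, TimeUnit.SECONDS);  // each received line is queued with a trailing "\n"
            System.out.println("received: " + message);
            listener.stop();
        }
    }
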
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ScaleInReportMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ScaleInReportMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ScaleInReportMessage.java
new file mode 100644
index 0000000..62aba04
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ScaleInReportMessage.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.util.FeedConstants;
+
+/**
+ * A feed control message indicating the need to scale in a stage of the feed ingestion pipeline.
+ * Currently, scaling-in of the compute stage is supported.
+ **/
+public class ScaleInReportMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+
+    private final FeedRuntimeType runtimeType;
+
+    private int currentCardinality;
+
+    private int reducedCardinaliy;
+
+    public ScaleInReportMessage(FeedConnectionId connectionId, FeedRuntimeType runtimeType, int currentCardinality,
+            int reducedCardinaliy) {
+        super(MessageType.SCALE_IN_REQUEST);
+        this.connectionId = connectionId;
+        this.runtimeType = runtimeType;
+        this.currentCardinality = currentCardinality;
+        this.reducedCardinaliy = reducedCardinaliy;
+    }
+
+    @Override
+    public String toString() {
+        return MessageType.SCALE_IN_REQUEST.name() + "  " + connectionId + " [" + runtimeType + "] "
+                + " currentCardinality " + currentCardinality + " reducedCardinality " + reducedCardinaliy;
+    }
+
+    public FeedRuntimeType getRuntimeType() {
+        return runtimeType;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeType);
+        obj.put(FeedConstants.MessageConstants.CURRENT_CARDINALITY, currentCardinality);
+        obj.put(FeedConstants.MessageConstants.REDUCED_CARDINALITY, reducedCardinaliy);
+        return obj;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public static ScaleInReportMessage read(JSONObject obj) throws JSONException {
+        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
+                obj.getString(FeedConstants.MessageConstants.FEED));
+        FeedConnectionId connectionId = new FeedConnectionId(feedId,
+                obj.getString(FeedConstants.MessageConstants.DATASET));
+        FeedRuntimeType runtimeType = FeedRuntimeType.valueOf(obj
+                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE));
+        return new ScaleInReportMessage(connectionId, runtimeType,
+                obj.getInt(FeedConstants.MessageConstants.CURRENT_CARDINALITY),
+                obj.getInt(FeedConstants.MessageConstants.REDUCED_CARDINALITY));
+    }
+
+    public void reset(int currentCardinality, int reducedCardinaliy) {
+        this.currentCardinality = currentCardinality;
+        this.reducedCardinaliy = reducedCardinaliy;
+    }
+
+    public int getCurrentCardinality() {
+        return currentCardinality;
+    }
+
+    public void setCurrentCardinality(int currentCardinality) {
+        this.currentCardinality = currentCardinality;
+    }
+
+    public int getReducedCardinaliy() {
+        return reducedCardinaliy;
+    }
+
+    public void setReducedCardinaliy(int reducedCardinaliy) {
+        this.reducedCardinaliy = reducedCardinaliy;
+    }
+
+}

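A sketch of the JSON round trip this message supports (illustrative only, not part of this patch; the names are made up, and FeedRuntimeType.COMPUTE is assumed to exist, per the javadoc's note that the compute stage is the supported target):

    import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.message.ScaleInReportMessage;
    import org.json.JSONObject;

    public class ScaleInExample {
        public static void main(String[] args) throws Exception {
            FeedConnectionId connectionId =
                    new FeedConnectionId(new FeedId("MyDataverse", "TwitterFeed"), "Tweets");
            // Request to shrink the (assumed) COMPUTE stage from 4 partitions to 2.
            ScaleInReportMessage request = new ScaleInReportMessage(connectionId, FeedRuntimeType.COMPUTE, 4, 2);
            JSONObject json = request.toJSON();                              // serialized for transport
            ScaleInReportMessage decoded = ScaleInReportMessage.read(json);  // reconstructed on the receiving side
            System.out.println(decoded);                                     // uses the toString() defined above
        }
    }
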
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/SocketMessageListener.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/SocketMessageListener.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/SocketMessageListener.java
new file mode 100644
index 0000000..a1a0ad5
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/SocketMessageListener.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IMessageReceiver;
+
+/**
+ * Listens for messages at a configured port and redirects them to
+ * an instance of {@code IMessageReceiver}.
+ * Messages may arrive in parallel from multiple senders. Each sender is handled by
+ * a respective instance of {@code ClientHandler}.
+ */
+public class SocketMessageListener {
+
+    private static final Logger LOGGER = Logger.getLogger(SocketMessageListener.class.getName());
+
+    private final IMessageReceiver<String> messageReceiver;
+    private final MessageListenerServer listenerServer;
+
+    private ExecutorService executorService = Executors.newFixedThreadPool(10);
+
+    public SocketMessageListener(int port, IMessageReceiver<String> messageReceiver) {
+        this.messageReceiver = messageReceiver;
+        this.listenerServer = new MessageListenerServer(port, messageReceiver);
+    }
+
+    public void stop() throws IOException {
+        listenerServer.stop();
+        messageReceiver.close(false);
+        if (!executorService.isShutdown()) {
+            executorService.shutdownNow();
+        }
+    }
+
+    public void start() {
+        messageReceiver.start();
+        executorService.execute(listenerServer);
+    }
+
+    private static class MessageListenerServer implements Runnable {
+
+        private final int port;
+        private final IMessageReceiver<String> messageReceiver;
+        private ServerSocket server;
+        private final Executor executor;
+
+        public MessageListenerServer(int port, IMessageReceiver<String> messageReceiver) {
+            this.port = port;
+            this.messageReceiver = messageReceiver;
+            this.executor = Executors.newCachedThreadPool();
+        }
+
+        public void stop() throws IOException {
+            server.close();
+        }
+
+        @Override
+        public void run() {
+            Socket client = null;
+            try {
+                server = new ServerSocket(port);
+                while (true) {
+                    client = server.accept();
+                    ClientHandler handler = new ClientHandler(client, messageReceiver);
+                    executor.execute(handler);
+                }
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unable to start Message listener" + server);
+                }
+            } finally {
+                if (server != null) {
+                    try {
+                        server.close();
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }
+
+        private static class ClientHandler implements Runnable {
+
+            private static final char EOL = (char) "\n".getBytes()[0];
+
+            private final Socket client;
+            private final IMessageReceiver<String> messageReceiver;
+
+            public ClientHandler(Socket client, IMessageReceiver<String> messageReceiver) {
+                this.client = client;
+                this.messageReceiver = messageReceiver;
+            }
+
+            @Override
+            public void run() {
+                try {
+                    InputStream in = client.getInputStream();
+                    CharBuffer buffer = CharBuffer.allocate(5000);
+                    char ch;
+                    while (true) {
+                        ch = (char) in.read();
+                        if ((ch) == -1) {
+                            break;
+                        }
+                        while (ch != EOL) {
+                            buffer.put(ch);
+                            ch = (char) in.read();
+                        }
+                        buffer.flip();
+                        String s = new String(buffer.array(), 0, buffer.limit());
+                        messageReceiver.sendMessage(s + "\n");
+                        buffer.position(0);
+                        buffer.limit(5000);
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Unable to process messages from client " + client);
+                    }
+                } finally {
+                    if (client != null) {
+                        try {
+                            client.close();
+                        } catch (Exception e) {
+                            e.printStackTrace();
+                        }
+                    }
+                }
+            }
+        }
+
+    }
+}
\ No newline at end of file

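A minimal end-to-end sketch: an inline IMessageReceiver, the listener bound to a port, and a test client that writes one newline-terminated message (illustrative only, not part of this patch; the port number and sleeps are placeholders):

    import java.io.OutputStream;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    import org.apache.asterix.external.feed.api.IMessageReceiver;
    import org.apache.asterix.external.feed.message.SocketMessageListener;

    public class SocketListenerExample {
        public static void main(String[] args) throws Exception {
            IMessageReceiver<String> receiver = new IMessageReceiver<String>() {
                @Override
                public void start() {
                }

                @Override
                public void sendMessage(String message) {
                    System.out.print("got: " + message);   // each message arrives with a trailing newline
                }

                @Override
                public void close(boolean processPending) {
                }
            };

            SocketMessageListener listener = new SocketMessageListener(9091, receiver);
            listener.start();      // binds the server socket on a pooled thread
            Thread.sleep(500);     // crude wait for the server socket to bind; fine for a sketch

            // Simulate a remote sender: messages must be newline-terminated.
            try (Socket sender = new Socket("localhost", 9091)) {
                OutputStream out = sender.getOutputStream();
                out.write("hello feed\n".getBytes(StandardCharsets.UTF_8));
                out.flush();
            }

            Thread.sleep(1000);    // give the handler a moment before shutting down
            listener.stop();
        }
    }
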
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/StorageReportFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/StorageReportFeedMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/StorageReportFeedMessage.java
new file mode 100644
index 0000000..68ce74d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/StorageReportFeedMessage.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.external.util.FeedConstants.MessageConstants;
+
+/**
+ * A feed control message sent from a storage runtime of a feed pipeline to report the intake timestamp corresponding
+ * to the last persisted tuple.
+ */
+public class StorageReportFeedMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+    private final int partition;
+    private long lastPersistedTupleIntakeTimestamp;
+    private boolean persistenceDelayWithinLimit;
+    private long averageDelay;
+    private int intakePartition;
+
+    public StorageReportFeedMessage(FeedConnectionId connectionId, int partition,
+            long lastPersistedTupleIntakeTimestamp, boolean persistenceDelayWithinLimit, long averageDelay,
+            int intakePartition) {
+        super(MessageType.STORAGE_REPORT);
+        this.connectionId = connectionId;
+        this.partition = partition;
+        this.lastPersistedTupleIntakeTimestamp = lastPersistedTupleIntakeTimestamp;
+        this.persistenceDelayWithinLimit = persistenceDelayWithinLimit;
+        this.averageDelay = averageDelay;
+        this.intakePartition = intakePartition;
+    }
+
+    @Override
+    public String toString() {
+        return messageType.name() + " " + connectionId + " [" + lastPersistedTupleIntakeTimestamp + "] ";
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public long getLastPersistedTupleIntakeTimestamp() {
+        return lastPersistedTupleIntakeTimestamp;
+    }
+
+    public int getPartition() {
+        return partition;
+    }
+
+    public boolean isPersistenceDelayWithinLimit() {
+        return persistenceDelayWithinLimit;
+    }
+
+    public void setPersistenceDelayWithinLimit(boolean persistenceDelayWithinLimit) {
+        this.persistenceDelayWithinLimit = persistenceDelayWithinLimit;
+    }
+
+    public long getAverageDelay() {
+        return averageDelay;
+    }
+
+    public void setAverageDelay(long averageDelay) {
+        this.averageDelay = averageDelay;
+    }
+
+    public int getIntakePartition() {
+        return intakePartition;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.LAST_PERSISTED_TUPLE_INTAKE_TIMESTAMP, lastPersistedTupleIntakeTimestamp);
+        obj.put(MessageConstants.PERSISTENCE_DELAY_WITHIN_LIMIT, persistenceDelayWithinLimit);
+        obj.put(MessageConstants.AVERAGE_PERSISTENCE_DELAY, averageDelay);
+        obj.put(FeedConstants.MessageConstants.PARTITION, partition);
+        obj.put(FeedConstants.MessageConstants.INTAKE_PARTITION, intakePartition);
+
+        return obj;
+    }
+
+    public static StorageReportFeedMessage read(JSONObject obj) throws JSONException {
+        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
+                obj.getString(FeedConstants.MessageConstants.FEED));
+        FeedConnectionId connectionId = new FeedConnectionId(feedId,
+                obj.getString(FeedConstants.MessageConstants.DATASET));
+        int partition = obj.getInt(FeedConstants.MessageConstants.PARTITION);
+        long timestamp = obj.getLong(FeedConstants.MessageConstants.LAST_PERSISTED_TUPLE_INTAKE_TIMESTAMP);
+        boolean persistenceDelayWithinLimit = obj.getBoolean(MessageConstants.PERSISTENCE_DELAY_WITHIN_LIMIT);
+        long averageDelay = obj.getLong(MessageConstants.AVERAGE_PERSISTENCE_DELAY);
+        int intakePartition = obj.getInt(MessageConstants.INTAKE_PARTITION);
+        return new StorageReportFeedMessage(connectionId, partition, timestamp, persistenceDelayWithinLimit,
+                averageDelay, intakePartition);
+    }
+
+    public void reset(long lastPersistedTupleIntakeTimestamp, boolean delayWithinLimit, long averageDelay) {
+        this.lastPersistedTupleIntakeTimestamp = lastPersistedTupleIntakeTimestamp;
+        this.persistenceDelayWithinLimit = delayWithinLimit;
+        this.averageDelay = averageDelay;
+    }
+
+}
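
For reference, a minimal round-trip sketch of the toJSON()/read() pair above. The class name, the dataverse/feed/dataset names, and the timing values are hypothetical and illustrative only; this is not part of the patch.

    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
    import org.json.JSONObject;

    public class StorageReportRoundTripSketch {
        public static void main(String[] args) throws Exception {
            // Identify the feed connection: (dataverse, feed) -> dataset
            FeedConnectionId connectionId =
                    new FeedConnectionId(new FeedId("SampleDataverse", "SampleFeed"), "SampleDataset");
            // Report that storage partition 0 has persisted all tuples ingested up to 'now',
            // within the delay limit, with an average persistence delay of 15 ms, from intake partition 0.
            StorageReportFeedMessage report = new StorageReportFeedMessage(connectionId, 0,
                    System.currentTimeMillis(), true, 15L, 0);
            JSONObject json = report.toJSON();                                   // serialize for transport
            StorageReportFeedMessage copy = StorageReportFeedMessage.read(json); // rebuild on receipt
            System.out.println(copy.getLastPersistedTupleIntakeTimestamp());
        }
    }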

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/TerminateDataFlowMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/TerminateDataFlowMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/TerminateDataFlowMessage.java
new file mode 100644
index 0000000..ab77840
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/TerminateDataFlowMessage.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.util.FeedConstants;
+
+public class TerminateDataFlowMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+
+    public TerminateDataFlowMessage(FeedConnectionId connectionId) {
+        super(MessageType.TERMINATE_FLOW);
+        this.connectionId = connectionId;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        return obj;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ThrottlingEnabledFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ThrottlingEnabledFeedMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ThrottlingEnabledFeedMessage.java
new file mode 100644
index 0000000..0459310
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/ThrottlingEnabledFeedMessage.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.util.FeedConstants;
+
+/**
+ * A feed control message indicating that throttling has been enabled for the feed. This message is dispatched
+ * to all locations that host an operator involved in the feed pipeline.
+ */
+public class ThrottlingEnabledFeedMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+
+    private final FeedRuntimeId runtimeId;
+
+    public ThrottlingEnabledFeedMessage(FeedConnectionId connectionId, FeedRuntimeId runtimeId) {
+        super(MessageType.THROTTLING_ENABLED);
+        this.connectionId = connectionId;
+        this.runtimeId = runtimeId;
+    }
+
+    @Override
+    public String toString() {
+        return messageType.name() + " " + connectionId + " [" + runtimeId + "] ";
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeId.getFeedRuntimeType());
+        obj.put(FeedConstants.MessageConstants.OPERAND_ID, runtimeId.getOperandId());
+        obj.put(FeedConstants.MessageConstants.PARTITION, runtimeId.getPartition());
+        return obj;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public FeedRuntimeId getFeedRuntimeId() {
+        return runtimeId;
+    }
+
+    public static ThrottlingEnabledFeedMessage read(JSONObject obj) throws JSONException {
+        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
+                obj.getString(FeedConstants.MessageConstants.FEED));
+        FeedConnectionId connectionId = new FeedConnectionId(feedId,
+                obj.getString(FeedConstants.MessageConstants.DATASET));
+        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.valueOf(obj
+                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE)),
+                obj.getInt(FeedConstants.MessageConstants.PARTITION),
+                obj.getString(FeedConstants.MessageConstants.OPERAND_ID));
+        return new ThrottlingEnabledFeedMessage(connectionId, runtimeId);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/XAQLFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/XAQLFeedMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/XAQLFeedMessage.java
new file mode 100644
index 0000000..cef3fa9
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/XAQLFeedMessage.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.util.FeedConstants;
+
+/**
+ * A feed control message indicating the need to execute a given AQL statement.
+ */
+public class XAQLFeedMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final String aql;
+    private final FeedConnectionId connectionId;
+
+    public XAQLFeedMessage(FeedConnectionId connectionId, String aql) {
+        super(MessageType.XAQL);
+        this.connectionId = connectionId;
+        this.aql = aql;
+    }
+
+    @Override
+    public String toString() {
+        return messageType.name() + " " + connectionId + " [" + aql + "] ";
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public String getAql() {
+        return aql;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.AQL, aql);
+        return obj;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java
new file mode 100644
index 0000000..da5907c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicy.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.policy;
+
+import java.io.Serializable;
+import java.util.Map;
+
+public class FeedPolicy implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+    private final String dataverseName;
+    // Enforced to be unique within a dataverse.
+    private final String policyName;
+    // A description of the policy
+    private final String description;
+    // The policy properties associated with the feed dataset
+    private Map<String, String> properties;
+
+    public FeedPolicy(String dataverseName, String policyName, String description, Map<String, String> properties) {
+        this.dataverseName = dataverseName;
+        this.policyName = policyName;
+        this.description = description;
+        this.properties = properties;
+    }
+
+    public String getDataverseName() {
+        return dataverseName;
+    }
+
+    public String getPolicyName() {
+        return policyName;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+        if (!(other instanceof FeedPolicy)) {
+            return false;
+        }
+        FeedPolicy otherPolicy = (FeedPolicy) other;
+        if (!otherPolicy.dataverseName.equals(dataverseName)) {
+            return false;
+        }
+        if (!otherPolicy.policyName.equals(policyName)) {
+            return false;
+        }
+        return true;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public Map<String, String> getProperties() {
+        return properties;
+    }
+
+    public void setProperties(Map<String, String> properties) {
+        this.properties = properties;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyAccessor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyAccessor.java
new file mode 100644
index 0000000..077a58d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyAccessor.java
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.policy;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * A utility class to access the configuration parameters of a feed ingestion policy.
+ */
+public class FeedPolicyAccessor implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * --------------------------
+     * failure configuration
+     * --------------------------
+     **/
+
+    /** continue feed ingestion after a soft (runtime) failure **/
+    public static final String SOFT_FAILURE_CONTINUE = "soft.failure.continue";
+
+    /** log failed tuples to an AsterixDB dataset for future reference **/
+    public static final String SOFT_FAILURE_LOG_DATA = "soft.failure.log.data";
+
+    /** continue feed ingestion after loss of one or more machines (hardware failure) **/
+    public static final String HARDWARE_FAILURE_CONTINUE = "hardware.failure.continue";
+
+    /** automatically restart an interrupted feed when the AsterixDB instance is rebooted **/
+    public static final String CLUSTER_REBOOT_AUTO_RESTART = "cluster.reboot.auto.restart";
+
+    /** the framework guarantees that each received feed record is processed through the ingestion pipeline at least once **/
+    public static final String AT_LEAST_ONE_SEMANTICS = "atleast.once.semantics";
+
+    /**
+     * --------------------------
+     * flow control configuration
+     * --------------------------
+     **/
+
+    /** enable buffering in feeds **/
+    public static final String BUFFERING_ENABLED = "buffering.enabled";
+
+    /** spill excess tuples to disk if an operator cannot process incoming data at its arrival rate **/
+    public static final String SPILL_TO_DISK_ON_CONGESTION = "spill.to.disk.on.congestion";
+
+    /** the maximum size of data (tuples) that can be spilled to disk **/
+    public static final String MAX_SPILL_SIZE_ON_DISK = "max.spill.size.on.disk";
+
+    /** discard tuples altogether if an operator cannot process incoming data at its arrival rate **/
+    public static final String DISCARD_ON_CONGESTION = "discard.on.congestion";
+
+    /** maximum fraction of ingested data that can be discarded **/
+    public static final String MAX_FRACTION_DISCARD = "max.fraction.discard";
+
+    /** maximum end-to-end delay/latency in persisting a tuple through the feed ingestion pipeline **/
+    public static final String MAX_DELAY_RECORD_PERSISTENCE = "max.delay.record.persistence";
+
+    /** rate limit the inflow of tuples in accordance with the maximum capacity of the pipeline **/
+    public static final String THROTTLING_ENABLED = "throttling.enabled";
+
+    /** elasticity **/
+    public static final String ELASTIC = "elastic";
+
+    /** statistics **/
+    public static final String TIME_TRACKING = "time.tracking";
+
+    /** logging of statistics **/
+    public static final String LOGGING_STATISTICS = "logging.statistics";
+
+    public static final long NO_LIMIT = -1;
+
+    private Map<String, String> feedPolicy;
+
+    public Map<String, String> getFeedPolicy() {
+        return feedPolicy;
+    }
+
+    public FeedPolicyAccessor(Map<String, String> feedPolicy) {
+        this.feedPolicy = feedPolicy;
+    }
+
+    public void reset(Map<String, String> feedPolicy) {
+        this.feedPolicy = feedPolicy;
+    }
+
+    /** Failure recover/reporting **/
+
+    public boolean logDataOnSoftFailure() {
+        return getBooleanPropertyValue(SOFT_FAILURE_LOG_DATA, false);
+    }
+
+    public boolean continueOnSoftFailure() {
+        return getBooleanPropertyValue(SOFT_FAILURE_CONTINUE, false);
+    }
+
+    public boolean continueOnHardwareFailure() {
+        return getBooleanPropertyValue(HARDWARE_FAILURE_CONTINUE, false);
+    }
+
+    public boolean autoRestartOnClusterReboot() {
+        return getBooleanPropertyValue(CLUSTER_REBOOT_AUTO_RESTART, false);
+    }
+
+    public boolean atleastOnceSemantics() {
+        return getBooleanPropertyValue(AT_LEAST_ONE_SEMANTICS, false);
+    }
+
+    /** flow control **/
+    public boolean bufferingEnabled() {
+        return getBooleanPropertyValue(BUFFERING_ENABLED, false);
+    }
+
+    public boolean spillToDiskOnCongestion() {
+        return getBooleanPropertyValue(SPILL_TO_DISK_ON_CONGESTION, false);
+    }
+
+    public boolean discardOnCongestion() {
+        return getMaxFractionDiscard() > 0;
+    }
+
+    public boolean throttlingEnabled() {
+        return getBooleanPropertyValue(THROTTLING_ENABLED, false);
+    }
+
+    public long getMaxSpillOnDisk() {
+        return getLongPropertyValue(MAX_SPILL_SIZE_ON_DISK, NO_LIMIT);
+    }
+
+    public float getMaxFractionDiscard() {
+        return getFloatPropertyValue(MAX_FRACTION_DISCARD, 0);
+    }
+
+    public long getMaxDelayRecordPersistence() {
+        return getLongPropertyValue(MAX_DELAY_RECORD_PERSISTENCE, Long.MAX_VALUE);
+    }
+
+    /** Elasticity **/
+    public boolean isElastic() {
+        return getBooleanPropertyValue(ELASTIC, false);
+    }
+
+    /** Statistics **/
+    public boolean isTimeTrackingEnabled() {
+        return getBooleanPropertyValue(TIME_TRACKING, false);
+    }
+
+    /** Logging of statistics **/
+    public boolean isLoggingStatisticsEnabled() {
+        return getBooleanPropertyValue(LOGGING_STATISTICS, false);
+    }
+
+    private boolean getBooleanPropertyValue(String key, boolean defValue) {
+        String v = feedPolicy.get(key);
+        return v == null ? defValue : Boolean.valueOf(v);
+    }
+
+    private long getLongPropertyValue(String key, long defValue) {
+        String v = feedPolicy.get(key);
+        return v != null ? Long.parseLong(v) : defValue;
+    }
+
+    private float getFloatPropertyValue(String key, float defValue) {
+        String v = feedPolicy.get(key);
+        return v != null ? Float.parseFloat(v) : defValue;
+    }
+
+}
\ No newline at end of file
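
As a quick illustration of how these policy keys are consumed, a minimal self-contained sketch; the property values shown are arbitrary and not part of the patch.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;

    public class FeedPolicyAccessorSketch {
        public static void main(String[] args) {
            Map<String, String> policy = new HashMap<>();
            policy.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
            policy.put(FeedPolicyAccessor.SPILL_TO_DISK_ON_CONGESTION, "true");
            policy.put(FeedPolicyAccessor.MAX_SPILL_SIZE_ON_DISK, "1048576");
            policy.put(FeedPolicyAccessor.MAX_FRACTION_DISCARD, "0.1");

            FeedPolicyAccessor accessor = new FeedPolicyAccessor(policy);
            System.out.println(accessor.continueOnSoftFailure());        // true
            System.out.println(accessor.spillToDiskOnCongestion());      // true
            System.out.println(accessor.getMaxSpillOnDisk());            // 1048576
            System.out.println(accessor.discardOnCongestion());          // true (fraction > 0)
            System.out.println(accessor.getMaxDelayRecordPersistence()); // Long.MAX_VALUE (default, no key set)
        }
    }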

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyEnforcer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyEnforcer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyEnforcer.java
new file mode 100644
index 0000000..e0944ad
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/policy/FeedPolicyEnforcer.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.policy;
+
+import java.rmi.RemoteException;
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+
+public class FeedPolicyEnforcer {
+
+    private final FeedConnectionId connectionId;
+    private final FeedPolicyAccessor policyAccessor;
+
+    public FeedPolicyEnforcer(FeedConnectionId feedConnectionId, Map<String, String> feedPolicy) {
+        this.connectionId = feedConnectionId;
+        this.policyAccessor = new FeedPolicyAccessor(feedPolicy);
+    }
+
+    public boolean continueIngestionPostSoftwareFailure(Exception e) throws RemoteException, ACIDException {
+        return policyAccessor.continueOnSoftFailure();
+    }
+
+    public FeedPolicyAccessor getFeedPolicyAccessor() {
+        return policyAccessor;
+    }
+
+    public FeedConnectionId getFeedId() {
+        return connectionId;
+    }
+
+}
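
A small hypothetical usage sketch (not part of the patch) showing how the enforcer turns the policy map into the continue-on-soft-failure decision; the names used are illustrative only.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
    import org.apache.asterix.external.feed.policy.FeedPolicyEnforcer;

    public class FeedPolicyEnforcerSketch {
        public static void main(String[] args) throws Exception {
            Map<String, String> policy = new HashMap<>();
            policy.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
            FeedConnectionId connectionId =
                    new FeedConnectionId(new FeedId("SampleDataverse", "SampleFeed"), "SampleDataset");
            FeedPolicyEnforcer enforcer = new FeedPolicyEnforcer(connectionId, policy);
            // On a runtime (soft) failure, the pipeline asks the enforcer whether to keep ingesting.
            System.out.println(enforcer.continueIngestionPostSoftwareFailure(new Exception("soft failure"))); // true
        }
    }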

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java
new file mode 100644
index 0000000..3ac28c9
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import org.apache.asterix.external.api.IAdapterRuntimeManager;
+import org.apache.asterix.external.api.IAdapterRuntimeManager.State;
+import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.util.ExternalDataExceptionUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * The class in charge of executing feed adapters.
+ */
+public class AdapterExecutor implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(AdapterExecutor.class.getName());
+
+    private final DistributeFeedFrameWriter writer;     // A writer that sends frames to multiple receivers (that can
+                                                        // increase or decrease at any time)
+    private final IFeedAdapter adapter;                 // The adapter
+    private final IAdapterRuntimeManager adapterManager;// The runtime manager <-- two way visibility -->
+
+    public AdapterExecutor(int partition, DistributeFeedFrameWriter writer, IFeedAdapter adapter,
+            IAdapterRuntimeManager adapterManager) {
+        this.writer = writer;
+        this.adapter = adapter;
+        this.adapterManager = adapterManager;
+    }
+
+    @Override
+    public void run() {
+        // Start by getting the partition number from the manager
+        int partition = adapterManager.getPartition();
+        if (LOGGER.isInfoEnabled()) {
+            LOGGER.info("Starting ingestion for partition:" + partition);
+        }
+        boolean continueIngestion = true;
+        boolean failedIngestion = false;
+        while (continueIngestion) {
+            try {
+                // Start the adapter
+                adapter.start(partition, writer);
+                // Adapter has completed execution
+                continueIngestion = false;
+            } catch (Exception e) {
+                LOGGER.error("Exception during feed ingestion ", e);
+                // Check if the adapter wants to continue ingestion
+                if (ExternalDataExceptionUtils.isResolvable(e)) {
+                    continueIngestion = adapter.handleException(e);
+                } else {
+                    continueIngestion = false;
+                }
+                failedIngestion = !continueIngestion;
+            }
+        }
+        // Done with the adapter. About to close; set the state based on the failed-ingestion flag and notify the
+        // runtime manager.
+        adapterManager.setState(failedIngestion ? State.FAILED_INGESTION : State.FINISHED_INGESTION);
+        synchronized (adapterManager) {
+            adapterManager.notifyAll();
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java
new file mode 100644
index 0000000..6c3e44d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.external.api.IAdapterRuntimeManager;
+import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.log4j.Logger;
+
+/**
+ * This class manages the execution of an adapter within a feed
+ */
+public class AdapterRuntimeManager implements IAdapterRuntimeManager {
+
+    private static final Logger LOGGER = Logger.getLogger(AdapterRuntimeManager.class.getName());
+
+    private final FeedId feedId;                    // (dataverse-feed)
+
+    private final IFeedAdapter feedAdapter;         // The adapter
+
+    private final IIntakeProgressTracker tracker;   // Not used. needs to be fixed soon.
+
+    private final AdapterExecutor adapterExecutor;  // The executor for the adapter <-- two way visibility -->
+
+    private final int partition;                    // The partition number
+
+    private final ExecutorService executorService;  // Executor service to run/shutdown the adapter executor
+
+    private IngestionRuntime ingestionRuntime;      // Runtime representing the ingestion stage of a feed <-- two way
+                                                    // visibility -->
+
+    private State state;                            // One of {ACTIVE_INGESTION, INACTIVE_INGESTION, FINISHED_INGESTION,
+                                                    // FAILED_INGESTION}
+
+    public AdapterRuntimeManager(FeedId feedId, IFeedAdapter feedAdapter, IIntakeProgressTracker tracker,
+            DistributeFeedFrameWriter writer, int partition) {
+        this.feedId = feedId;
+        this.feedAdapter = feedAdapter;
+        this.tracker = tracker;
+        this.partition = partition;
+        this.adapterExecutor = new AdapterExecutor(partition, writer, feedAdapter, this);
+        this.executorService = Executors.newSingleThreadExecutor();
+        this.state = State.INACTIVE_INGESTION;
+    }
+
+    @Override
+    public void start() throws Exception {
+        state = State.ACTIVE_INGESTION;
+        executorService.execute(adapterExecutor);
+    }
+
+    @Override
+    public void stop() {
+        boolean stopped = false;
+        try {
+            stopped = feedAdapter.stop();
+        } catch (Exception exception) {
+            LOGGER.error("Unable to stop adapter " + feedAdapter, exception);
+        } finally {
+            state = State.FINISHED_INGESTION;
+            if (stopped) {
+                // stop() returned true, we wait for the process termination
+                executorService.shutdown();
+                try {
+                    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
+                } catch (InterruptedException e) {
+                    LOGGER.error("Interrupted while waiting for feed adapter to finish its work", e);
+                }
+            } else {
+                // stop() returned false, we try to force shutdown
+                executorService.shutdownNow();
+            }
+
+        }
+    }
+
+    @Override
+    public FeedId getFeedId() {
+        return feedId;
+    }
+
+    @Override
+    public String toString() {
+        return feedId + "[" + partition + "]";
+    }
+
+    @Override
+    public IFeedAdapter getFeedAdapter() {
+        return feedAdapter;
+    }
+
+    public IIntakeProgressTracker getTracker() {
+        return tracker;
+    }
+
+    @Override
+    public synchronized State getState() {
+        return state;
+    }
+
+    @Override
+    public synchronized void setState(State state) {
+        this.state = state;
+    }
+
+    public AdapterExecutor getAdapterExecutor() {
+        return adapterExecutor;
+    }
+
+    @Override
+    public int getPartition() {
+        return partition;
+    }
+
+    public IngestionRuntime getIngestionRuntime() {
+        return ingestionRuntime;
+    }
+
+    @Override
+    public IIntakeProgressTracker getProgressTracker() {
+        return tracker;
+    }
+
+}
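
To summarize the intended lifecycle, a hedged sketch: the adapter, tracker, and writer arguments are placeholders assumed to be supplied by the intake operator, and this helper class is not part of the patch.

    import org.apache.asterix.external.api.IFeedAdapter;
    import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
    import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.runtime.AdapterRuntimeManager;

    public class AdapterLifecycleSketch {
        // The adapter, tracker, and writer are assumed to be provided by the caller; they are
        // placeholders here and are not constructed by this sketch.
        public static void runAdapter(FeedId feedId, IFeedAdapter adapter, IIntakeProgressTracker tracker,
                DistributeFeedFrameWriter writer, int partition) throws Exception {
            AdapterRuntimeManager manager = new AdapterRuntimeManager(feedId, adapter, tracker, writer, partition);
            manager.start(); // state -> ACTIVE_INGESTION; AdapterExecutor is scheduled on a single-thread executor
            // ... frames flow through the DistributeFeedFrameWriter while ingestion is active ...
            manager.stop();  // graceful shutdown; falls back to shutdownNow() if adapter.stop() returns false
        }
    }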

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/CollectionRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/CollectionRuntime.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/CollectionRuntime.java
new file mode 100644
index 0000000..967dc3e
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/CollectionRuntime.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import java.util.Map;
+
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.api.ISubscriberRuntime;
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector;
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector.State;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+
+/**
+ * Represents the feed runtime that collects feed tuples from another feed.
+ * In case of a primary feed, the CollectionRuntime collects tuples from the feed
+ * intake job. For a secondary feed, tuples are collected from the intake/compute
+ * runtime associated with the source feed.
+ */
+public class CollectionRuntime extends FeedRuntime implements ISubscriberRuntime {
+
+    private final FeedConnectionId connectionId;            // [Dataverse - Feed - Dataset]
+    private final ISubscribableRuntime sourceRuntime;       // Runtime that provides the data
+    private final Map<String, String> feedPolicy;           // Policy associated with the feed
+    private FeedFrameCollector frameCollector;              // Collector that can be plugged into a frame distributor
+
+    public CollectionRuntime(FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            FeedRuntimeInputHandler inputSideHandler, IFrameWriter outputSideWriter, ISubscribableRuntime sourceRuntime,
+            Map<String, String> feedPolicy) {
+        super(runtimeId, inputSideHandler, outputSideWriter);
+        this.connectionId = connectionId;
+        this.sourceRuntime = sourceRuntime;
+        this.feedPolicy = feedPolicy;
+    }
+
+    public State waitTillCollectionOver() throws InterruptedException {
+        if (!(isCollectionOver())) {
+            synchronized (frameCollector) {
+                while (!isCollectionOver()) {
+                    frameCollector.wait();
+                }
+            }
+        }
+        return frameCollector.getState();
+    }
+
+    private boolean isCollectionOver() {
+        return frameCollector.getState().equals(FeedFrameCollector.State.FINISHED)
+                || frameCollector.getState().equals(FeedFrameCollector.State.HANDOVER);
+    }
+
+    @Override
+    public void setMode(Mode mode) {
+        getInputHandler().setMode(mode);
+    }
+
+    @Override
+    public Map<String, String> getFeedPolicy() {
+        return feedPolicy;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public ISubscribableRuntime getSourceRuntime() {
+        return sourceRuntime;
+    }
+
+    public void setFrameCollector(FeedFrameCollector frameCollector) {
+        this.frameCollector = frameCollector;
+    }
+
+    @Override
+    public FeedFrameCollector getFrameCollector() {
+        return frameCollector;
+    }
+
+}
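
A minimal sketch of how a caller might block on the collection; the drain helper is hypothetical and not part of the patch.

    import org.apache.asterix.external.feed.dataflow.FeedFrameCollector;
    import org.apache.asterix.external.feed.runtime.CollectionRuntime;

    public class CollectionDrainSketch {
        // Blocks until the source stops delivering frames to this collector
        // (state FINISHED or HANDOVER) and returns the terminal state.
        public static FeedFrameCollector.State drain(CollectionRuntime collectionRuntime) throws InterruptedException {
            FeedFrameCollector.State terminal = collectionRuntime.waitTillCollectionOver();
            // HANDOVER means the collection was handed over to another runtime rather than finishing outright.
            return terminal;
        }
    }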

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntime.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntime.java
new file mode 100644
index 0000000..76b1b19
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntime.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import org.apache.asterix.external.feed.api.IFeedOperatorOutputSideHandler;
+import org.apache.asterix.external.feed.api.IFeedRuntime;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.hyracks.api.comm.IFrameWriter;
+
+public class FeedRuntime implements IFeedRuntime {
+
+    /** A unique identifier for the runtime **/
+    protected final FeedRuntimeId runtimeId;
+
+    /** The output frame writer associated with the runtime **/
+    protected IFrameWriter frameWriter;
+
+    /** The pre-processor associated with the runtime **/
+    protected FeedRuntimeInputHandler inputHandler;
+
+    public FeedRuntime(FeedRuntimeId runtimeId, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter) {
+        this.runtimeId = runtimeId;
+        this.frameWriter = frameWriter;
+        this.inputHandler = inputHandler;
+    }
+
+    public void setFrameWriter(IFeedOperatorOutputSideHandler frameWriter) {
+        this.frameWriter = frameWriter;
+    }
+
+    @Override
+    public FeedRuntimeId getRuntimeId() {
+        return runtimeId;
+    }
+
+    @Override
+    public IFrameWriter getFeedFrameWriter() {
+        return frameWriter;
+    }
+
+    @Override
+    public String toString() {
+        return runtimeId.toString();
+    }
+
+    @Override
+    public FeedRuntimeInputHandler getInputHandler() {
+        return inputHandler;
+    }
+
+    public Mode getMode() {
+        return inputHandler != null ? inputHandler.getMode() : Mode.PROCESS;
+    }
+
+    public void setMode(Mode mode) {
+        this.inputHandler.setMode(mode);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntimeId.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntimeId.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntimeId.java
new file mode 100644
index 0000000..45d8afe
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/FeedRuntimeId.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import java.io.Serializable;
+
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+
+public class FeedRuntimeId implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    public static final String DEFAULT_OPERAND_ID = "N/A";
+
+    private final FeedRuntimeType runtimeType;
+    private final int partition;
+    private final String operandId;
+
+    public FeedRuntimeId(FeedRuntimeType runtimeType, int partition, String operandId) {
+        this.runtimeType = runtimeType;
+        this.partition = partition;
+        this.operandId = operandId;
+    }
+
+    @Override
+    public String toString() {
+        return runtimeType + "[" + partition + "]" + "{" + operandId + "}";
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof FeedRuntimeId)) {
+            return false;
+        }
+        FeedRuntimeId other = (FeedRuntimeId) o;
+        return (other.getFeedRuntimeType().equals(runtimeType) && other.getOperandId().equals(operandId) && other
+                .getPartition() == partition);
+    }
+
+    @Override
+    public int hashCode() {
+        return toString().hashCode();
+    }
+
+    public FeedRuntimeType getFeedRuntimeType() {
+        return runtimeType;
+    }
+
+    public int getPartition() {
+        return partition;
+    }
+
+    public FeedRuntimeType getRuntimeType() {
+        return runtimeType;
+    }
+
+    public String getOperandId() {
+        return operandId;
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/IngestionRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/IngestionRuntime.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/IngestionRuntime.java
new file mode 100644
index 0000000..fd6fcb3
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/IngestionRuntime.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import java.util.logging.Level;
+
+import org.apache.asterix.external.api.IAdapterRuntimeManager;
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector;
+import org.apache.asterix.external.feed.dataflow.FrameDistributor;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+
+public class IngestionRuntime extends SubscribableRuntime {
+
+    private final IAdapterRuntimeManager adapterRuntimeManager;
+
+    public IngestionRuntime(FeedId feedId, FeedRuntimeId runtimeId, DistributeFeedFrameWriter feedWriter,
+            RecordDescriptor recordDesc, IAdapterRuntimeManager adaptorRuntimeManager) {
+        super(feedId, runtimeId, null, feedWriter, recordDesc);
+        this.adapterRuntimeManager = adaptorRuntimeManager;
+    }
+
+    @Override
+    public void subscribeFeed(FeedPolicyAccessor fpa, CollectionRuntime collectionRuntime) throws Exception {
+        FeedFrameCollector reader = dWriter.subscribeFeed(fpa, collectionRuntime.getInputHandler(),
+                collectionRuntime.getConnectionId());
+        collectionRuntime.setFrameCollector(reader);
+
+        if (dWriter.getDistributionMode().equals(FrameDistributor.DistributionMode.SINGLE)) {
+            adapterRuntimeManager.start();
+        }
+        subscribers.add(collectionRuntime);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Subscribed feed collection [" + collectionRuntime + "] to " + this);
+        }
+    }
+
+    @Override
+    public void unsubscribeFeed(CollectionRuntime collectionRuntime) throws Exception {
+        if (dWriter.getDistributionMode().equals(FrameDistributor.DistributionMode.SINGLE)) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Stopping adapter for " + this + " as no more registered collectors");
+            }
+            adapterRuntimeManager.stop();
+        } else {
+            dWriter.unsubscribeFeed(collectionRuntime.getInputHandler());
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Unsubscribed feed collection [" + collectionRuntime + "] from " + this);
+        }
+        subscribers.remove(collectionRuntime);
+    }
+
+    public void endOfFeed() {
+        dWriter.notifyEndOfFeed();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Notified End Of Feed  [" + this + "]");
+        }
+    }
+
+    public IAdapterRuntimeManager getAdapterRuntimeManager() {
+        return adapterRuntimeManager;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableFeedRuntimeId.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableFeedRuntimeId.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableFeedRuntimeId.java
new file mode 100644
index 0000000..f6db99c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableFeedRuntimeId.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedId;
+
+public class SubscribableFeedRuntimeId extends FeedRuntimeId {
+    private static final long serialVersionUID = 1L;
+    private final FeedId feedId;
+
+    public SubscribableFeedRuntimeId(FeedId feedId, FeedRuntimeType runtimeType, int partition) {
+        super(runtimeType, partition, FeedRuntimeId.DEFAULT_OPERAND_ID);
+        this.feedId = feedId;
+    }
+
+    public FeedId getFeedId() {
+        return feedId;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof SubscribableFeedRuntimeId)) {
+            return false;
+        }
+
+        return (super.equals(o) && this.feedId.equals(((SubscribableFeedRuntimeId) o).getFeedId()));
+    }
+
+    @Override
+    public int hashCode() {
+        return super.hashCode() + feedId.hashCode();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableRuntime.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableRuntime.java
new file mode 100644
index 0000000..056875c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/SubscribableRuntime.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.runtime;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.api.ISubscriberRuntime;
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+
+public class SubscribableRuntime extends FeedRuntime implements ISubscribableRuntime {
+
+    protected static final Logger LOGGER = Logger.getLogger(SubscribableRuntime.class.getName());
+
+    protected final FeedId feedId;
+    protected final List<ISubscriberRuntime> subscribers;
+    protected final RecordDescriptor recordDescriptor;
+    protected final DistributeFeedFrameWriter dWriter;
+
+    public SubscribableRuntime(FeedId feedId, FeedRuntimeId runtimeId, FeedRuntimeInputHandler inputHandler,
+            DistributeFeedFrameWriter dWriter, RecordDescriptor recordDescriptor) {
+        super(runtimeId, inputHandler, dWriter);
+        this.feedId = feedId;
+        this.recordDescriptor = recordDescriptor;
+        this.dWriter = dWriter;
+        this.subscribers = new ArrayList<ISubscriberRuntime>();
+    }
+
+    public FeedId getFeedId() {
+        return feedId;
+    }
+
+    @Override
+    public String toString() {
+        return "SubscribableRuntime" + " [" + feedId + "]" + "(" + runtimeId + ")";
+    }
+
+    @Override
+    public synchronized void subscribeFeed(FeedPolicyAccessor fpa, CollectionRuntime collectionRuntime)
+            throws Exception {
+        FeedFrameCollector collector = dWriter.subscribeFeed(new FeedPolicyAccessor(collectionRuntime.getFeedPolicy()),
+                collectionRuntime.getInputHandler(), collectionRuntime.getConnectionId());
+        collectionRuntime.setFrameCollector(collector);
+        subscribers.add(collectionRuntime);
+    }
+
+    @Override
+    public synchronized void unsubscribeFeed(CollectionRuntime collectionRuntime) throws Exception {
+        dWriter.unsubscribeFeed(collectionRuntime.getFeedFrameWriter());
+        subscribers.remove(collectionRuntime);
+    }
+
+    @Override
+    public synchronized List<ISubscriberRuntime> getSubscribers() {
+        return subscribers;
+    }
+
+    @Override
+    public DistributeFeedFrameWriter getFeedFrameWriter() {
+        return dWriter;
+    }
+
+    public FeedRuntimeType getFeedRuntimeType() {
+        return runtimeId.getFeedRuntimeType();
+    }
+
+    @Override
+    public RecordDescriptor getRecordDescriptor() {
+        return recordDescriptor;
+    }
+
+}
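
For orientation, here is a minimal usage sketch of the subscription API defined above. It is
not part of this commit; the FeedId, FeedRuntimeId, input handler, frame writer, record
descriptor, policy accessor, and CollectionRuntime are assumed to be constructed elsewhere in
the feed pipeline, and exception handling is omitted.

    // Hypothetical sketch: wiring a collector to a subscribable runtime.
    SubscribableRuntime runtime =
            new SubscribableRuntime(feedId, runtimeId, inputHandler, dWriter, recordDesc);
    runtime.subscribeFeed(policyAccessor, collectionRuntime); // attaches a FeedFrameCollector
    int attached = runtime.getSubscribers().size();           // collectors currently registered
    runtime.unsubscribeFeed(collectionRuntime);                // detaches the collector again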

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/BasicMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/BasicMonitoredBuffer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/BasicMonitoredBuffer.java
new file mode 100644
index 0000000..ad40608
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/BasicMonitoredBuffer.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import org.apache.asterix.external.feed.api.IExceptionHandler;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector;
+import org.apache.asterix.external.feed.api.IFrameEventCallback;
+import org.apache.asterix.external.feed.api.IFramePostProcessor;
+import org.apache.asterix.external.feed.api.IFramePreprocessor;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class BasicMonitoredBuffer extends MonitoredBuffer {
+
+    public BasicMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter, FrameTupleAccessor fta,
+            RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
+            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
+            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
+        super(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector, connectionId, runtimeId,
+                exceptionHandler, callback, nPartitions, policyAccessor);
+    }
+
+    @Override
+    protected boolean monitorProcessingRate() {
+        return false;
+    }
+
+    @Override
+    protected boolean logInflowOutflowRate() {
+        return false;
+    }
+
+    @Override
+    protected IFramePreprocessor getFramePreProcessor() {
+        return null;
+    }
+
+    @Override
+    protected IFramePostProcessor getFramePostProcessor() {
+        return null;
+    }
+
+    @Override
+    protected boolean monitorInputQueueLength() {
+        return false;
+    }
+
+    @Override
+    protected boolean reportInflowRate() {
+        return false;
+    }
+
+    @Override
+    protected boolean reportOutflowRate() {
+        return false;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/ComputeSideMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/ComputeSideMonitoredBuffer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/ComputeSideMonitoredBuffer.java
new file mode 100644
index 0000000..211fc7b
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/watch/ComputeSideMonitoredBuffer.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.watch;
+
+import org.apache.asterix.external.feed.api.IExceptionHandler;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector;
+import org.apache.asterix.external.feed.api.IFrameEventCallback;
+import org.apache.asterix.external.feed.api.IFramePostProcessor;
+import org.apache.asterix.external.feed.api.IFramePreprocessor;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class ComputeSideMonitoredBuffer extends MonitoredBuffer {
+
+    public ComputeSideMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter,
+            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
+            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
+            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
+        super(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector, connectionId, runtimeId,
+                exceptionHandler, callback, nPartitions, policyAccessor);
+    }
+
+    @Override
+    protected boolean monitorProcessingRate() {
+        return true;
+    }
+
+    protected boolean logInflowOutflowRate() {
+        return true;
+    }
+
+    @Override
+    protected boolean monitorInputQueueLength() {
+        return true;
+    }
+
+    @Override
+    protected IFramePreprocessor getFramePreProcessor() {
+        return null;
+    }
+
+    @Override
+    protected IFramePostProcessor getFramePostProcessor() {
+        return null;
+    }
+
+    @Override
+    protected boolean reportOutflowRate() {
+        return false;
+    }
+
+    @Override
+    protected boolean reportInflowRate() {
+        return false;
+    }
+
+}
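
BasicMonitoredBuffer and ComputeSideMonitoredBuffer differ only in which of MonitoredBuffer's
template hooks they switch on: the basic variant disables every probe, while the compute-side
variant enables processing-rate, inflow/outflow, and input-queue monitoring. As a hypothetical
illustration (not part of this commit, and assuming the seven hooks shown above are the only
abstract methods of MonitoredBuffer), a variant that reports nothing but its outflow rate
could be sketched as:

    // Hypothetical sketch only; package and imports are the same as for the two classes above.
    public class OutflowReportingMonitoredBuffer extends MonitoredBuffer {

        public OutflowReportingMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler,
                IFrameWriter frameWriter, FrameTupleAccessor fta, RecordDescriptor recordDesc,
                IFeedMetricCollector metricCollector, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
                IExceptionHandler exceptionHandler, IFrameEventCallback callback, int nPartitions,
                FeedPolicyAccessor policyAccessor) {
            // Forward everything to MonitoredBuffer, exactly as the subclasses above do.
            super(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector, connectionId, runtimeId,
                    exceptionHandler, callback, nPartitions, policyAccessor);
        }

        @Override protected boolean monitorProcessingRate()     { return false; }
        @Override protected boolean logInflowOutflowRate()      { return false; }
        @Override protected boolean monitorInputQueueLength()   { return false; }
        @Override protected IFramePreprocessor getFramePreProcessor()   { return null; }
        @Override protected IFramePostProcessor getFramePostProcessor() { return null; }
        @Override protected boolean reportInflowRate()  { return false; }
        @Override protected boolean reportOutflowRate() { return true; } // the only metric reported
    }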


[26/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
Feed Fixes and Cleanup

1. Introduce a filesystem feed data source.
2. Fix the order of closing feed stages on disconnection.
3. Add the Twitter feed to the compatibility utility so that it
   can be used with its alias.
4. Add the first part of the feed log space.
5. Fix the handling of the duplicate key exception.

Change-Id: I4e8db26a810efd1fbaa52ceeb3efd0c8328ab070
Reviewed-on: https://asterix-gerrit.ics.uci.edu/574
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <hu...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/e800e6d5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/e800e6d5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/e800e6d5

Branch: refs/heads/master
Commit: e800e6d5f87358519c5c90e296edadb4ad383554
Parents: dace5f2
Author: Abdullah Alamoudi <ba...@gmail.com>
Authored: Thu Jan 14 11:20:55 2016 +0300
Committer: abdullah alamoudi <ba...@gmail.com>
Committed: Thu Jan 14 12:26:31 2016 -0800

----------------------------------------------------------------------
 ...ceRandomPartitioningFeedComputationRule.java |    3 +-
 .../optimizer/rules/UnnestToDataScanRule.java   |   10 +-
 .../am/AbstractIntroduceAccessMethodRule.java   |    1 +
 .../translator/AbstractLangTranslator.java      |    2 +-
 .../asterix/translator/CompiledStatements.java  |    2 +-
 asterix-algebra/src/main/javacc/AQLPlus.jj      |    2 +-
 .../api/common/AsterixAppRuntimeContext.java    |    4 +-
 .../asterix/api/common/FeedWorkCollection.java  |  205 ---
 .../api/http/servlet/ConnectorAPIServlet.java   |    2 +-
 .../asterix/api/http/servlet/FeedServlet.java   |   14 +-
 .../api/http/servlet/FeedServletUtil.java       |    6 +-
 .../asterix/aql/translator/QueryTranslator.java |   92 +-
 .../apache/asterix/feed/CentralFeedManager.java |  110 ++
 .../feed/FeedJobNotificationHandler.java        |  742 +++++++++++
 .../java/org/apache/asterix/feed/FeedJoint.java |  190 +++
 .../asterix/feed/FeedLifecycleListener.java     |  499 ++++++++
 .../apache/asterix/feed/FeedLoadManager.java    |  302 +++++
 .../asterix/feed/FeedMessageReceiver.java       |   91 ++
 .../asterix/feed/FeedTrackingManager.java       |  188 +++
 .../apache/asterix/feed/FeedWorkCollection.java |  206 +++
 .../feed/FeedWorkRequestResponseHandler.java    |  269 ++++
 .../org/apache/asterix/feed/FeedsActivator.java |  118 ++
 .../asterix/feeds/CentralFeedManager.java       |  110 --
 .../apache/asterix/feeds/FeedCollectInfo.java   |   54 -
 .../java/org/apache/asterix/feeds/FeedInfo.java |   53 -
 .../feeds/FeedJobNotificationHandler.java       |  743 -----------
 .../org/apache/asterix/feeds/FeedJoint.java     |  190 ---
 .../asterix/feeds/FeedLifecycleListener.java    |  497 --------
 .../apache/asterix/feeds/FeedLoadManager.java   |  302 -----
 .../org/apache/asterix/feeds/FeedManager.java   |  144 ---
 .../asterix/feeds/FeedMessageReceiver.java      |   92 --
 .../asterix/feeds/FeedMetadataManager.java      |  113 --
 .../asterix/feeds/FeedTrackingManager.java      |  188 ---
 .../feeds/FeedWorkRequestResponseHandler.java   |  269 ----
 .../apache/asterix/feeds/FeedsActivator.java    |  117 --
 .../file/ExternalIndexingOperations.java        |    2 +-
 .../org/apache/asterix/file/FeedOperations.java |   34 +-
 .../file/SecondaryBTreeOperationsHelper.java    |    2 +-
 .../file/SecondaryIndexOperationsHelper.java    |    2 +-
 .../file/SecondaryRTreeOperationsHelper.java    |    2 +-
 .../bootstrap/AsterixGlobalRecoveryManager.java |    4 +-
 .../bootstrap/CCApplicationEntryPoint.java      |    6 +-
 .../bootstrap/ExternalLibraryBootstrap.java     |    7 +-
 .../hyracks/bootstrap/FeedBootstrap.java        |   28 +-
 .../bootstrap/NCApplicationEntryPoint.java      |   40 +-
 .../http/servlet/ConnectorAPIServletTest.java   |    2 +-
 .../metadata/results/basic/meta15/meta15.1.adm  |    4 +-
 .../results/feeds/feeds_01/feeds_01.1.adm       |    2 +-
 .../src/test/resources/runtimets/testsuite.xml  |  153 ++-
 .../common/api/IAsterixAppRuntimeContext.java   |    3 +-
 .../config/AsterixPropertiesAccessor.java       |    2 +-
 .../common/config/MetadataConstants.java        |   33 +
 ...erixLSMInsertDeleteOperatorNodePushable.java |    8 +-
 .../common/exceptions/FrameDataException.java   |    2 +-
 .../common/feeds/BasicMonitoredBuffer.java      |   76 --
 .../asterix/common/feeds/CollectionRuntime.java |   93 --
 .../feeds/ComputeSideMonitoredBuffer.java       |   75 --
 .../apache/asterix/common/feeds/DataBucket.java |   88 --
 .../asterix/common/feeds/DataBucketPool.java    |  110 --
 .../common/feeds/DistributeFeedFrameWriter.java |  143 ---
 .../asterix/common/feeds/FeedActivity.java      |  119 --
 .../feeds/FeedCollectRuntimeInputHandler.java   |   61 -
 .../common/feeds/FeedConnectJobInfo.java        |   93 --
 .../asterix/common/feeds/FeedConnectionId.java  |   74 --
 .../common/feeds/FeedConnectionRequest.java     |  126 --
 .../asterix/common/feeds/FeedConstants.java     |   71 --
 .../common/feeds/FeedExceptionHandler.java      |  108 --
 .../asterix/common/feeds/FeedFrameCache.java    |  171 ---
 .../common/feeds/FeedFrameCollector.java        |  158 ---
 .../common/feeds/FeedFrameDiscarder.java        |   63 -
 .../asterix/common/feeds/FeedFrameHandlers.java |  303 -----
 .../asterix/common/feeds/FeedFrameSpiller.java  |  176 ---
 .../common/feeds/FeedFrameTupleAccessor.java    |  110 --
 .../asterix/common/feeds/FeedFrameUtil.java     |  102 --
 .../org/apache/asterix/common/feeds/FeedId.java |   66 -
 .../asterix/common/feeds/FeedIntakeInfo.java    |   62 -
 .../asterix/common/feeds/FeedJobInfo.java       |   86 --
 .../asterix/common/feeds/FeedJointKey.java      |   79 --
 .../asterix/common/feeds/FeedMemoryManager.java |  112 --
 .../common/feeds/FeedMessageService.java        |  144 ---
 .../common/feeds/FeedMetricCollector.java       |  187 ---
 .../common/feeds/FeedPolicyAccessor.java        |  168 ---
 .../asterix/common/feeds/FeedRuntime.java       |   74 --
 .../asterix/common/feeds/FeedRuntimeId.java     |   80 --
 .../common/feeds/FeedRuntimeInputHandler.java   |  440 -------
 .../common/feeds/FeedRuntimeManager.java        |   81 --
 .../asterix/common/feeds/FeedRuntimeReport.java |   23 -
 .../common/feeds/FeedTupleCommitAckMessage.java |   97 --
 .../feeds/FeedTupleCommitResponseMessage.java   |   80 --
 .../asterix/common/feeds/FrameCollection.java   |  101 --
 .../asterix/common/feeds/FrameDistributor.java  |  360 ------
 .../common/feeds/FrameEventCallback.java        |  102 --
 .../asterix/common/feeds/IngestionRuntime.java  |   75 --
 .../common/feeds/IntakePartitionStatistics.java |   41 -
 .../common/feeds/IntakeSideMonitoredBuffer.java |   76 --
 .../asterix/common/feeds/MessageListener.java   |  126 --
 .../asterix/common/feeds/MessageReceiver.java   |  111 --
 .../asterix/common/feeds/MonitoredBuffer.java   |  388 ------
 .../common/feeds/MonitoredBufferTimerTasks.java |  294 -----
 .../apache/asterix/common/feeds/NodeLoad.java   |   62 -
 .../asterix/common/feeds/NodeLoadReport.java    |   99 --
 .../common/feeds/NodeLoadReportService.java     |  106 --
 .../org/apache/asterix/common/feeds/Series.java |   44 -
 .../apache/asterix/common/feeds/SeriesAvg.java  |   47 -
 .../apache/asterix/common/feeds/SeriesRate.java |   92 --
 .../common/feeds/StorageFrameHandler.java       |  118 --
 .../feeds/StorageSideMonitoredBuffer.java       |  206 ---
 .../common/feeds/SubscribableFeedRuntimeId.java |   52 -
 .../common/feeds/SubscribableRuntime.java       |   90 --
 .../feeds/api/IAdapterRuntimeManager.java       |   82 --
 .../common/feeds/api/ICentralFeedManager.java   |   34 -
 .../common/feeds/api/IDataSourceAdapter.java    |   60 -
 .../common/feeds/api/IExceptionHandler.java     |   43 -
 .../feeds/api/IFeedConnectionManager.java       |   75 --
 .../common/feeds/api/IFeedFrameHandler.java     |   39 -
 .../asterix/common/feeds/api/IFeedJoint.java    |  121 --
 .../api/IFeedLifecycleEventSubscriber.java      |   36 -
 .../IFeedLifecycleIntakeEventSubscriber.java    |   28 -
 .../feeds/api/IFeedLifecycleListener.java       |   56 -
 .../common/feeds/api/IFeedLoadManager.java      |   60 -
 .../asterix/common/feeds/api/IFeedManager.java  |   97 --
 .../common/feeds/api/IFeedMemoryComponent.java  |   58 -
 .../common/feeds/api/IFeedMemoryManager.java    |   58 -
 .../asterix/common/feeds/api/IFeedMessage.java  |   52 -
 .../common/feeds/api/IFeedMessageService.java   |   34 -
 .../common/feeds/api/IFeedMetadataManager.java  |   39 -
 .../common/feeds/api/IFeedMetricCollector.java  |   50 -
 .../api/IFeedOperatorOutputSideHandler.java     |   36 -
 .../asterix/common/feeds/api/IFeedProvider.java |   26 -
 .../asterix/common/feeds/api/IFeedRuntime.java  |   62 -
 .../asterix/common/feeds/api/IFeedService.java  |   26 -
 .../feeds/api/IFeedSubscriptionManager.java     |   41 -
 .../common/feeds/api/IFeedTrackingManager.java  |   29 -
 .../asterix/common/feeds/api/IFeedWork.java     |   28 -
 .../feeds/api/IFeedWorkEventListener.java       |   41 -
 .../common/feeds/api/IFeedWorkManager.java      |   25 -
 .../common/feeds/api/IFrameEventCallback.java   |   32 -
 .../common/feeds/api/IFramePostProcessor.java   |   28 -
 .../common/feeds/api/IFramePreprocessor.java    |   26 -
 .../feeds/api/IIntakeProgressTracker.java       |   29 -
 .../common/feeds/api/IMessageReceiver.java      |   28 -
 .../common/feeds/api/ISubscribableRuntime.java  |   61 -
 .../common/feeds/api/ISubscriberRuntime.java    |   30 -
 .../common/feeds/api/ISubscriptionProvider.java |   29 -
 .../feeds/api/ITupleTrackingFeedAdapter.java    |   24 -
 .../common/feeds/message/EndFeedMessage.java    |   97 --
 .../feeds/message/FeedCongestionMessage.java    |  103 --
 .../common/feeds/message/FeedMessage.java       |   42 -
 .../common/feeds/message/FeedReportMessage.java |  100 --
 .../common/feeds/message/NodeReportMessage.java |   69 -
 .../feeds/message/ScaleInReportMessage.java     |  114 --
 .../feeds/message/StorageReportFeedMessage.java |  129 --
 .../message/ThrottlingEnabledFeedMessage.java   |   86 --
 .../asterix/common/parse/ITupleForwarder.java   |    5 +-
 .../asterix/common/utils/StoragePathUtil.java   |   61 +
 .../adapter/factory/GenericAdapterFactory.java  |    2 +-
 .../asterix/external/api/IAdapterFactory.java   |    1 -
 .../external/api/IAdapterRuntimeManager.java    |   83 ++
 .../external/api/IDataFlowController.java       |   15 +-
 .../external/api/IDataSourceAdapter.java        |   51 +
 .../asterix/external/api/IFeedAdapter.java      |   50 +
 .../asterix/external/api/IStreamDataParser.java |   12 +-
 .../dataflow/AbstractDataFlowController.java    |    3 +-
 .../AbstractFeedDataFlowController.java         |   68 +
 .../dataflow/FeedRecordDataFlowController.java  |  114 ++
 .../dataflow/FeedStreamDataFlowController.java  |   87 ++
 .../external/dataflow/FeedTupleForwarder.java   |   83 ++
 .../dataflow/RecordDataFlowController.java      |   12 +-
 .../dataflow/StreamDataFlowController.java      |   10 +
 .../dataset/adapter/AdapterIdentifier.java      |   66 +
 .../dataset/adapter/GenericAdapter.java         |   19 +-
 .../dataset/adapter/StreamBasedAdapter.java     |   65 -
 .../external/feed/api/ICentralFeedManager.java  |   34 +
 .../external/feed/api/IExceptionHandler.java    |   43 +
 .../apache/asterix/external/feed/api/IFeed.java |   55 +
 .../feed/api/IFeedConnectionManager.java        |   75 ++
 .../external/feed/api/IFeedFrameHandler.java    |   39 +
 .../asterix/external/feed/api/IFeedJoint.java   |  121 ++
 .../feed/api/IFeedLifecycleEventSubscriber.java |   36 +
 .../IFeedLifecycleIntakeEventSubscriber.java    |   28 +
 .../feed/api/IFeedLifecycleListener.java        |   56 +
 .../external/feed/api/IFeedLoadManager.java     |   60 +
 .../asterix/external/feed/api/IFeedManager.java |   72 ++
 .../external/feed/api/IFeedMemoryComponent.java |   58 +
 .../external/feed/api/IFeedMemoryManager.java   |   58 +
 .../asterix/external/feed/api/IFeedMessage.java |   52 +
 .../external/feed/api/IFeedMessageService.java  |   34 +
 .../external/feed/api/IFeedMetadataManager.java |   39 +
 .../external/feed/api/IFeedMetricCollector.java |   50 +
 .../api/IFeedOperatorOutputSideHandler.java     |   49 +
 .../external/feed/api/IFeedProvider.java        |   26 +
 .../asterix/external/feed/api/IFeedRuntime.java |   62 +
 .../asterix/external/feed/api/IFeedService.java |   26 +
 .../feed/api/IFeedSubscriptionManager.java      |   41 +
 .../external/feed/api/IFeedTrackingManager.java |   29 +
 .../asterix/external/feed/api/IFeedWork.java    |   28 +
 .../feed/api/IFeedWorkEventListener.java        |   41 +
 .../external/feed/api/IFeedWorkManager.java     |   25 +
 .../external/feed/api/IFrameEventCallback.java  |   32 +
 .../external/feed/api/IFramePostProcessor.java  |   28 +
 .../external/feed/api/IFramePreprocessor.java   |   26 +
 .../feed/api/IIntakeProgressTracker.java        |   29 +
 .../external/feed/api/IMessageReceiver.java     |   28 +
 .../external/feed/api/ISubscribableRuntime.java |   61 +
 .../external/feed/api/ISubscriberRuntime.java   |   30 +
 .../feed/api/ISubscriptionProvider.java         |   29 +
 .../CollectTransformFeedFrameWriter.java        |  119 ++
 .../external/feed/dataflow/DataBucket.java      |   89 ++
 .../external/feed/dataflow/DataBucketPool.java  |  110 ++
 .../dataflow/DistributeFeedFrameWriter.java     |  159 +++
 .../FeedCollectRuntimeInputHandler.java         |   64 +
 .../feed/dataflow/FeedExceptionHandler.java     |   80 ++
 .../external/feed/dataflow/FeedFrameCache.java  |  172 +++
 .../feed/dataflow/FeedFrameCollector.java       |  160 +++
 .../feed/dataflow/FeedFrameDiscarder.java       |   67 +
 .../feed/dataflow/FeedFrameHandlers.java        |  305 +++++
 .../feed/dataflow/FeedFrameSpiller.java         |  188 +++
 .../feed/dataflow/FeedFrameTupleAccessor.java   |  110 ++
 .../feed/dataflow/FeedFrameTupleDecorator.java  |  108 ++
 .../feed/dataflow/FeedRuntimeInputHandler.java  |  468 +++++++
 .../external/feed/dataflow/FrameCollection.java |  101 ++
 .../feed/dataflow/FrameDistributor.java         |  361 ++++++
 .../feed/dataflow/FrameEventCallback.java       |  103 ++
 .../feed/dataflow/StorageFrameHandler.java      |  119 ++
 .../feed/management/FeedCollectInfo.java        |   52 +
 .../feed/management/FeedConnectionId.java       |   74 ++
 .../feed/management/FeedConnectionManager.java  |  107 ++
 .../feed/management/FeedConnectionRequest.java  |  125 ++
 .../external/feed/management/FeedId.java        |   66 +
 .../external/feed/management/FeedInfo.java      |   53 +
 .../external/feed/management/FeedJointKey.java  |   79 ++
 .../FeedLifecycleEventSubscriber.java           |   66 +
 .../external/feed/management/FeedManager.java   |  137 ++
 .../feed/management/FeedMemoryManager.java      |  114 ++
 .../feed/management/FeedMetadataManager.java    |  112 ++
 .../feed/management/FeedRuntimeManager.java     |   83 ++
 .../management/FeedSubscriptionManager.java     |   76 ++
 .../feed/management/FeedWorkManager.java        |   50 +
 .../external/feed/message/EndFeedMessage.java   |   96 ++
 .../feed/message/FeedCongestionMessage.java     |  102 ++
 .../external/feed/message/FeedMessage.java      |   42 +
 .../feed/message/FeedMessageService.java        |  145 +++
 .../feed/message/FeedReportMessage.java         |   99 ++
 .../feed/message/FeedTupleCommitAckMessage.java |   98 ++
 .../message/FeedTupleCommitResponseMessage.java |   81 ++
 .../external/feed/message/MessageListener.java  |  126 ++
 .../external/feed/message/MessageReceiver.java  |  111 ++
 .../feed/message/NodeReportMessage.java         |   68 +
 .../feed/message/PrepareStallMessage.java       |   68 +
 .../message/RemoteSocketMessageListener.java    |  134 ++
 .../feed/message/ScaleInReportMessage.java      |  113 ++
 .../feed/message/SocketMessageListener.java     |  160 +++
 .../feed/message/StorageReportFeedMessage.java  |  128 ++
 .../feed/message/TerminateDataFlowMessage.java  |   52 +
 .../message/ThrottlingEnabledFeedMessage.java   |   85 ++
 .../external/feed/message/XAQLFeedMessage.java  |   66 +
 .../external/feed/policy/FeedPolicy.java        |   80 ++
 .../feed/policy/FeedPolicyAccessor.java         |  185 +++
 .../feed/policy/FeedPolicyEnforcer.java         |   49 +
 .../external/feed/runtime/AdapterExecutor.java  |   81 ++
 .../feed/runtime/AdapterRuntimeManager.java     |  146 +++
 .../feed/runtime/CollectionRuntime.java         |   96 ++
 .../external/feed/runtime/FeedRuntime.java      |   75 ++
 .../external/feed/runtime/FeedRuntimeId.java    |   80 ++
 .../external/feed/runtime/IngestionRuntime.java |   83 ++
 .../feed/runtime/SubscribableFeedRuntimeId.java |   53 +
 .../feed/runtime/SubscribableRuntime.java       |   95 ++
 .../feed/watch/BasicMonitoredBuffer.java        |   80 ++
 .../feed/watch/ComputeSideMonitoredBuffer.java  |   79 ++
 .../external/feed/watch/FeedActivity.java       |  116 ++
 .../external/feed/watch/FeedConnectJobInfo.java |   94 ++
 .../external/feed/watch/FeedIntakeInfo.java     |   63 +
 .../external/feed/watch/FeedJobInfo.java        |   86 ++
 .../feed/watch/FeedMetricCollector.java         |  189 +++
 .../feed/watch/IntakePartitionStatistics.java   |   41 +
 .../feed/watch/IntakeSideMonitoredBuffer.java   |   80 ++
 .../external/feed/watch/MonitoredBuffer.java    |  396 ++++++
 .../feed/watch/MonitoredBufferTimerTasks.java   |  299 +++++
 .../asterix/external/feed/watch/NodeLoad.java   |   62 +
 .../external/feed/watch/NodeLoadReport.java     |  100 ++
 .../feed/watch/NodeLoadReportService.java       |  107 ++
 .../asterix/external/feed/watch/Series.java     |   44 +
 .../asterix/external/feed/watch/SeriesAvg.java  |   47 +
 .../asterix/external/feed/watch/SeriesRate.java |   92 ++
 .../feed/watch/StorageSideMonitoredBuffer.java  |  211 ++++
 .../external/feeds/FeedPolicyEnforcer.java      |   50 -
 .../reader/AbstractStreamRecordReader.java      |   17 +-
 .../input/record/reader/LineRecordReader.java   |   10 +-
 .../record/reader/QuotedLineRecordReader.java   |    4 +
 .../reader/SemiStructuredRecordReader.java      |   12 +-
 .../record/reader/TwitterPushRecordReader.java  |   18 +-
 .../factory/TwitterRecordReaderFactory.java     |    2 +-
 .../input/stream/AInputStreamReader.java        |    9 +
 .../input/stream/HDFSInputStreamProvider.java   |    2 +-
 .../stream/LocalFSInputStreamProvider.java      |   42 +-
 .../stream/LocalFileSystemInputStream.java      |  125 ++
 .../TwitterFirehoseInputStreamProvider.java     |    2 +-
 .../LocalFSInputStreamProviderFactory.java      |   41 +-
 .../external/library/ExternalFunction.java      |    2 +-
 .../external/library/ResultCollector.java       |    1 +
 .../external/library/java/JObjectUtil.java      |   44 +-
 .../ExternalDataScanOperatorDescriptor.java     |   74 ++
 ...DatasetIndexesRecoverOperatorDescriptor.java |    1 -
 .../FeedCollectOperatorDescriptor.java          |  171 +++
 .../FeedCollectOperatorNodePushable.java        |  207 +++
 .../operators/FeedIntakeOperatorDescriptor.java |  138 ++
 .../FeedIntakeOperatorNodePushable.java         |  216 ++++
 .../FeedMessageOperatorDescriptor.java          |   53 +
 .../FeedMessageOperatorNodePushable.java        |  303 +++++
 .../operators/FeedMetaComputeNodePushable.java  |  224 ++++
 .../operators/FeedMetaNodePushable.java         |  184 +++
 .../operators/FeedMetaOperatorDescriptor.java   |  132 ++
 .../operators/FeedMetaStoreNodePushable.java    |  220 ++++
 .../asterix/external/parser/ADMDataParser.java  |    7 +
 .../external/parser/DelimitedDataParser.java    |    8 +-
 .../provider/AdapterFactoryProvider.java        |   14 +-
 .../provider/DataflowControllerProvider.java    |   16 +-
 .../provider/DatasourceFactoryProvider.java     |    7 +-
 .../asterix/external/runtime/DataGenerator.java | 1188 ------------------
 .../runtime/GenericSocketFeedAdapter.java       |  118 --
 .../GenericSocketFeedAdapterFactory.java        |  154 ---
 .../external/runtime/SocketClientAdapter.java   |  106 --
 .../runtime/SocketClientAdapterFactory.java     |   77 --
 .../external/runtime/TweetGenerator.java        |  156 ---
 .../external/util/DNSResolverFactory.java       |   36 -
 .../asterix/external/util/DataGenerator.java    | 1186 +++++++++++++++++
 .../asterix/external/util/DataflowUtils.java    |   11 +-
 .../apache/asterix/external/util/Datatypes.java |    9 +-
 .../util/ExternalDataCompatibilityUtils.java    |   35 +-
 .../external/util/ExternalDataConstants.java    |   18 +-
 .../util/ExternalDataExceptionUtils.java        |   22 +
 .../external/util/ExternalDataUtils.java        |   37 +-
 .../asterix/external/util/FeedConstants.java    |   76 ++
 .../asterix/external/util/FeedFrameUtil.java    |  102 ++
 .../asterix/external/util/FeedLogManager.java   |  152 +++
 .../apache/asterix/external/util/FeedUtils.java |   69 +
 .../external/util/FileSystemWatcher.java        |  244 ++++
 .../external/util/LocalFileSystemUtils.java     |   75 ++
 .../external/util/NodeResolverFactory.java      |   36 +
 .../asterix/external/util/TweetGenerator.java   |  156 +++
 .../asterix/external/util/TweetProcessor.java   |  121 --
 .../external/library/UpperCaseFunction.java     |   10 +-
 .../library/adapter/TestTypedAdapter.java       |   43 +-
 .../adapter/TestTypedAdapterFactory.java        |    2 +-
 .../aql/statement/SubscribeFeedStatement.java   |   21 +-
 asterix-lang-aql/src/main/javacc/AQL.jj         |    2 +-
 .../lang/common/statement/DatasetDecl.java      |    2 +-
 .../lang/common/visitor/FormatPrintVisitor.java |    2 +-
 .../visitor/VariableCheckAndRewriteVisitor.java |    2 +-
 asterix-lang-sqlpp/src/main/javacc/SQLPP.jj     |    2 +-
 .../apache/asterix/metadata/MetadataCache.java  |  138 +-
 .../asterix/metadata/MetadataManager.java       |   15 +-
 .../apache/asterix/metadata/MetadataNode.java   |   26 +-
 .../metadata/MetadataTransactionContext.java    |   23 +-
 .../asterix/metadata/api/IMetadataEntity.java   |    6 +-
 .../asterix/metadata/api/IMetadataManager.java  |   41 +-
 .../asterix/metadata/api/IMetadataNode.java     |    8 +-
 .../metadata/bootstrap/MetadataBootstrap.java   |   42 +-
 .../metadata/bootstrap/MetadataConstants.java   |   33 -
 .../metadata/bootstrap/MetadataIndex.java       |    1 +
 .../MetadataIndexImmutableProperties.java       |    2 +
 .../metadata/declared/AqlMetadataProvider.java  |   48 +-
 .../metadata/declared/FeedDataSource.java       |   17 +-
 .../metadata/entities/CompactionPolicy.java     |    6 +-
 .../asterix/metadata/entities/Dataset.java      |    6 +-
 .../metadata/entities/DatasourceAdapter.java    |   14 +-
 .../asterix/metadata/entities/Datatype.java     |    6 +-
 .../asterix/metadata/entities/Dataverse.java    |    6 +-
 .../apache/asterix/metadata/entities/Feed.java  |   72 +-
 .../asterix/metadata/entities/FeedPolicy.java   |   99 --
 .../metadata/entities/FeedPolicyEntity.java     |   49 +
 .../asterix/metadata/entities/Function.java     |    6 +-
 .../apache/asterix/metadata/entities/Index.java |    6 +-
 .../asterix/metadata/entities/Library.java      |    6 +-
 .../asterix/metadata/entities/NodeGroup.java    |    6 +-
 .../asterix/metadata/entities/PrimaryFeed.java  |   80 --
 .../metadata/entities/SecondaryFeed.java        |   64 -
 .../DatasourceAdapterTupleTranslator.java       |    7 +-
 .../FeedPolicyTupleTranslator.java              |   14 +-
 .../FeedTupleTranslator.java                    |   37 +-
 .../feeds/AbstractDatasourceAdapter.java        |   43 -
 .../feeds/AbstractFeedDatasourceAdapter.java    |   39 -
 .../asterix/metadata/feeds/AdapterExecutor.java |   74 --
 .../metadata/feeds/AdapterIdentifier.java       |   66 -
 .../metadata/feeds/AdapterRuntimeManager.java   |  131 --
 .../metadata/feeds/BuiltinFeedPolicies.java     |   64 +-
 .../feeds/CollectTransformFeedFrameWriter.java  |  120 --
 .../ExternalDataScanOperatorDescriptor.java     |   74 --
 .../metadata/feeds/FeedActivityIdFactory.java   |   40 -
 .../feeds/FeedCollectOperatorDescriptor.java    |  170 ---
 .../feeds/FeedCollectOperatorNodePushable.java  |  208 ---
 .../metadata/feeds/FeedConnectionManager.java   |  109 --
 .../metadata/feeds/FeedFrameTupleDecorator.java |  108 --
 .../feeds/FeedIntakeOperatorDescriptor.java     |  136 --
 .../feeds/FeedIntakeOperatorNodePushable.java   |  215 ----
 .../feeds/FeedLifecycleEventSubscriber.java     |   66 -
 .../feeds/FeedMessageOperatorDescriptor.java    |   53 -
 .../feeds/FeedMessageOperatorNodePushable.java  |  302 -----
 .../feeds/FeedMetaComputeNodePushable.java      |  227 ----
 .../metadata/feeds/FeedMetaNodePushable.java    |  189 ---
 .../feeds/FeedMetaOperatorDescriptor.java       |  132 --
 .../feeds/FeedMetaStoreNodePushable.java        |  224 ----
 .../metadata/feeds/FeedMetadataUtil.java        |  583 +++++++++
 .../metadata/feeds/FeedSubscriptionManager.java |   76 --
 .../apache/asterix/metadata/feeds/FeedUtil.java |  590 ---------
 .../asterix/metadata/feeds/FeedWorkManager.java |   50 -
 .../metadata/feeds/IAdapterExecutor.java        |   40 -
 .../asterix/metadata/feeds/IFeedMessage.java    |   32 -
 .../metadata/feeds/ITypedAdapterFactory.java    |   31 -
 .../asterix/metadata/feeds/MessageListener.java |  138 --
 .../metadata/feeds/PrepareStallMessage.java     |   70 --
 .../feeds/RemoteSocketMessageListener.java      |  134 --
 .../metadata/feeds/SocketMessageListener.java   |  160 ---
 .../feeds/TerminateDataFlowMessage.java         |   54 -
 .../asterix/metadata/feeds/XAQLFeedMessage.java |   68 -
 .../asterix/metadata/utils/DatasetUtils.java    |    2 +-
 .../utils/SplitsAndConstraintsUtil.java         |   79 +-
 .../om/util/AsterixClusterProperties.java       |   24 +-
 418 files changed, 19689 insertions(+), 19427 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
index 35bc3fd..fb81885 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRandomPartitioningFeedComputationRule.java
@@ -20,11 +20,10 @@ package org.apache.asterix.optimizer.rules;
 
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.commons.lang3.mutable.MutableObject;
-
 import org.apache.asterix.metadata.declared.AqlDataSource;
 import org.apache.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
-import org.apache.asterix.metadata.declared.FeedDataSource;
 import org.apache.asterix.metadata.entities.Feed;
+import org.apache.asterix.metadata.declared.FeedDataSource;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
index dab6193..1945be3 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
@@ -22,8 +22,8 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
-import org.apache.asterix.common.feeds.FeedActivity.FeedActivityDetails;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
 import org.apache.asterix.metadata.declared.AqlDataSource;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.declared.AqlSourceId;
@@ -31,7 +31,7 @@ import org.apache.asterix.metadata.declared.FeedDataSource;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
 import org.apache.asterix.metadata.utils.DatasetUtils;
 import org.apache.asterix.om.base.AString;
@@ -154,7 +154,7 @@ public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
 
                 AqlSourceId asid = new AqlSourceId(dataverse, getTargetFeed);
                 String policyName = metadataProvider.getConfig().get(FeedActivityDetails.FEED_POLICY_NAME);
-                FeedPolicy policy = metadataProvider.findFeedPolicy(dataverse, policyName);
+                FeedPolicyEntity policy = metadataProvider.findFeedPolicy(dataverse, policyName);
                 if (policy == null) {
                     policy = BuiltinFeedPolicies.getFeedPolicy(policyName);
                     if (policy == null) {
@@ -193,7 +193,7 @@ public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
     }
 
     private AqlDataSource createFeedDataSource(AqlSourceId aqlId, String targetDataset, String sourceFeedName,
-            String subscriptionLocation, AqlMetadataProvider metadataProvider, FeedPolicy feedPolicy,
+            String subscriptionLocation, AqlMetadataProvider metadataProvider, FeedPolicyEntity feedPolicy,
             String outputType, String locations) throws AlgebricksException {
         if (!aqlId.getDataverseName().equals(
                 metadataProvider.getDefaultDataverse() == null ? null : metadataProvider.getDefaultDataverse()

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
index 1e24ea8..0c2463c 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
@@ -25,6 +25,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
 import org.apache.asterix.dataflow.data.common.AqlExpressionTypeComputer;
 import org.apache.asterix.metadata.api.IMetadataEntity;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index 87d4f1c..a161717 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -24,6 +24,7 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.statement.DatasetDecl;
@@ -32,7 +33,6 @@ import org.apache.asterix.lang.common.statement.DeleteStatement;
 import org.apache.asterix.lang.common.statement.DropStatement;
 import org.apache.asterix.lang.common.statement.InsertStatement;
 import org.apache.asterix.lang.common.statement.NodeGroupDropStatement;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.asterix.metadata.dataset.hints.DatasetHints;
 import org.apache.asterix.metadata.entities.AsterixBuiltinTypeMap;
 import org.apache.asterix.metadata.entities.Dataverse;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
index d4825c4..7221972 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
@@ -22,7 +22,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
 import org.apache.asterix.lang.common.base.Expression;
 import org.apache.asterix.lang.common.base.Statement.Kind;
 import org.apache.asterix.lang.common.expression.VariableExpr;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-algebra/src/main/javacc/AQLPlus.jj
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/javacc/AQLPlus.jj b/asterix-algebra/src/main/javacc/AQLPlus.jj
index 853631c..b10d837 100644
--- a/asterix-algebra/src/main/javacc/AQLPlus.jj
+++ b/asterix-algebra/src/main/javacc/AQLPlus.jj
@@ -41,6 +41,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.lang.aql.clause.DistinctClause;
@@ -100,7 +101,6 @@ import org.apache.asterix.lang.common.statement.WriteStatement;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.lang.common.struct.QuantifiedPair;
 import org.apache.asterix.lang.common.struct.VarIdentifier;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.hyracks.algebricks.core.algebra.expressions.IndexedNLJoinExpressionAnnotation;
 
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
index 4a8a323..6d0b321 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixAppRuntimeContext.java
@@ -39,7 +39,6 @@ import org.apache.asterix.common.context.AsterixFileMapManager;
 import org.apache.asterix.common.context.DatasetLifecycleManager;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.api.IFeedManager;
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 import org.apache.asterix.common.replication.IReplicaResourcesManager;
 import org.apache.asterix.common.replication.IReplicationChannel;
@@ -47,8 +46,9 @@ import org.apache.asterix.common.replication.IReplicationManager;
 import org.apache.asterix.common.transactions.IAsterixAppRuntimeContextProvider;
 import org.apache.asterix.common.transactions.IRecoveryManager;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.management.FeedManager;
 import org.apache.asterix.common.transactions.ITransactionSubsystem;
-import org.apache.asterix.feeds.FeedManager;
 import org.apache.asterix.metadata.bootstrap.MetadataIndexImmutableProperties;
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.asterix.replication.management.ReplicationChannel;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java b/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java
deleted file mode 100644
index 6aad64b..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/api/common/FeedWorkCollection.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.api.common;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
-import org.apache.asterix.common.feeds.FeedConnectionRequest.ConnectionStatus;
-import org.apache.asterix.common.feeds.api.IFeedWork;
-import org.apache.asterix.common.feeds.api.IFeedWorkEventListener;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.feeds.FeedCollectInfo;
-import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.statement.DataverseDecl;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.api.job.JobId;
-
-/**
- * A collection of feed management related task, each represented as an implementation of {@code IFeedWork}.
- */
-public class FeedWorkCollection {
-
-    private static Logger LOGGER = Logger.getLogger(FeedWorkCollection.class.getName());
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    /**
-     * The task of subscribing to a feed to obtain data.
-     */
-    public static class SubscribeFeedWork implements IFeedWork {
-
-        private final Runnable runnable;
-
-        private final FeedConnectionRequest request;
-
-        @Override
-        public Runnable getRunnable() {
-            return runnable;
-        }
-
-        public SubscribeFeedWork(String[] locations, FeedConnectionRequest request) {
-            this.runnable = new SubscribeFeedWorkRunnable(locations, request);
-            this.request = request;
-        }
-
-        private static class SubscribeFeedWorkRunnable implements Runnable {
-
-            private final FeedConnectionRequest request;
-            private final String[] locations;
-
-            public SubscribeFeedWorkRunnable(String[] locations, FeedConnectionRequest request) {
-                this.request = request;
-                this.locations = locations;
-            }
-
-            @Override
-            public void run() {
-                try {
-                    PrintWriter writer = new PrintWriter(System.out, true);
-                    SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-                    DataverseDecl dataverseDecl = new DataverseDecl(
-                            new Identifier(request.getReceivingFeedId().getDataverse()));
-                    SubscribeFeedStatement subscribeStmt = new SubscribeFeedStatement(locations, request);
-                    List<Statement> statements = new ArrayList<Statement>();
-                    statements.add(dataverseDecl);
-                    statements.add(subscribeStmt);
-                    QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-                    translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                            QueryTranslator.ResultDelivery.SYNC);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Submitted connection requests for execution: " + request);
-                    }
-                } catch (Exception e) {
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe("Exception in executing " + request);
-                    }
-                    throw new RuntimeException(e);
-                }
-            }
-        }
-
-        public static class FeedSubscribeWorkEventListener implements IFeedWorkEventListener {
-
-            @Override
-            public void workFailed(IFeedWork work, Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request
-                            + " failed with exception " + e);
-                }
-            }
-
-            @Override
-            public void workCompleted(IFeedWork work) {
-                ((SubscribeFeedWork) work).request.setSubscriptionStatus(ConnectionStatus.ACTIVE);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request + " completed ");
-                }
-            }
-
-        }
-
-        public FeedConnectionRequest getRequest() {
-            return request;
-        }
-
-        @Override
-        public String toString() {
-            return "SubscribeFeedWork for [" + request + "]";
-        }
-
-    }
-
-    /**
-     * The task of activating a set of feeds.
-     */
-    public static class ActivateFeedWork implements IFeedWork {
-
-        private final Runnable runnable;
-
-        @Override
-        public Runnable getRunnable() {
-            return runnable;
-        }
-
-        public ActivateFeedWork(List<FeedCollectInfo> feedsToRevive) {
-            this.runnable = new FeedsActivateRunnable(feedsToRevive);
-        }
-
-        public ActivateFeedWork() {
-            this.runnable = new FeedsActivateRunnable();
-        }
-
-        private static class FeedsActivateRunnable implements Runnable {
-
-            private List<FeedCollectInfo> feedsToRevive;
-            private Mode mode;
-
-            public enum Mode {
-                REVIVAL_POST_NODE_REJOIN
-            }
-
-            public FeedsActivateRunnable(List<FeedCollectInfo> feedsToRevive) {
-                this.feedsToRevive = feedsToRevive;
-            }
-
-            public FeedsActivateRunnable() {
-            }
-
-            @Override
-            public void run() {
-                switch (mode) {
-                    case REVIVAL_POST_NODE_REJOIN:
-                        try {
-                            Thread.sleep(10000);
-                        } catch (InterruptedException e1) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Attempt to resume feed interrupted");
-                            }
-                            throw new IllegalStateException(e1.getMessage());
-                        }
-                        for (FeedCollectInfo finfo : feedsToRevive) {
-                            try {
-                                JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
-                                if (LOGGER.isLoggable(Level.INFO)) {
-                                    LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
-                                    LOGGER.info("Job:" + finfo.jobSpec);
-                                }
-                            } catch (Exception e) {
-                                if (LOGGER.isLoggable(Level.WARNING)) {
-                                    LOGGER.warning(
-                                            "Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
-                                }
-                            }
-                        }
-                }
-            }
-
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
index 1e6cc66..4df461b 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
@@ -29,7 +29,7 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.asterix.feeds.CentralFeedManager;
+import org.apache.asterix.feed.CentralFeedManager;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
index 35efcb1..6957926 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
@@ -32,13 +32,13 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.asterix.common.feeds.FeedActivity;
-import org.apache.asterix.common.feeds.FeedActivity.FeedActivityDetails;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.api.IFeedLoadManager;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.feeds.CentralFeedManager;
+import org.apache.asterix.external.feed.api.IFeedLoadManager;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
+import org.apache.asterix.feed.CentralFeedManager;
 
 public class FeedServlet extends HttpServlet {
     private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
index f1473f4..d459775 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
@@ -26,9 +26,9 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.feeds.FeedLifecycleListener;
-import org.apache.asterix.metadata.feeds.RemoteSocketMessageListener;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.RemoteSocketMessageListener;
+import org.apache.asterix.feed.FeedLifecycleListener;
 
 public class FeedServletUtil {
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
index 545cb67..3b6af6d 100644
--- a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
+++ b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
@@ -48,26 +48,30 @@ import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransaction
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
 import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedActivity.FeedActivityDetails;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedJointKey;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.api.IFeedJoint;
-import org.apache.asterix.common.feeds.api.IFeedJoint.FeedJointType;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.feed.api.IFeed;
+import org.apache.asterix.external.feed.api.IFeed.FeedType;
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedJoint.FeedJointType;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+import org.apache.asterix.external.feed.management.FeedLifecycleEventSubscriber;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
 import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.feeds.CentralFeedManager;
-import org.apache.asterix.feeds.FeedJoint;
-import org.apache.asterix.feeds.FeedLifecycleListener;
+import org.apache.asterix.feed.CentralFeedManager;
+import org.apache.asterix.feed.FeedJoint;
+import org.apache.asterix.feed.FeedLifecycleListener;
 import org.apache.asterix.file.DatasetOperations;
 import org.apache.asterix.file.DataverseOperations;
 import org.apache.asterix.file.ExternalIndexingOperations;
@@ -120,7 +124,6 @@ import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.api.IMetadataEntity;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.asterix.metadata.dataset.hints.DatasetHints;
 import org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetNodegroupCardinalityHint;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
@@ -130,16 +133,12 @@ import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.Feed.FeedType;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.entities.NodeGroup;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
-import org.apache.asterix.metadata.entities.SecondaryFeed;
-import org.apache.asterix.metadata.feeds.FeedLifecycleEventSubscriber;
-import org.apache.asterix.metadata.feeds.FeedUtil;
+import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
 import org.apache.asterix.metadata.utils.DatasetUtils;
 import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
 import org.apache.asterix.metadata.utils.MetadataLockManager;
@@ -239,11 +238,11 @@ public class QueryTranslator extends AbstractLangTranslator {
     /**
      * Compiles and submits for execution a list of AQL statements.
      * @param hcc
-     *        A Hyracks client connection that is used to submit a jobspec to Hyracks.
+     *            A Hyracks client connection that is used to submit a jobspec to Hyracks.
      * @param hdc
-     *        A Hyracks dataset client object that is used to read the results.
+     *            A Hyracks dataset client object that is used to read the results.
      * @param resultDelivery
-     *        True if the results should be read asynchronously or false if we should wait for results to be read.
+     *            True if the results should be read asynchronously or false if we should wait for results to be read.
      * @return A List<QueryResult> containing a QueryResult instance corresponding to each submitted query.
      * @throws Exception
      */
@@ -1933,13 +1932,13 @@ public class QueryTranslator extends AbstractLangTranslator {
                 case CREATE_PRIMARY_FEED:
                     CreatePrimaryFeedStatement cpfs = (CreatePrimaryFeedStatement) stmt;
                     String adaptorName = cpfs.getAdaptorName();
-                    feed = new PrimaryFeed(dataverseName, feedName, adaptorName, cpfs.getAdaptorConfiguration(),
-                            cfs.getAppliedFunction());
+                    feed = new Feed(dataverseName, feedName, cfs.getAppliedFunction(), FeedType.PRIMARY, feedName,
+                            adaptorName, cpfs.getAdaptorConfiguration());
                     break;
                 case CREATE_SECONDARY_FEED:
                     CreateSecondaryFeedStatement csfs = (CreateSecondaryFeedStatement) stmt;
-                    feed = new SecondaryFeed(dataverseName, feedName, csfs.getSourceFeedName(),
-                            csfs.getAppliedFunction());
+                    feed = new Feed(dataverseName, feedName, csfs.getAppliedFunction(), FeedType.SECONDARY,
+                            csfs.getSourceFeedName(), null, null);
                     break;
                 default:
                     throw new IllegalStateException();
@@ -1961,14 +1960,14 @@ public class QueryTranslator extends AbstractLangTranslator {
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         String dataverse;
         String policy;
-        FeedPolicy newPolicy = null;
+        FeedPolicyEntity newPolicy = null;
         CreateFeedPolicyStatement cfps = (CreateFeedPolicyStatement) stmt;
         dataverse = getActiveDataverse(null);
         policy = cfps.getPolicyName();
         MetadataLockManager.INSTANCE.createFeedPolicyBegin(dataverse, dataverse + "." + policy);
         try {
-            FeedPolicy feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(),
-                    dataverse, policy);
+            FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE
+                    .getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, policy);
             if (feedPolicy != null) {
                 if (cfps.getIfNotExists()) {
                     MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -1980,7 +1979,7 @@ public class QueryTranslator extends AbstractLangTranslator {
             boolean extendingExisting = cfps.getSourcePolicyName() != null;
             String description = cfps.getDescription() == null ? "" : cfps.getDescription();
             if (extendingExisting) {
-                FeedPolicy sourceFeedPolicy = MetadataManager.INSTANCE
+                FeedPolicyEntity sourceFeedPolicy = MetadataManager.INSTANCE
                         .getFeedPolicy(metadataProvider.getMetadataTxnContext(), dataverse, cfps.getSourcePolicyName());
                 if (sourceFeedPolicy == null) {
                     sourceFeedPolicy = MetadataManager.INSTANCE.getFeedPolicy(metadataProvider.getMetadataTxnContext(),
@@ -1991,7 +1990,7 @@ public class QueryTranslator extends AbstractLangTranslator {
                 }
                 Map<String, String> policyProperties = sourceFeedPolicy.getProperties();
                 policyProperties.putAll(cfps.getProperties());
-                newPolicy = new FeedPolicy(dataverse, policy, description, policyProperties);
+                newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
             } else {
                 Properties prop = new Properties();
                 try {
@@ -2004,7 +2003,7 @@ public class QueryTranslator extends AbstractLangTranslator {
                 for (Entry<Object, Object> entry : prop.entrySet()) {
                     policyProperties.put((String) entry.getKey(), (String) entry.getValue());
                 }
-                newPolicy = new FeedPolicy(dataverse, policy, description, policyProperties);
+                newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
             }
             MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, newPolicy);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -2071,7 +2070,7 @@ public class QueryTranslator extends AbstractLangTranslator {
         MetadataLockManager.INSTANCE.dropFeedPolicyBegin(dataverseName, dataverseName + "." + policyName);
 
         try {
-            FeedPolicy feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
+            FeedPolicyEntity feedPolicy = MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverseName, policyName);
             if (feedPolicy == null) {
                 if (!stmtFeedPolicyDrop.getIfExists()) {
                     throw new AlgebricksException("Unknown policy " + policyName + " in dataverse " + dataverseName);
@@ -2112,10 +2111,10 @@ public class QueryTranslator extends AbstractLangTranslator {
             CompiledConnectFeedStatement cbfs = new CompiledConnectFeedStatement(dataverseName, cfs.getFeedName(),
                     cfs.getDatasetName().getValue(), cfs.getPolicy(), cfs.getQuery(), cfs.getVarCounter());
 
-            FeedUtil.validateIfDatasetExists(dataverseName, cfs.getDatasetName().getValue(),
+            FeedMetadataUtil.validateIfDatasetExists(dataverseName, cfs.getDatasetName().getValue(),
                     metadataProvider.getMetadataTxnContext());
 
-            Feed feed = FeedUtil.validateIfFeedExists(dataverseName, cfs.getFeedName(),
+            Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName(),
                     metadataProvider.getMetadataTxnContext());
 
             feedConnId = new FeedConnectionId(dataverseName, cfs.getFeedName(), cfs.getDatasetName().getValue());
@@ -2125,7 +2124,8 @@ public class QueryTranslator extends AbstractLangTranslator {
                         + cfs.getDatasetName().getValue());
             }
 
-            FeedPolicy feedPolicy = FeedUtil.validateIfPolicyExists(dataverseName, cbfs.getPolicyName(), mdTxnCtx);
+            FeedPolicyEntity feedPolicy = FeedMetadataUtil.validateIfPolicyExists(dataverseName, cbfs.getPolicyName(),
+                    mdTxnCtx);
 
             // All Metadata checks have passed. Feed connect request is valid. //
 
@@ -2139,8 +2139,8 @@ public class QueryTranslator extends AbstractLangTranslator {
             subscriberRegistered = true;
             if (createFeedIntakeJob) {
                 FeedId feedId = connectionRequest.getFeedJointKey().getFeedId();
-                PrimaryFeed primaryFeed = (PrimaryFeed) MetadataManager.INSTANCE.getFeed(mdTxnCtx,
-                        feedId.getDataverse(), feedId.getFeedName());
+                Feed primaryFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(),
+                        feedId.getFeedName());
                 Pair<JobSpecification, IAdapterFactory> pair = FeedOperations.buildFeedIntakeJobSpec(primaryFeed,
                         metadataProvider, policyAccessor);
                 // adapter configuration are valid at this stage
@@ -2205,7 +2205,7 @@ public class QueryTranslator extends AbstractLangTranslator {
      * @throws MetadataException
      */
     private Triple<FeedConnectionRequest, Boolean, List<IFeedJoint>> getFeedConnectionRequest(String dataverse,
-            Feed feed, String dataset, FeedPolicy feedPolicy, MetadataTransactionContext mdTxnCtx)
+            Feed feed, String dataset, FeedPolicyEntity feedPolicy, MetadataTransactionContext mdTxnCtx)
                     throws MetadataException {
         IFeedJoint sourceFeedJoint = null;
         FeedConnectionRequest request = null;
@@ -2269,12 +2269,12 @@ public class QueryTranslator extends AbstractLangTranslator {
     private FeedJointKey getFeedJointKey(Feed feed, MetadataTransactionContext ctx) throws MetadataException {
         Feed sourceFeed = feed;
         List<String> appliedFunctions = new ArrayList<String>();
-        while (sourceFeed.getFeedType().equals(FeedType.SECONDARY)) {
+        while (sourceFeed.getFeedType().equals(IFeed.FeedType.SECONDARY)) {
             if (sourceFeed.getAppliedFunction() != null) {
                 appliedFunctions.add(0, sourceFeed.getAppliedFunction().getName());
             }
             Feed parentFeed = MetadataManager.INSTANCE.getFeed(ctx, feed.getDataverseName(),
-                    ((SecondaryFeed) sourceFeed).getSourceFeedName());
+                    sourceFeed.getSourceFeedName());
             sourceFeed = parentFeed;
         }
 
@@ -2295,8 +2295,8 @@ public class QueryTranslator extends AbstractLangTranslator {
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
 
-        FeedUtil.validateIfDatasetExists(dataverseName, cfs.getDatasetName().getValue(), mdTxnCtx);
-        Feed feed = FeedUtil.validateIfFeedExists(dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
+        FeedMetadataUtil.validateIfDatasetExists(dataverseName, cfs.getDatasetName().getValue(), mdTxnCtx);
+        Feed feed = FeedMetadataUtil.validateIfFeedExists(dataverseName, cfs.getFeedName().getValue(), mdTxnCtx);
 
         FeedConnectionId connectionId = new FeedConnectionId(feed.getFeedId(), cfs.getDatasetName().getValue());
         boolean isFeedConnectionActive = FeedLifecycleListener.INSTANCE.isFeedConnectionActive(connectionId);
@@ -2369,7 +2369,7 @@ public class QueryTranslator extends AbstractLangTranslator {
 
         try {
 
-            JobSpecification alteredJobSpec = FeedUtil.alterJobSpecificationForFeed(compiled, feedConnectionId,
+            JobSpecification alteredJobSpec = FeedMetadataUtil.alterJobSpecificationForFeed(compiled, feedConnectionId,
                     bfs.getSubscriptionRequest().getPolicyParameters());
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
             bActiveTxn = false;
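
The net effect of the entity changes in this file: the separate PrimaryFeed and SecondaryFeed classes are replaced by a single Feed entity whose FeedType field distinguishes the two cases. The fragment below is only a sketch restating the two constructor shapes from the call sites above, with placeholder variable names and parameter meanings inferred from those call sites; it is not part of the commit.

    // Sketch only; variable names are placeholders taken from the surrounding call sites.
    // Primary feed: its source feed is itself, and it owns an adaptor plus its configuration.
    Feed primary = new Feed(dataverseName, feedName, appliedFunction,
            FeedType.PRIMARY, feedName, adaptorName, adaptorConfiguration);
    // Secondary feed: derived from another feed, so it has no adaptor of its own.
    Feed secondary = new Feed(dataverseName, feedName, appliedFunction,
            FeedType.SECONDARY, sourceFeedName, null, null);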

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java b/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java
new file mode 100644
index 0000000..4020bde
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.feed;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+import java.util.List;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.api.ICentralFeedManager;
+import org.apache.asterix.external.feed.api.IFeedLoadManager;
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.message.SocketMessageListener;
+import org.apache.asterix.lang.aql.parser.AQLParserFactory;
+import org.apache.asterix.lang.common.base.IParser;
+import org.apache.asterix.lang.common.base.IParserFactory;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class CentralFeedManager implements ICentralFeedManager {
+
+    private static final ICentralFeedManager centralFeedManager = new CentralFeedManager();
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    public static ICentralFeedManager getInstance() {
+        return centralFeedManager;
+    }
+
+    private final int port;
+    private final IFeedLoadManager feedLoadManager;
+    private final IFeedTrackingManager feedTrackingManager;
+    private final SocketMessageListener messageListener;
+
+    private CentralFeedManager() {
+        this.port = AsterixAppContextInfo.getInstance().getFeedProperties().getFeedCentralManagerPort();
+        this.feedLoadManager = new FeedLoadManager();
+        this.feedTrackingManager = new FeedTrackingManager();
+        this.messageListener = new SocketMessageListener(port, new FeedMessageReceiver(this));
+    }
+
+    @Override
+    public void start() throws AsterixException {
+        messageListener.start();
+    }
+
+    @Override
+    public void stop() throws AsterixException, IOException {
+        messageListener.stop();
+    }
+
+    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobId jobId = hcc.startJob(spec);
+        if (waitForCompletion) {
+            hcc.waitForCompletion(jobId);
+        }
+        return jobId;
+    }
+
+    @Override
+    public IFeedLoadManager getFeedLoadManager() {
+        return feedLoadManager;
+    }
+
+    @Override
+    public IFeedTrackingManager getFeedTrackingManager() {
+        return feedTrackingManager;
+    }
+
+    public static class AQLExecutor {
+
+        private static final PrintWriter out = new PrintWriter(System.out, true);
+        private static final IParserFactory parserFactory = new AQLParserFactory();
+
+        public static void executeAQL(String aql) throws Exception {
+            IParser parser = parserFactory.createParser(new StringReader(aql));
+            List<Statement> statements = parser.parse();
+            SessionConfig pc = new SessionConfig(out, OutputFormat.ADM);
+            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                    QueryTranslator.ResultDelivery.SYNC);
+        }
+    }
+
+}
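
For orientation, the new CentralFeedManager above is a singleton that listens for feed messages on the configured feed-central-manager port and exposes two helpers: runJob, which submits a JobSpecification and optionally waits for completion, and AQLExecutor.executeAQL, which parses and runs a generated AQL string synchronously (ADM output goes to System.out). A hypothetical caller might use it as sketched below; the AQL text and the job spec variable are placeholders, not taken from the commit.

    // Hypothetical usage sketch, assuming a JobSpecification named jobSpec already exists.
    CentralFeedManager.getInstance().start();                      // begin listening for feed messages
    CentralFeedManager.AQLExecutor.executeAQL(
            "use dataverse feeds; /* generated statements */");    // compile and run synchronously
    JobId jobId = CentralFeedManager.runJob(jobSpec, false);       // submit without waiting for completion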


[07/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataGenerator.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataGenerator.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataGenerator.java
new file mode 100644
index 0000000..d999e7e
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataGenerator.java
@@ -0,0 +1,1186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import java.nio.CharBuffer;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
+
+public class DataGenerator {
+
+    private RandomDateGenerator randDateGen;
+    private RandomNameGenerator randNameGen;
+    private RandomMessageGenerator randMessageGen;
+    private RandomLocationGenerator randLocationGen;
+    private Random random = new Random();
+    private TwitterUser twUser = new TwitterUser();
+    private TweetMessage twMessage = new TweetMessage();
+    private static final String DEFAULT_COUNTRY = "US";
+
+    public DataGenerator(InitializationInfo info) {
+        initialize(info);
+    }
+
+    public class TweetMessageIterator implements Iterator<TweetMessage> {
+
+        private final int duration;
+        private long startTime = 0;
+        private int tweetId;
+
+        public TweetMessageIterator(int duration) {
+            this.duration = duration;
+            this.startTime = System.currentTimeMillis();
+        }
+
+        @Override
+        public boolean hasNext() {
+            if (duration == TweetGenerator.INFINITY) {
+                return true;
+            }
+            return System.currentTimeMillis() - startTime <= duration * 1000;
+        }
+
+        @Override
+        public TweetMessage next() {
+            tweetId++;
+            TweetMessage msg = null;
+            getTwitterUser(null);
+            Message message = randMessageGen.getNextRandomMessage();
+            Point location = randLocationGen.getRandomPoint();
+            DateTime sendTime = randDateGen.getNextRandomDatetime();
+            twMessage.reset(tweetId, twUser, location.getLatitude(), location.getLongitude(), sendTime.toString(),
+                    message, DEFAULT_COUNTRY);
+            msg = twMessage;
+            return msg;
+        }
+
+        @Override
+        public void remove() {
+            // TODO Auto-generated method stub
+
+        }
+
+    }
+
+    public static class InitializationInfo {
+        public Date startDate = new Date(1, 1, 2005);
+        public Date endDate = new Date(8, 20, 2012);
+        public String[] lastNames = DataGenerator.lastNames;
+        public String[] firstNames = DataGenerator.firstNames;
+        public String[] vendors = DataGenerator.vendors;
+        public String[] jargon = DataGenerator.jargon;
+        public String[] org_list = DataGenerator.org_list;
+    }
+
+    public void initialize(InitializationInfo info) {
+        randDateGen = new RandomDateGenerator(info.startDate, info.endDate);
+        randNameGen = new RandomNameGenerator(info.firstNames, info.lastNames);
+        randLocationGen = new RandomLocationGenerator(24, 49, 66, 98);
+        randMessageGen = new RandomMessageGenerator(info.vendors, info.jargon);
+    }
+
+    public void getTwitterUser(String usernameSuffix) {
+        String suggestedName = randNameGen.getRandomName();
+        String[] nameComponents = suggestedName.split(" ");
+        String screenName = nameComponents[0] + nameComponents[1] + randNameGen.getRandomNameSuffix();
+        String name = suggestedName;
+        if (usernameSuffix != null) {
+            name = name + usernameSuffix;
+        }
+        int numFriends = random.nextInt((int) (100)); // draw from Zipfian
+        int statusesCount = random.nextInt(500); // draw from Zipfian
+        int followersCount = random.nextInt((int) (200));
+        twUser.reset(screenName, numFriends, statusesCount, name, followersCount);
+    }
+
+    public static class RandomDateGenerator {
+
+        private final Date startDate;
+        private final Date endDate;
+        private final Random random = new Random();
+        private final int yearDifference;
+        private Date workingDate;
+        private Date recentDate;
+        private DateTime dateTime;
+
+        public RandomDateGenerator(Date startDate, Date endDate) {
+            this.startDate = startDate;
+            this.endDate = endDate;
+            this.yearDifference = endDate.getYear() - startDate.getYear() + 1;
+            this.workingDate = new Date();
+            this.recentDate = new Date();
+            this.dateTime = new DateTime();
+        }
+
+        public Date getStartDate() {
+            return startDate;
+        }
+
+        public Date getEndDate() {
+            return endDate;
+        }
+
+        public Date getNextRandomDate() {
+            int year = random.nextInt(yearDifference) + startDate.getYear();
+            int month;
+            int day;
+            if (year == endDate.getYear()) {
+                month = random.nextInt(endDate.getMonth()) + 1;
+                if (month == endDate.getMonth()) {
+                    day = random.nextInt(endDate.getDay()) + 1;
+                } else {
+                    day = random.nextInt(28) + 1;
+                }
+            } else {
+                month = random.nextInt(12) + 1;
+                day = random.nextInt(28) + 1;
+            }
+            workingDate.reset(month, day, year);
+            return workingDate;
+        }
+
+        public DateTime getNextRandomDatetime() {
+            Date randomDate = getNextRandomDate();
+            dateTime.reset(randomDate);
+            return dateTime;
+        }
+
+        public Date getNextRecentDate(Date date) {
+            int year = date.getYear()
+                    + (date.getYear() == endDate.getYear() ? 0 : random.nextInt(endDate.getYear() - date.getYear()));
+            int month = (year == endDate.getYear()) ? date.getMonth() == endDate.getMonth() ? (endDate.getMonth())
+                    : (date.getMonth() + random.nextInt(endDate.getMonth() - date.getMonth())) : random.nextInt(12) + 1;
+
+            int day = (year == endDate.getYear()) ? month == endDate.getMonth() ? date.getDay() == endDate.getDay() ? endDate
+                    .getDay() : date.getDay() + random.nextInt(endDate.getDay() - date.getDay())
+                    : random.nextInt(28) + 1
+                    : random.nextInt(28) + 1;
+            recentDate.reset(month, day, year);
+            return recentDate;
+        }
+
+    }
+
+    public static class DateTime extends Date {
+
+        private String hour = "10";
+        private String min = "10";
+        private String sec = "00";
+
+        public DateTime(int month, int day, int year, String hour, String min, String sec) {
+            super(month, day, year);
+            this.hour = hour;
+            this.min = min;
+            this.sec = sec;
+        }
+
+        public DateTime() {
+        }
+
+        public void reset(int month, int day, int year, String hour, String min, String sec) {
+            super.setMonth(month);
+            super.setDay(day);
+            super.setYear(year);
+            this.hour = hour;
+            this.min = min;
+            this.sec = sec;
+        }
+
+        public DateTime(Date date) {
+            super(date.getMonth(), date.getDay(), date.getYear());
+        }
+
+        public void reset(Date date) {
+            reset(date.getMonth(), date.getDay(), date.getYear());
+        }
+
+        public DateTime(Date date, int hour, int min, int sec) {
+            super(date.getMonth(), date.getDay(), date.getYear());
+            this.hour = (hour < 10 ? "0" : "") + hour;
+            this.min = (min < 10 ? "0" : "") + min;
+            this.sec = (sec < 10 ? "0" : "") + sec;
+        }
+
+        public String toString() {
+            StringBuilder builder = new StringBuilder();
+            builder.append("\"");
+            builder.append(super.getYear());
+            builder.append("-");
+            builder.append(super.getMonth() < 10 ? "0" + super.getMonth() : super.getMonth());
+            builder.append("-");
+            builder.append(super.getDay() < 10 ? "0" + super.getDay() : super.getDay());
+            builder.append("T");
+            builder.append(hour + ":" + min + ":" + sec);
+            builder.append("\"");
+            return builder.toString();
+        }
+    }
+
+    public static class Message {
+
+        private char[] message = new char[500];
+        private List<String> referredTopics;
+        private int length;
+
+        public Message(char[] m, List<String> referredTopics) {
+            System.arraycopy(m, 0, message, 0, m.length);
+            length = m.length;
+            this.referredTopics = referredTopics;
+        }
+
+        public Message() {
+            referredTopics = new ArrayList<String>();
+            length = 0;
+        }
+
+        public List<String> getReferredTopics() {
+            return referredTopics;
+        }
+
+        public void reset(char[] m, int offset, int length, List<String> referredTopics) {
+            System.arraycopy(m, offset, message, 0, length);
+            this.length = length;
+            this.referredTopics = referredTopics;
+        }
+
+        public int getLength() {
+            return length;
+        }
+
+        public char charAt(int index) {
+            return message[index];
+        }
+
+    }
+
+    public static class Point {
+
+        private float latitude;
+        private float longitude;
+
+        public float getLatitude() {
+            return latitude;
+        }
+
+        public float getLongitude() {
+            return longitude;
+        }
+
+        public Point(float latitude, float longitude) {
+            this.latitude = latitude;
+            this.longitude = longitude;
+        }
+
+        public void reset(float latitude, float longitude) {
+            this.latitude = latitude;
+            this.longitude = longitude;
+        }
+
+        public Point() {
+        }
+
+        public String toString() {
+            StringBuilder builder = new StringBuilder();
+            builder.append("point(\"" + latitude + "," + longitude + "\")");
+            return builder.toString();
+        }
+    }
+
+    public static class RandomNameGenerator {
+
+        private String[] firstNames;
+        private String[] lastNames;
+
+        private final Random random = new Random();
+
+        private final String[] connectors = new String[] { "_", "#", "$", "@" };
+
+        public RandomNameGenerator(String[] firstNames, String[] lastNames) {
+            this.firstNames = firstNames;
+            this.lastNames = lastNames;
+        }
+
+        public String getRandomName() {
+            String name;
+            name = getSuggestedName();
+            return name;
+
+        }
+
+        private String getSuggestedName() {
+            int firstNameIndex = random.nextInt(firstNames.length);
+            int lastNameIndex = random.nextInt(lastNames.length);
+            String suggestedName = firstNames[firstNameIndex] + " " + lastNames[lastNameIndex];
+            return suggestedName;
+        }
+
+        public String getRandomNameSuffix() {
+            return connectors[random.nextInt(connectors.length)] + random.nextInt(1000);
+        }
+    }
+
+    public static class RandomMessageGenerator {
+
+        private final MessageTemplate messageTemplate;
+
+        public RandomMessageGenerator(String[] vendors, String[] jargon) {
+            List<String> vendorList = new ArrayList<String>();
+            for (String v : vendors) {
+                vendorList.add(v);
+            }
+            List<String> jargonList = new ArrayList<String>();
+            for (String j : jargon) {
+                jargonList.add(j);
+            }
+            this.messageTemplate = new MessageTemplate(vendorList, jargonList);
+        }
+
+        public Message getNextRandomMessage() {
+            return messageTemplate.getNextMessage();
+        }
+    }
+
+    public static class AbstractMessageTemplate {
+
+        protected final Random random = new Random();
+
+        protected String[] positiveVerbs = new String[] { "like", "love" };
+        protected String[] negativeVerbs = new String[] { "dislike", "hate", "can't stand" };
+
+        protected String[] negativeAdjectives = new String[] { "horrible", "bad", "terrible", "OMG" };
+        protected String[] postiveAdjectives = new String[] { "good", "awesome", "amazing", "mind-blowing" };
+
+        protected String[] otherWords = new String[] { "the", "its" };
+    }
+
+    public static class MessageTemplate extends AbstractMessageTemplate {
+
+        private List<String> vendors;
+        private List<String> jargon;
+        private CharBuffer buffer;
+        private List<String> referredTopics;
+        private Message message = new Message();
+
+        public MessageTemplate(List<String> vendors, List<String> jargon) {
+            this.vendors = vendors;
+            this.jargon = jargon;
+            buffer = CharBuffer.allocate(2500);
+            referredTopics = new ArrayList<String>();
+        }
+
+        public Message getNextMessage() {
+            buffer.position(0);
+            buffer.limit(2500);
+            referredTopics.clear();
+            boolean isPositive = random.nextBoolean();
+            String[] verbArray = isPositive ? positiveVerbs : negativeVerbs;
+            String[] adjectiveArray = isPositive ? postiveAdjectives : negativeAdjectives;
+            String verb = verbArray[random.nextInt(verbArray.length)];
+            String adjective = adjectiveArray[random.nextInt(adjectiveArray.length)];
+
+            buffer.put(" ");
+            buffer.put(verb);
+            buffer.put(" ");
+            String vendor = vendors.get(random.nextInt(vendors.size()));
+            referredTopics.add(vendor);
+            buffer.append(vendor);
+            buffer.append(" ");
+            buffer.append(otherWords[random.nextInt(otherWords.length)]);
+            buffer.append(" ");
+            String jargonTerm = jargon.get(random.nextInt(jargon.size()));
+            referredTopics.add(jargonTerm);
+            buffer.append(jargonTerm);
+            buffer.append(" is ");
+            buffer.append(adjective);
+            if (random.nextBoolean()) {
+                buffer.append(isPositive ? ":)" : ":(");
+            }
+
+            buffer.flip();
+            message.reset(buffer.array(), 0, buffer.limit(), referredTopics);
+            return message;
+        }
+    }
+
+    public static class RandomUtil {
+
+        public static Random random = new Random();
+
+        public static int[] getKFromN(int k, int n) {
+            int[] result = new int[k];
+            int cnt = 0;
+            HashSet<Integer> values = new HashSet<Integer>();
+            while (cnt < k) {
+                int val = random.nextInt(n + 1);
+                if (values.contains(val)) {
+                    continue;
+                }
+
+                result[cnt++] = val;
+                values.add(val);
+            }
+            return result;
+        }
+    }
+
+    public static class RandomLocationGenerator {
+
+        private Random random = new Random();
+
+        private final int beginLat;
+        private final int endLat;
+        private final int beginLong;
+        private final int endLong;
+
+        private Point point;
+
+        public RandomLocationGenerator(int beginLat, int endLat, int beginLong, int endLong) {
+            this.beginLat = beginLat;
+            this.endLat = endLat;
+            this.beginLong = beginLong;
+            this.endLong = endLong;
+            this.point = new Point();
+        }
+
+        public Point getRandomPoint() {
+            int latMajor = beginLat + random.nextInt(endLat - beginLat);
+            int latMinor = random.nextInt(100);
+            float latitude = latMajor + ((float) latMinor) / 100;
+
+            int longMajor = beginLong + random.nextInt(endLong - beginLong);
+            int longMinor = random.nextInt(100);
+            float longitude = longMajor + ((float) longMinor) / 100;
+
+            point.reset(latitude, longitude);
+            return point;
+        }
+
+    }
+
+    public static class TweetMessage {
+
+        private static final String[] DEFAULT_FIELDS = new String[] { TweetFields.TWEETID, TweetFields.USER,
+                TweetFields.LATITUDE, TweetFields.LONGITUDE, TweetFields.MESSAGE_TEXT, TweetFields.CREATED_AT,
+                TweetFields.COUNTRY };
+
+        private int id;
+        private TwitterUser user;
+        private double latitude;
+        private double longitude;
+        private String created_at;
+        private Message messageText;
+        private String country;
+
+        public static final class TweetFields {
+            public static final String TWEETID = "id";
+            public static final String USER = "user";
+            public static final String LATITUDE = "latitude";
+            public static final String LONGITUDE = "longitude";
+            public static final String MESSAGE_TEXT = "message_text";
+            public static final String CREATED_AT = "created_at";
+            public static final String COUNTRY = "country";
+
+        }
+
+        public TweetMessage() {
+        }
+
+        public TweetMessage(int tweetid, TwitterUser user, double latitude, double longitude, String created_at,
+                Message messageText, String country) {
+            this.id = tweetid;
+            this.user = user;
+            this.latitude = latitude;
+            this.longitude = longitude;
+            this.created_at = created_at;
+            this.messageText = messageText;
+            this.country = country;
+        }
+
+        public void reset(int tweetid, TwitterUser user, double latitude, double longitude, String created_at,
+                Message messageText, String country) {
+            this.id = tweetid;
+            this.user = user;
+            this.latitude = latitude;
+            this.longitude = longitude;
+            this.created_at = created_at;
+            this.messageText = messageText;
+            this.country = country;
+        }
+
+        public String getAdmEquivalent(String[] fields) {
+            if (fields == null) {
+                fields = DEFAULT_FIELDS;
+            }
+            StringBuilder builder = new StringBuilder();
+            builder.append("{");
+            for (String field : fields) {
+                switch (field) {
+                    case Datatypes.Tweet.ID:
+                        appendFieldName(builder, Datatypes.Tweet.ID);
+                        builder.append("int64(\"" + id + "\")");
+                        break;
+                    case Datatypes.Tweet.USER:
+                        appendFieldName(builder, Datatypes.Tweet.USER);
+                        builder.append(user);
+                        break;
+                    case Datatypes.Tweet.LATITUDE:
+                        appendFieldName(builder, Datatypes.Tweet.LATITUDE);
+                        builder.append(latitude);
+                        break;
+                    case Datatypes.Tweet.LONGITUDE:
+                        appendFieldName(builder, Datatypes.Tweet.LONGITUDE);
+                        builder.append(longitude);
+                        break;
+                    case Datatypes.Tweet.MESSAGE:
+                        appendFieldName(builder, Datatypes.Tweet.MESSAGE);
+                        builder.append("\"");
+                        for (int i = 0; i < messageText.getLength(); i++) {
+                            builder.append(messageText.charAt(i));
+                        }
+                        builder.append("\"");
+                        break;
+                    case Datatypes.Tweet.CREATED_AT:
+                        appendFieldName(builder, Datatypes.Tweet.CREATED_AT);
+                        builder.append(created_at);
+                        break;
+                    case Datatypes.Tweet.COUNTRY:
+                        appendFieldName(builder, Datatypes.Tweet.COUNTRY);
+                        builder.append("\"" + country + "\"");
+                        break;
+                }
+                builder.append(",");
+            }
+            builder.deleteCharAt(builder.length() - 1);
+            builder.append("}");
+            return builder.toString();
+        }
+
+        private void appendFieldName(StringBuilder builder, String fieldName) {
+            builder.append("\"" + fieldName + "\":");
+        }
+
+        public int getTweetid() {
+            return id;
+        }
+
+        public void setTweetid(int tweetid) {
+            this.id = tweetid;
+        }
+
+        public TwitterUser getUser() {
+            return user;
+        }
+
+        public void setUser(TwitterUser user) {
+            this.user = user;
+        }
+
+        public double getLatitude() {
+            return latitude;
+        }
+
+        public String getSendTime() {
+            return created_at;
+        }
+
+        public Message getMessageText() {
+            return messageText;
+        }
+
+        public void setMessageText(Message messageText) {
+            this.messageText = messageText;
+        }
+
+        public String getCountry() {
+            return country;
+        }
+
+    }
+
+    public static class TwitterUser {
+
+        private String screenName;
+        private String lang = "en";
+        private int friendsCount;
+        private int statusesCount;
+        private String name;
+        private int followersCount;
+
+        public TwitterUser() {
+
+        }
+
+        public TwitterUser(String screenName, int friendsCount, int statusesCount, String name, int followersCount) {
+            this.screenName = screenName;
+            this.friendsCount = friendsCount;
+            this.statusesCount = statusesCount;
+            this.name = name;
+            this.followersCount = followersCount;
+        }
+
+        public void reset(String screenName, int friendsCount, int statusesCount, String name, int followersCount) {
+            this.screenName = screenName;
+            this.friendsCount = friendsCount;
+            this.statusesCount = statusesCount;
+            this.name = name;
+            this.followersCount = followersCount;
+        }
+
+        public String getScreenName() {
+            return screenName;
+        }
+
+        public int getFriendsCount() {
+            return friendsCount;
+        }
+
+        public int getStatusesCount() {
+            return statusesCount;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public int getFollowersCount() {
+            return followersCount;
+        }
+
+        public String toString() {
+            StringBuilder builder = new StringBuilder();
+            builder.append("{");
+            builder.append("\"screen_name\":" + "\"" + screenName + "\"");
+            builder.append(",");
+            builder.append("\"language\":" + "\"" + lang + "\"");
+            builder.append(",");
+            builder.append("\"friends_count\":" + friendsCount);
+            builder.append(",");
+            builder.append("\"status_count\":" + statusesCount);
+            builder.append(",");
+            builder.append("\"name\":" + "\"" + name + "\"");
+            builder.append(",");
+            builder.append("\"followers_count\":" + followersCount);
+            builder.append("}");
+            return builder.toString();
+        }
+
+    }
+
+    public static class Date {
+
+        private int day;
+        private int month;
+        private int year;
+
+        public Date(int month, int day, int year) {
+            this.month = month;
+            this.day = day;
+            this.year = year;
+        }
+
+        public void reset(int month, int day, int year) {
+            this.month = month;
+            this.day = day;
+            this.year = year;
+        }
+
+        public int getDay() {
+            return day;
+        }
+
+        public int getMonth() {
+            return month;
+        }
+
+        public int getYear() {
+            return year;
+        }
+
+        public Date() {
+        }
+
+        public String toString() {
+            StringBuilder builder = new StringBuilder();
+            builder.append("date");
+            builder.append("(\"");
+            builder.append(year);
+            builder.append("-");
+            builder.append(month < 10 ? "0" + month : "" + month);
+            builder.append("-");
+            builder.append(day < 10 ? "0" + day : "" + day);
+            builder.append("\")");
+            return builder.toString();
+        }
+
+        public void setDay(int day) {
+            this.day = day;
+        }
+
+        public void setMonth(int month) {
+            this.month = month;
+        }
+
+        public void setYear(int year) {
+            this.year = year;
+        }
+    }
+
+    public static String[] lastNames = { "Hoopengarner", "Harrow", "Gardner", "Blyant", "Best", "Buttermore", "Gronko",
+            "Mayers", "Countryman", "Neely", "Ruhl", "Taggart", "Bash", "Cason", "Hil", "Zalack", "Mingle", "Carr",
+            "Rohtin", "Wardle", "Pullman", "Wire", "Kellogg", "Hiles", "Keppel", "Bratton", "Sutton", "Wickes",
+            "Muller", "Friedline", "Llora", "Elizabeth", "Anderson", "Gaskins", "Rifler", "Vinsant", "Stanfield",
+            "Black", "Guest", "Hujsak", "Carter", "Weidemann", "Hays", "Patton", "Hayhurst", "Paynter", "Cressman",
+            "Fiddler", "Evans", "Sherlock", "Woodworth", "Jackson", "Bloise", "Schneider", "Ring", "Kepplinger",
+            "James", "Moon", "Bennett", "Bashline", "Ryals", "Zeal", "Christman", "Milliron", "Nash", "Ewing", "Camp",
+            "Mason", "Richardson", "Bowchiew", "Hahn", "Wilson", "Wood", "Toyley", "Williamson", "Lafortune", "Errett",
+            "Saltser", "Hirleman", "Brindle", "Newbiggin", "Ulery", "Lambert", "Shick", "Kuster", "Moore", "Finck",
+            "Powell", "Jolce", "Townsend", "Sauter", "Cowher", "Wolfe", "Cavalet", "Porter", "Laborde", "Ballou",
+            "Murray", "Stoddard", "Pycroft", "Milne", "King", "Todd", "Staymates", "Hall", "Romanoff", "Keilbach",
+            "Sandford", "Hamilton", "Fye", "Kline", "Weeks", "Mcelroy", "Mccullough", "Bryant", "Hill", "Moore",
+            "Ledgerwood", "Prevatt", "Eckert", "Read", "Hastings", "Doverspike", "Allshouse", "Bryan", "Mccallum",
+            "Lombardi", "Mckendrick", "Cattley", "Barkley", "Steiner", "Finlay", "Priebe", "Armitage", "Hall", "Elder",
+            "Erskine", "Hatcher", "Walker", "Pearsall", "Dunkle", "Haile", "Adams", "Miller", "Newbern", "Basinger",
+            "Fuhrer", "Brinigh", "Mench", "Blackburn", "Bastion", "Mccune", "Bridger", "Hynes", "Quinn", "Courtney",
+            "Geddinge", "Field", "Seelig", "Cable", "Earhart", "Harshman", "Roby", "Beals", "Berry", "Reed", "Hector",
+            "Pittman", "Haverrman", "Kalp", "Briner", "Joghs", "Cowart", "Close", "Wynne", "Harden", "Weldy",
+            "Stephenson", "Hildyard", "Moberly", "Wells", "Mackendoerfer", "Fisher", "Oppie", "Oneal", "Churchill",
+            "Keister", "Alice", "Tavoularis", "Fisher", "Hair", "Burns", "Veith", "Wile", "Fuller", "Fields", "Clark",
+            "Randolph", "Stone", "Mcclymonds", "Holtzer", "Donkin", "Wilkinson", "Rosensteel", "Albright", "Stahl",
+            "Fox", "Kadel", "Houser", "Hanseu", "Henderson", "Davis", "Bicknell", "Swain", "Mercer", "Holdeman",
+            "Enderly", "Caesar", "Margaret", "Munshower", "Elless", "Lucy", "Feufer", "Schofield", "Graham",
+            "Blatenberger", "Benford", "Akers", "Campbell", "Ann", "Sadley", "Ling", "Gongaware", "Schmidt", "Endsley",
+            "Groah", "Flanders", "Reichard", "Lowstetter", "Sandblom", "Griffis", "Basmanoff", "Coveney", "Hawker",
+            "Archibald", "Hutton", "Barnes", "Diegel", "Raybould", "Focell", "Breitenstein", "Murray", "Chauvin",
+            "Busk", "Pheleps", "Teagarden", "Northey", "Baumgartner", "Fleming", "Harris", "Parkinson", "Carpenter",
+            "Whirlow", "Bonner", "Wortman", "Rogers", "Scott", "Lowe", "Mckee", "Huston", "Bullard", "Throckmorton",
+            "Rummel", "Mathews", "Dull", "Saline", "Tue", "Woolery", "Lalty", "Schrader", "Ramsey", "Eisenmann",
+            "Philbrick", "Sybilla", "Wallace", "Fonblanque", "Paul", "Orbell", "Higgens", "Casteel", "Franks",
+            "Demuth", "Eisenman", "Hay", "Robinson", "Fischer", "Hincken", "Wylie", "Leichter", "Bousum",
+            "Littlefield", "Mcdonald", "Greif", "Rhodes", "Wall", "Steele", "Baldwin", "Smith", "Stewart", "Schere",
+            "Mary", "Aultman", "Emrick", "Guess", "Mitchell", "Painter", "Aft", "Hasely", "Weldi", "Loewentsein",
+            "Poorbaugh", "Kepple", "Noton", "Judge", "Jackson", "Style", "Adcock", "Diller", "Marriman", "Johnston",
+            "Children", "Monahan", "Ehret", "Shaw", "Congdon", "Pinney", "Millard", "Crissman", "Tanner", "Rose",
+            "Knisely", "Cypret", "Sommer", "Poehl", "Hardie", "Bender", "Overholt", "Gottwine", "Beach", "Leslie",
+            "Trevithick", "Langston", "Magor", "Shotts", "Howe", "Hunter", "Cross", "Kistler", "Dealtry", "Christner",
+            "Pennington", "Thorley", "Eckhardstein", "Van", "Stroh", "Stough", "Stall", "Beedell", "Shea", "Garland",
+            "Mays", "Pritchard", "Frankenberger", "Rowley", "Lane", "Baum", "Alliman", "Park", "Jardine", "Butler",
+            "Cherry", "Kooser", "Baxter", "Billimek", "Downing", "Hurst", "Wood", "Baird", "Watkins", "Edwards",
+            "Kemerer", "Harding", "Owens", "Eiford", "Keener", "Garneis", "Fiscina", "Mang", "Draudy", "Mills",
+            "Gibson", "Reese", "Todd", "Ramos", "Levett", "Wilks", "Ward", "Mosser", "Dunlap", "Kifer", "Christopher",
+            "Ashbaugh", "Wynter", "Rawls", "Cribbs", "Haynes", "Thigpen", "Schreckengost", "Bishop", "Linton",
+            "Chapman", "James", "Jerome", "Hook", "Omara", "Houston", "Maclagan", "Sandys", "Pickering", "Blois",
+            "Dickson", "Kemble", "Duncan", "Woodward", "Southern", "Henley", "Treeby", "Cram", "Elsas", "Driggers",
+            "Warrick", "Overstreet", "Hindman", "Buck", "Sulyard", "Wentzel", "Swink", "Butt", "Schaeffer",
+            "Hoffhants", "Bould", "Willcox", "Lotherington", "Bagley", "Graff", "White", "Wheeler", "Sloan",
+            "Rodacker", "Hanford", "Jowers", "Kunkle", "Cass", "Powers", "Gilman", "Mcmichaels", "Hobbs", "Herndon",
+            "Prescott", "Smail", "Mcdonald", "Biery", "Orner", "Richards", "Mueller", "Isaman", "Bruxner", "Goodman",
+            "Barth", "Turzanski", "Vorrasi", "Stainforth", "Nehling", "Rahl", "Erschoff", "Greene", "Mckinnon",
+            "Reade", "Smith", "Pery", "Roose", "Greenwood", "Weisgarber", "Curry", "Holts", "Zadovsky", "Parrish",
+            "Putnam", "Munson", "Mcindoe", "Nickolson", "Brooks", "Bollinger", "Stroble", "Siegrist", "Fulton",
+            "Tomey", "Zoucks", "Roberts", "Otis", "Clarke", "Easter", "Johnson", "Fylbrigg", "Taylor", "Swartzbaugh",
+            "Weinstein", "Gadow", "Sayre", "Marcotte", "Wise", "Atweeke", "Mcfall", "Napier", "Eisenhart", "Canham",
+            "Sealis", "Baughman", "Gertraht", "Losey", "Laurence", "Eva", "Pershing", "Kern", "Pirl", "Rega",
+            "Sanborn", "Kanaga", "Sanders", "Anderson", "Dickinson", "Osteen", "Gettemy", "Crom", "Snyder", "Reed",
+            "Laurenzi", "Riggle", "Tillson", "Fowler", "Raub", "Jenner", "Koepple", "Soames", "Goldvogel", "Dimsdale",
+            "Zimmer", "Giesen", "Baker", "Beail", "Mortland", "Bard", "Sanner", "Knopsnider", "Jenkins", "Bailey",
+            "Werner", "Barrett", "Faust", "Agg", "Tomlinson", "Williams", "Little", "Greenawalt", "Wells", "Wilkins",
+            "Gisiko", "Bauerle", "Harrold", "Prechtl", "Polson", "Faast", "Winton", "Garneys", "Peters", "Potter",
+            "Porter", "Tennant", "Eve", "Dugger", "Jones", "Burch", "Cowper", "Whittier" };
+
+    public static String[] firstNames = { "Albert", "Jacquelin", "Dona", "Alia", "Mayme", "Genoveva", "Emma", "Lena",
+            "Melody", "Vilma", "Katelyn", "Jeremy", "Coral", "Leann", "Lita", "Gilda", "Kayla", "Alvina", "Maranda",
+            "Verlie", "Khadijah", "Karey", "Patrice", "Kallie", "Corey", "Mollie", "Daisy", "Melanie", "Sarita",
+            "Nichole", "Pricilla", "Terresa", "Berneice", "Arianne", "Brianne", "Lavinia", "Ulrike", "Lesha", "Adell",
+            "Ardelle", "Marisha", "Laquita", "Karyl", "Maryjane", "Kendall", "Isobel", "Raeann", "Heike", "Barbera",
+            "Norman", "Yasmine", "Nevada", "Mariam", "Edith", "Eugena", "Lovie", "Maren", "Bennie", "Lennie", "Tamera",
+            "Crystal", "Randi", "Anamaria", "Chantal", "Jesenia", "Avis", "Shela", "Randy", "Laurena", "Sharron",
+            "Christiane", "Lorie", "Mario", "Elizabeth", "Reina", "Adria", "Lakisha", "Brittni", "Azzie", "Dori",
+            "Shaneka", "Asuncion", "Katheryn", "Laurice", "Sharita", "Krystal", "Reva", "Inger", "Alpha", "Makeda",
+            "Anabel", "Loni", "Tiara", "Meda", "Latashia", "Leola", "Chin", "Daisey", "Ivory", "Amalia", "Logan",
+            "Tyler", "Kyong", "Carolann", "Maryetta", "Eufemia", "Anya", "Doreatha", "Lorna", "Rutha", "Ehtel",
+            "Debbie", "Chassidy", "Sang", "Christa", "Lottie", "Chun", "Karine", "Peggie", "Amina", "Melany", "Alayna",
+            "Scott", "Romana", "Naomi", "Christiana", "Salena", "Taunya", "Mitsue", "Regina", "Chelsie", "Charity",
+            "Dacia", "Aletha", "Latosha", "Lia", "Tamica", "Chery", "Bianca", "Shu", "Georgianne", "Myriam", "Austin",
+            "Wan", "Mallory", "Jana", "Georgie", "Jenell", "Kori", "Vicki", "Delfina", "June", "Mellisa", "Catherina",
+            "Claudie", "Tynisha", "Dayle", "Enriqueta", "Belen", "Pia", "Sarai", "Rosy", "Renay", "Kacie", "Frieda",
+            "Cayla", "Elissa", "Claribel", "Sabina", "Mackenzie", "Raina", "Cira", "Mitzie", "Aubrey", "Serafina",
+            "Maria", "Katharine", "Esperanza", "Sung", "Daria", "Billye", "Stefanie", "Kasha", "Holly", "Suzanne",
+            "Inga", "Flora", "Andria", "Genevie", "Eladia", "Janet", "Erline", "Renna", "Georgeanna", "Delorse",
+            "Elnora", "Rudy", "Rima", "Leanora", "Letisha", "Love", "Alverta", "Pinkie", "Domonique", "Jeannie",
+            "Jose", "Jacqueline", "Tara", "Lily", "Erna", "Tennille", "Galina", "Tamala", "Kirby", "Nichelle",
+            "Myesha", "Farah", "Santa", "Ludie", "Kenia", "Yee", "Micheline", "Maryann", "Elaina", "Ethelyn",
+            "Emmaline", "Shanell", "Marina", "Nila", "Alane", "Shakira", "Dorris", "Belinda", "Elois", "Barbie",
+            "Carita", "Gisela", "Lura", "Fransisca", "Helga", "Peg", "Leonarda", "Earlie", "Deetta", "Jacquetta",
+            "Blossom", "Kayleigh", "Deloras", "Keshia", "Christinia", "Dulce", "Bernie", "Sheba", "Lashanda", "Tula",
+            "Claretta", "Kary", "Jeanette", "Lupita", "Lenora", "Hisako", "Sherise", "Glynda", "Adela", "Chia",
+            "Sudie", "Mindy", "Caroyln", "Lindsey", "Xiomara", "Mercedes", "Onie", "Loan", "Alexis", "Tommie",
+            "Donette", "Monica", "Soo", "Camellia", "Lavera", "Valery", "Ariana", "Sophia", "Loris", "Ginette",
+            "Marielle", "Tari", "Julissa", "Alesia", "Suzanna", "Emelda", "Erin", "Ladawn", "Sherilyn", "Candice",
+            "Nereida", "Fairy", "Carl", "Joel", "Marilee", "Gracia", "Cordie", "So", "Shanita", "Drew", "Cassie",
+            "Sherie", "Marget", "Norma", "Delois", "Debera", "Chanelle", "Catarina", "Aracely", "Carlene", "Tricia",
+            "Aleen", "Katharina", "Marguerita", "Guadalupe", "Margorie", "Mandie", "Kathe", "Chong", "Sage", "Faith",
+            "Maryrose", "Stephany", "Ivy", "Pauline", "Susie", "Cristen", "Jenifer", "Annette", "Debi", "Karmen",
+            "Luci", "Shayla", "Hope", "Ocie", "Sharie", "Tami", "Breana", "Kerry", "Rubye", "Lashay", "Sondra",
+            "Katrice", "Brunilda", "Cortney", "Yan", "Zenobia", "Penni", "Addie", "Lavona", "Noel", "Anika",
+            "Herlinda", "Valencia", "Bunny", "Tory", "Victoria", "Carrie", "Mikaela", "Wilhelmina", "Chung",
+            "Hortencia", "Gerda", "Wen", "Ilana", "Sibyl", "Candida", "Victorina", "Chantell", "Casie", "Emeline",
+            "Dominica", "Cecila", "Delora", "Miesha", "Nova", "Sally", "Ronald", "Charlette", "Francisca", "Mina",
+            "Jenna", "Loraine", "Felisa", "Lulu", "Page", "Lyda", "Babara", "Flor", "Walter", "Chan", "Sherika",
+            "Kala", "Luna", "Vada", "Syreeta", "Slyvia", "Karin", "Renata", "Robbi", "Glenda", "Delsie", "Lizzie",
+            "Genia", "Caitlin", "Bebe", "Cory", "Sam", "Leslee", "Elva", "Caren", "Kasie", "Leticia", "Shannan",
+            "Vickey", "Sandie", "Kyle", "Chang", "Terrilyn", "Sandra", "Elida", "Marketta", "Elsy", "Tu", "Carman",
+            "Ashlie", "Vernia", "Albertine", "Vivian", "Elba", "Bong", "Margy", "Janetta", "Xiao", "Teofila", "Danyel",
+            "Nickole", "Aleisha", "Tera", "Cleotilde", "Dara", "Paulita", "Isela", "Maricela", "Rozella", "Marivel",
+            "Aurora", "Melissa", "Carylon", "Delinda", "Marvella", "Candelaria", "Deidre", "Tawanna", "Myrtie",
+            "Milagro", "Emilie", "Coretta", "Ivette", "Suzann", "Ammie", "Lucina", "Lory", "Tena", "Eleanor",
+            "Cherlyn", "Tiana", "Brianna", "Myra", "Flo", "Carisa", "Kandi", "Erlinda", "Jacqulyn", "Fermina", "Riva",
+            "Palmira", "Lindsay", "Annmarie", "Tamiko", "Carline", "Amelia", "Quiana", "Lashawna", "Veola", "Belva",
+            "Marsha", "Verlene", "Alex", "Leisha", "Camila", "Mirtha", "Melva", "Lina", "Arla", "Cythia", "Towanda",
+            "Aracelis", "Tasia", "Aurore", "Trinity", "Bernadine", "Farrah", "Deneen", "Ines", "Betty", "Lorretta",
+            "Dorethea", "Hertha", "Rochelle", "Juli", "Shenika", "Yung", "Lavon", "Deeanna", "Nakia", "Lynnette",
+            "Dinorah", "Nery", "Elene", "Carolee", "Mira", "Franchesca", "Lavonda", "Leida", "Paulette", "Dorine",
+            "Allegra", "Keva", "Jeffrey", "Bernardina", "Maryln", "Yoko", "Faviola", "Jayne", "Lucilla", "Charita",
+            "Ewa", "Ella", "Maggie", "Ivey", "Bettie", "Jerri", "Marni", "Bibi", "Sabrina", "Sarah", "Marleen",
+            "Katherin", "Remona", "Jamika", "Antonina", "Oliva", "Lajuana", "Fonda", "Sigrid", "Yael", "Billi",
+            "Verona", "Arminda", "Mirna", "Tesha", "Katheleen", "Bonita", "Kamilah", "Patrica", "Julio", "Shaina",
+            "Mellie", "Denyse", "Deandrea", "Alena", "Meg", "Kizzie", "Krissy", "Karly", "Alleen", "Yahaira", "Lucie",
+            "Karena", "Elaine", "Eloise", "Buena", "Marianela", "Renee", "Nan", "Carolynn", "Windy", "Avril", "Jane",
+            "Vida", "Thea", "Marvel", "Rosaline", "Tifany", "Robena", "Azucena", "Carlota", "Mindi", "Andera", "Jenny",
+            "Courtney", "Lyndsey", "Willette", "Kristie", "Shaniqua", "Tabatha", "Ngoc", "Una", "Marlena", "Louetta",
+            "Vernie", "Brandy", "Jacquelyne", "Jenelle", "Elna", "Erminia", "Ida", "Audie", "Louis", "Marisol",
+            "Shawana", "Harriette", "Karol", "Kitty", "Esmeralda", "Vivienne", "Eloisa", "Iris", "Jeanice", "Cammie",
+            "Jacinda", "Shena", "Floy", "Theda", "Lourdes", "Jayna", "Marg", "Kati", "Tanna", "Rosalyn", "Maxima",
+            "Soon", "Angelika", "Shonna", "Merle", "Kassandra", "Deedee", "Heidi", "Marti", "Renae", "Arleen",
+            "Alfredia", "Jewell", "Carley", "Pennie", "Corina", "Tonisha", "Natividad", "Lilliana", "Darcie", "Shawna",
+            "Angel", "Piedad", "Josefa", "Rebbeca", "Natacha", "Nenita", "Petrina", "Carmon", "Chasidy", "Temika",
+            "Dennise", "Renetta", "Augusta", "Shirlee", "Valeri", "Casimira", "Janay", "Berniece", "Deborah", "Yaeko",
+            "Mimi", "Digna", "Irish", "Cher", "Yong", "Lucila", "Jimmie", "Junko", "Lezlie", "Waneta", "Sandee",
+            "Marquita", "Eura", "Freeda", "Annabell", "Laree", "Jaye", "Wendy", "Toshia", "Kylee", "Aleta", "Emiko",
+            "Clorinda", "Sixta", "Audrea", "Juanita", "Birdie", "Reita", "Latanya", "Nia", "Leora", "Laurine",
+            "Krysten", "Jerrie", "Chantel", "Ira", "Sena", "Andre", "Jann", "Marla", "Precious", "Katy", "Gabrielle",
+            "Yvette", "Brook", "Shirlene", "Eldora", "Laura", "Milda", "Euna", "Jettie", "Debora", "Lise", "Edythe",
+            "Leandra", "Shandi", "Araceli", "Johanne", "Nieves", "Denese", "Carmelita", "Nohemi", "Annice", "Natalie",
+            "Yolande", "Jeffie", "Vashti", "Vickie", "Obdulia", "Youlanda", "Lupe", "Tomoko", "Monserrate", "Domitila",
+            "Etsuko", "Adrienne", "Lakesha", "Melissia", "Odessa", "Meagan", "Veronika", "Jolyn", "Isabelle", "Leah",
+            "Rhiannon", "Gianna", "Audra", "Sommer", "Renate", "Perla", "Thao", "Myong", "Lavette", "Mark", "Emilia",
+            "Ariane", "Karl", "Dorie", "Jacquie", "Mia", "Malka", "Shenita", "Tashina", "Christine", "Cherri", "Roni",
+            "Fran", "Mildred", "Sara", "Clarissa", "Fredia", "Elease", "Samuel", "Earlene", "Vernita", "Mae", "Concha",
+            "Renea", "Tamekia", "Hye", "Ingeborg", "Tessa", "Kelly", "Kristin", "Tam", "Sacha", "Kanisha", "Jillian",
+            "Tiffanie", "Ashlee", "Madelyn", "Donya", "Clementine", "Mickie", "My", "Zena", "Terrie", "Samatha",
+            "Gertie", "Tarra", "Natalia", "Sharlene", "Evie", "Shalon", "Rosalee", "Numbers", "Jodi", "Hattie",
+            "Naoma", "Valene", "Whitley", "Claude", "Alline", "Jeanne", "Camie", "Maragret", "Viola", "Kris", "Marlo",
+            "Arcelia", "Shari", "Jalisa", "Corrie", "Eleonor", "Angelyn", "Merry", "Lauren", "Melita", "Gita",
+            "Elenor", "Aurelia", "Janae", "Lyndia", "Margeret", "Shawanda", "Rolande", "Shirl", "Madeleine", "Celinda",
+            "Jaleesa", "Shemika", "Joye", "Tisa", "Trudie", "Kathrine", "Clarita", "Dinah", "Georgia", "Antoinette",
+            "Janis", "Suzette", "Sherri", "Herta", "Arie", "Hedy", "Cassi", "Audrie", "Caryl", "Jazmine", "Jessica",
+            "Beverly", "Elizbeth", "Marylee", "Londa", "Fredericka", "Argelia", "Nana", "Donnette", "Damaris",
+            "Hailey", "Jamee", "Kathlene", "Glayds", "Lydia", "Apryl", "Verla", "Adam", "Concepcion", "Zelda",
+            "Shonta", "Vernice", "Detra", "Meghann", "Sherley", "Sheri", "Kiyoko", "Margarita", "Adaline", "Mariela",
+            "Velda", "Ailene", "Juliane", "Aiko", "Edyth", "Cecelia", "Shavon", "Florance", "Madeline", "Rheba",
+            "Deann", "Ignacia", "Odelia", "Heide", "Mica", "Jennette", "Maricruz", "Ouida", "Darcy", "Laure",
+            "Justina", "Amada", "Laine", "Cruz", "Sunny", "Francene", "Roxanna", "Nam", "Nancie", "Deanna", "Letty",
+            "Britni", "Kazuko", "Lacresha", "Simon", "Caleb", "Milton", "Colton", "Travis", "Miles", "Jonathan",
+            "Logan", "Rolf", "Emilio", "Roberto", "Marcus", "Tim", "Delmar", "Devon", "Kurt", "Edward", "Jeffrey",
+            "Elvis", "Alfonso", "Blair", "Wm", "Sheldon", "Leonel", "Michal", "Federico", "Jacques", "Leslie",
+            "Augustine", "Hugh", "Brant", "Hong", "Sal", "Modesto", "Curtis", "Jefferey", "Adam", "John", "Glenn",
+            "Vance", "Alejandro", "Refugio", "Lucio", "Demarcus", "Chang", "Huey", "Neville", "Preston", "Bert",
+            "Abram", "Foster", "Jamison", "Kirby", "Erich", "Manual", "Dustin", "Derrick", "Donnie", "Jospeh", "Chris",
+            "Josue", "Stevie", "Russ", "Stanley", "Nicolas", "Samuel", "Waldo", "Jake", "Max", "Ernest", "Reinaldo",
+            "Rene", "Gale", "Morris", "Nathan", "Maximo", "Courtney", "Theodore", "Octavio", "Otha", "Delmer",
+            "Graham", "Dean", "Lowell", "Myles", "Colby", "Boyd", "Adolph", "Jarrod", "Nick", "Mark", "Clinton", "Kim",
+            "Sonny", "Dalton", "Tyler", "Jody", "Orville", "Luther", "Rubin", "Hollis", "Rashad", "Barton", "Vicente",
+            "Ted", "Rick", "Carmine", "Clifton", "Gayle", "Christopher", "Jessie", "Bradley", "Clay", "Theo", "Josh",
+            "Mitchell", "Boyce", "Chung", "Eugenio", "August", "Norbert", "Sammie", "Jerry", "Adan", "Edmundo",
+            "Homer", "Hilton", "Tod", "Kirk", "Emmett", "Milan", "Quincy", "Jewell", "Herb", "Steve", "Carmen",
+            "Bobby", "Odis", "Daron", "Jeremy", "Carl", "Hunter", "Tuan", "Thurman", "Asa", "Brenton", "Shane",
+            "Donny", "Andreas", "Teddy", "Dario", "Cyril", "Hoyt", "Teodoro", "Vincenzo", "Hilario", "Daren",
+            "Agustin", "Marquis", "Ezekiel", "Brendan", "Johnson", "Alden", "Richie", "Granville", "Chad", "Joseph",
+            "Lamont", "Jordon", "Gilberto", "Chong", "Rosendo", "Eddy", "Rob", "Dewitt", "Andre", "Titus", "Russell",
+            "Rigoberto", "Dick", "Garland", "Gabriel", "Hank", "Darius", "Ignacio", "Lazaro", "Johnie", "Mauro",
+            "Edmund", "Trent", "Harris", "Osvaldo", "Marvin", "Judson", "Rodney", "Randall", "Renato", "Richard",
+            "Denny", "Jon", "Doyle", "Cristopher", "Wilson", "Christian", "Jamie", "Roland", "Ken", "Tad", "Romeo",
+            "Seth", "Quinton", "Byron", "Ruben", "Darrel", "Deandre", "Broderick", "Harold", "Ty", "Monroe", "Landon",
+            "Mohammed", "Angel", "Arlen", "Elias", "Andres", "Carlton", "Numbers", "Tony", "Thaddeus", "Issac",
+            "Elmer", "Antoine", "Ned", "Fermin", "Grover", "Benito", "Abdul", "Cortez", "Eric", "Maxwell", "Coy",
+            "Gavin", "Rich", "Andy", "Del", "Giovanni", "Major", "Efren", "Horacio", "Joaquin", "Charles", "Noah",
+            "Deon", "Pasquale", "Reed", "Fausto", "Jermaine", "Irvin", "Ray", "Tobias", "Carter", "Yong", "Jorge",
+            "Brent", "Daniel", "Zane", "Walker", "Thad", "Shaun", "Jaime", "Mckinley", "Bradford", "Nathanial",
+            "Jerald", "Aubrey", "Virgil", "Abel", "Philip", "Chester", "Chadwick", "Dominick", "Britt", "Emmitt",
+            "Ferdinand", "Julian", "Reid", "Santos", "Dwain", "Morgan", "James", "Marion", "Micheal", "Eddie", "Brett",
+            "Stacy", "Kerry", "Dale", "Nicholas", "Darrick", "Freeman", "Scott", "Newton", "Sherman", "Felton",
+            "Cedrick", "Winfred", "Brad", "Fredric", "Dewayne", "Virgilio", "Reggie", "Edgar", "Heriberto", "Shad",
+            "Timmy", "Javier", "Nestor", "Royal", "Lynn", "Irwin", "Ismael", "Jonas", "Wiley", "Austin", "Kieth",
+            "Gonzalo", "Paris", "Earnest", "Arron", "Jarred", "Todd", "Erik", "Maria", "Chauncey", "Neil", "Conrad",
+            "Maurice", "Roosevelt", "Jacob", "Sydney", "Lee", "Basil", "Louis", "Rodolfo", "Rodger", "Roman", "Corey",
+            "Ambrose", "Cristobal", "Sylvester", "Benton", "Franklin", "Marcelo", "Guillermo", "Toby", "Jeramy",
+            "Donn", "Danny", "Dwight", "Clifford", "Valentine", "Matt", "Jules", "Kareem", "Ronny", "Lonny", "Son",
+            "Leopoldo", "Dannie", "Gregg", "Dillon", "Orlando", "Weston", "Kermit", "Damian", "Abraham", "Walton",
+            "Adrian", "Rudolf", "Will", "Les", "Norberto", "Fred", "Tyrone", "Ariel", "Terry", "Emmanuel", "Anderson",
+            "Elton", "Otis", "Derek", "Frankie", "Gino", "Lavern", "Jarod", "Kenny", "Dane", "Keenan", "Bryant",
+            "Eusebio", "Dorian", "Ali", "Lucas", "Wilford", "Jeremiah", "Warner", "Woodrow", "Galen", "Bob",
+            "Johnathon", "Amado", "Michel", "Harry", "Zachery", "Taylor", "Booker", "Hershel", "Mohammad", "Darrell",
+            "Kyle", "Stuart", "Marlin", "Hyman", "Jeffery", "Sidney", "Merrill", "Roy", "Garrett", "Porter", "Kenton",
+            "Giuseppe", "Terrance", "Trey", "Felix", "Buster", "Von", "Jackie", "Linwood", "Darron", "Francisco",
+            "Bernie", "Diego", "Brendon", "Cody", "Marco", "Ahmed", "Antonio", "Vince", "Brooks", "Kendrick", "Ross",
+            "Mohamed", "Jim", "Benny", "Gerald", "Pablo", "Charlie", "Antony", "Werner", "Hipolito", "Minh", "Mel",
+            "Derick", "Armand", "Fidel", "Lewis", "Donnell", "Desmond", "Vaughn", "Guadalupe", "Keneth", "Rodrick",
+            "Spencer", "Chas", "Gus", "Harlan", "Wes", "Carmelo", "Jefferson", "Gerard", "Jarvis", "Haywood", "Hayden",
+            "Sergio", "Gene", "Edgardo", "Colin", "Horace", "Dominic", "Aldo", "Adolfo", "Juan", "Man", "Lenard",
+            "Clement", "Everett", "Hal", "Bryon", "Mason", "Emerson", "Earle", "Laurence", "Columbus", "Lamar",
+            "Douglas", "Ian", "Fredrick", "Marc", "Loren", "Wallace", "Randell", "Noble", "Ricardo", "Rory", "Lindsey",
+            "Boris", "Bill", "Carlos", "Domingo", "Grant", "Craig", "Ezra", "Matthew", "Van", "Rudy", "Danial",
+            "Brock", "Maynard", "Vincent", "Cole", "Damion", "Ellsworth", "Marcel", "Markus", "Rueben", "Tanner",
+            "Reyes", "Hung", "Kennith", "Lindsay", "Howard", "Ralph", "Jed", "Monte", "Garfield", "Avery", "Bernardo",
+            "Malcolm", "Sterling", "Ezequiel", "Kristofer", "Luciano", "Casey", "Rosario", "Ellis", "Quintin",
+            "Trevor", "Miquel", "Jordan", "Arthur", "Carson", "Tyron", "Grady", "Walter", "Jonathon", "Ricky",
+            "Bennie", "Terrence", "Dion", "Dusty", "Roderick", "Isaac", "Rodrigo", "Harrison", "Zack", "Dee", "Devin",
+            "Rey", "Ulysses", "Clint", "Greg", "Dino", "Frances", "Wade", "Franklyn", "Jude", "Bradly", "Salvador",
+            "Rocky", "Weldon", "Lloyd", "Milford", "Clarence", "Alec", "Allan", "Bobbie", "Oswaldo", "Wilfred",
+            "Raleigh", "Shelby", "Willy", "Alphonso", "Arnoldo", "Robbie", "Truman", "Nicky", "Quinn", "Damien",
+            "Lacy", "Marcos", "Parker", "Burt", "Carroll", "Denver", "Buck", "Dong", "Normand", "Billie", "Edwin",
+            "Troy", "Arden", "Rusty", "Tommy", "Kenneth", "Leo", "Claud", "Joel", "Kendall", "Dante", "Milo", "Cruz",
+            "Lucien", "Ramon", "Jarrett", "Scottie", "Deshawn", "Ronnie", "Pete", "Alonzo", "Whitney", "Stefan",
+            "Sebastian", "Edmond", "Enrique", "Branden", "Leonard", "Loyd", "Olin", "Ron", "Rhett", "Frederic",
+            "Orval", "Tyrell", "Gail", "Eli", "Antonia", "Malcom", "Sandy", "Stacey", "Nickolas", "Hosea", "Santo",
+            "Oscar", "Fletcher", "Dave", "Patrick", "Dewey", "Bo", "Vito", "Blaine", "Randy", "Robin", "Winston",
+            "Sammy", "Edwardo", "Manuel", "Valentin", "Stanford", "Filiberto", "Buddy", "Zachariah", "Johnnie",
+            "Elbert", "Paul", "Isreal", "Jerrold", "Leif", "Owen", "Sung", "Junior", "Raphael", "Josef", "Donte",
+            "Allen", "Florencio", "Raymond", "Lauren", "Collin", "Eliseo", "Bruno", "Martin", "Lyndon", "Kurtis",
+            "Salvatore", "Erwin", "Michael", "Sean", "Davis", "Alberto", "King", "Rolland", "Joe", "Tory", "Chase",
+            "Dallas", "Vernon", "Beau", "Terrell", "Reynaldo", "Monty", "Jame", "Dirk", "Florentino", "Reuben", "Saul",
+            "Emory", "Esteban", "Michale", "Claudio", "Jacinto", "Kelley", "Levi", "Andrea", "Lanny", "Wendell",
+            "Elwood", "Joan", "Felipe", "Palmer", "Elmo", "Lawrence", "Hubert", "Rudolph", "Duane", "Cordell",
+            "Everette", "Mack", "Alan", "Efrain", "Trenton", "Bryan", "Tom", "Wilmer", "Clyde", "Chance", "Lou",
+            "Brain", "Justin", "Phil", "Jerrod", "George", "Kris", "Cyrus", "Emery", "Rickey", "Lincoln", "Renaldo",
+            "Mathew", "Luke", "Dwayne", "Alexis", "Jackson", "Gil", "Marty", "Burton", "Emil", "Glen", "Willian",
+            "Clemente", "Keven", "Barney", "Odell", "Reginald", "Aurelio", "Damon", "Ward", "Gustavo", "Harley",
+            "Peter", "Anibal", "Arlie", "Nigel", "Oren", "Zachary", "Scot", "Bud", "Wilbert", "Bart", "Josiah",
+            "Marlon", "Eldon", "Darryl", "Roger", "Anthony", "Omer", "Francis", "Patricia", "Moises", "Chuck",
+            "Waylon", "Hector", "Jamaal", "Cesar", "Julius", "Rex", "Norris", "Ollie", "Isaias", "Quentin", "Graig",
+            "Lyle", "Jeffry", "Karl", "Lester", "Danilo", "Mike", "Dylan", "Carlo", "Ryan", "Leon", "Percy", "Lucius",
+            "Jamel", "Lesley", "Joey", "Cornelius", "Rico", "Arnulfo", "Chet", "Margarito", "Ernie", "Nathanael",
+            "Amos", "Cleveland", "Luigi", "Alfonzo", "Phillip", "Clair", "Elroy", "Alva", "Hans", "Shon", "Gary",
+            "Jesus", "Cary", "Silas", "Keith", "Israel", "Willard", "Randolph", "Dan", "Adalberto", "Claude",
+            "Delbert", "Garry", "Mary", "Larry", "Riley", "Robt", "Darwin", "Barrett", "Steven", "Kelly", "Herschel",
+            "Darnell", "Scotty", "Armando", "Miguel", "Lawerence", "Wesley", "Garth", "Carol", "Micah", "Alvin",
+            "Billy", "Earl", "Pat", "Brady", "Cory", "Carey", "Bernard", "Jayson", "Nathaniel", "Gaylord", "Archie",
+            "Dorsey", "Erasmo", "Angelo", "Elisha", "Long", "Augustus", "Hobert", "Drew", "Stan", "Sherwood",
+            "Lorenzo", "Forrest", "Shawn", "Leigh", "Hiram", "Leonardo", "Gerry", "Myron", "Hugo", "Alvaro", "Leland",
+            "Genaro", "Jamey", "Stewart", "Elden", "Irving", "Olen", "Antone", "Freddy", "Lupe", "Joshua", "Gregory",
+            "Andrew", "Sang", "Wilbur", "Gerardo", "Merlin", "Williams", "Johnny", "Alex", "Tommie", "Jimmy",
+            "Donovan", "Dexter", "Gaston", "Tracy", "Jeff", "Stephen", "Berry", "Anton", "Darell", "Fritz", "Willis",
+            "Noel", "Mariano", "Crawford", "Zoey", "Alex", "Brianna", "Carlie", "Lloyd", "Cal", "Astor", "Randolf",
+            "Magdalene", "Trevelyan", "Terance", "Roy", "Kermit", "Harriett", "Crystal", "Laurinda", "Kiersten",
+            "Phyllida", "Liz", "Bettie", "Rena", "Colten", "Berenice", "Sindy", "Wilma", "Amos", "Candi", "Ritchie",
+            "Dirk", "Kathlyn", "Callista", "Anona", "Flossie", "Sterling", "Calista", "Regan", "Erica", "Jeana",
+            "Keaton", "York", "Nolan", "Daniel", "Benton", "Tommie", "Serenity", "Deanna", "Chas", "Heron", "Marlyn",
+            "Xylia", "Tristin", "Lyndon", "Andriana", "Madelaine", "Maddison", "Leila", "Chantelle", "Audrey",
+            "Connor", "Daley", "Tracee", "Tilda", "Eliot", "Merle", "Linwood", "Kathryn", "Silas", "Alvina",
+            "Phinehas", "Janis", "Alvena", "Zubin", "Gwendolen", "Caitlyn", "Bertram", "Hailee", "Idelle", "Homer",
+            "Jannah", "Delbert", "Rhianna", "Cy", "Jefferson", "Wayland", "Nona", "Tempest", "Reed", "Jenifer",
+            "Ellery", "Nicolina", "Aldous", "Prince", "Lexia", "Vinnie", "Doug", "Alberic", "Kayleen", "Woody",
+            "Rosanne", "Ysabel", "Skyler", "Twyla", "Geordie", "Leta", "Clive", "Aaron", "Scottie", "Celeste", "Chuck",
+            "Erle", "Lallie", "Jaycob", "Ray", "Carrie", "Laurita", "Noreen", "Meaghan", "Ulysses", "Andy", "Drogo",
+            "Dina", "Yasmin", "Mya", "Luvenia", "Urban", "Jacob", "Laetitia", "Sherry", "Love", "Michaela", "Deonne",
+            "Summer", "Brendon", "Sheena", "Mason", "Jayson", "Linden", "Salal", "Darrell", "Diana", "Hudson",
+            "Lennon", "Isador", "Charley", "April", "Ralph", "James", "Mina", "Jolyon", "Laurine", "Monna", "Carita",
+            "Munro", "Elsdon", "Everette", "Radclyffe", "Darrin", "Herbert", "Gawain", "Sheree", "Trudy", "Emmaline",
+            "Kassandra", "Rebecca", "Basil", "Jen", "Don", "Osborne", "Lilith", "Hannah", "Fox", "Rupert", "Paulene",
+            "Darius", "Wally", "Baptist", "Sapphire", "Tia", "Sondra", "Kylee", "Ashton", "Jepson", "Joetta", "Val",
+            "Adela", "Zacharias", "Zola", "Marmaduke", "Shannah", "Posie", "Oralie", "Brittany", "Ernesta", "Raymund",
+            "Denzil", "Daren", "Roosevelt", "Nelson", "Fortune", "Mariel", "Nick", "Jaden", "Upton", "Oz", "Margaux",
+            "Precious", "Albert", "Bridger", "Jimmy", "Nicola", "Rosalynne", "Keith", "Walt", "Della", "Joanna",
+            "Xenia", "Esmeralda", "Major", "Simon", "Rexana", "Stacy", "Calanthe", "Sherley", "Kaitlyn", "Graham",
+            "Ramsey", "Abbey", "Madlyn", "Kelvin", "Bill", "Rue", "Monica", "Caileigh", "Laraine", "Booker", "Jayna",
+            "Greta", "Jervis", "Sherman", "Kendrick", "Tommy", "Iris", "Geffrey", "Kaelea", "Kerr", "Garrick", "Jep",
+            "Audley", "Nic", "Bronte", "Beulah", "Patricia", "Jewell", "Deidra", "Cory", "Everett", "Harper",
+            "Charity", "Godfrey", "Jaime", "Sinclair", "Talbot", "Dayna", "Cooper", "Rosaline", "Jennie", "Eileen",
+            "Latanya", "Corinna", "Roxie", "Caesar", "Charles", "Pollie", "Lindsey", "Sorrel", "Dwight", "Jocelyn",
+            "Weston", "Shyla", "Valorie", "Bessie", "Josh", "Lessie", "Dayton", "Kathi", "Chasity", "Wilton", "Adam",
+            "William", "Ash", "Angela", "Ivor", "Ria", "Jazmine", "Hailey", "Jo", "Silvestra", "Ernie", "Clifford",
+            "Levi", "Matilda", "Quincey", "Camilla", "Delicia", "Phemie", "Laurena", "Bambi", "Lourdes", "Royston",
+            "Chastity", "Lynwood", "Elle", "Brenda", "Phoebe", "Timothy", "Raschelle", "Lilly", "Burt", "Rina",
+            "Rodney", "Maris", "Jaron", "Wilf", "Harlan", "Audra", "Vincent", "Elwyn", "Drew", "Wynter", "Ora",
+            "Lissa", "Virgil", "Xavier", "Chad", "Ollie", "Leyton", "Karolyn", "Skye", "Roni", "Gladys", "Dinah",
+            "Penny", "August", "Osmund", "Whitaker", "Brande", "Cornell", "Phil", "Zara", "Kilie", "Gavin", "Coty",
+            "Randy", "Teri", "Keira", "Pru", "Clemency", "Kelcey", "Nevil", "Poppy", "Gareth", "Christabel", "Bastian",
+            "Wynonna", "Roselyn", "Goddard", "Collin", "Trace", "Neal", "Effie", "Denys", "Virginia", "Richard",
+            "Isiah", "Harrietta", "Gaylord", "Diamond", "Trudi", "Elaine", "Jemmy", "Gage", "Annabel", "Quincy", "Syd",
+            "Marianna", "Philomena", "Aubree", "Kathie", "Jacki", "Kelley", "Bess", "Cecil", "Maryvonne", "Kassidy",
+            "Anselm", "Dona", "Darby", "Jamison", "Daryl", "Darell", "Teal", "Lennie", "Bartholomew", "Katie",
+            "Maybelline", "Kimball", "Elvis", "Les", "Flick", "Harley", "Beth", "Bidelia", "Montague", "Helen", "Ozzy",
+            "Stef", "Debra", "Maxene", "Stefanie", "Russ", "Avril", "Johnathan", "Orson", "Chelsey", "Josephine",
+            "Deshaun", "Wendell", "Lula", "Ferdinanda", "Greg", "Brad", "Kynaston", "Dena", "Russel", "Robertina",
+            "Misti", "Leon", "Anjelica", "Bryana", "Myles", "Judi", "Curtis", "Davin", "Kristia", "Chrysanta",
+            "Hayleigh", "Hector", "Osbert", "Eustace", "Cary", "Tansy", "Cayley", "Maryann", "Alissa", "Ike",
+            "Tranter", "Reina", "Alwilda", "Sidony", "Columbine", "Astra", "Jillie", "Stephania", "Jonah", "Kennedy",
+            "Ferdinand", "Allegria", "Donella", "Kelleigh", "Darian", "Eldreda", "Jayden", "Herbie", "Jake", "Winston",
+            "Vi", "Annie", "Cherice", "Hugo", "Tricia", "Haydee", "Cassarah", "Darden", "Mallory", "Alton", "Hadley",
+            "Romayne", "Lacey", "Ern", "Alayna", "Cecilia", "Seward", "Tilly", "Edgar", "Concordia", "Ibbie", "Dahlia",
+            "Oswin", "Stu", "Brett", "Maralyn", "Kristeen", "Dotty", "Robyn", "Nessa", "Tresha", "Guinevere",
+            "Emerson", "Haze", "Lyn", "Henderson", "Lexa", "Jaylen", "Gail", "Lizette", "Tiara", "Robbie", "Destiny",
+            "Alice", "Livia", "Rosy", "Leah", "Jan", "Zach", "Vita", "Gia", "Micheal", "Rowina", "Alysha", "Bobbi",
+            "Delores", "Osmond", "Karaugh", "Wilbur", "Kasandra", "Renae", "Kaety", "Dora", "Gaye", "Amaryllis",
+            "Katelyn", "Dacre", "Prudence", "Ebony", "Camron", "Jerrold", "Vivyan", "Randall", "Donna", "Misty",
+            "Damon", "Selby", "Esmund", "Rian", "Garry", "Julius", "Raelene", "Clement", "Dom", "Tibby", "Moss",
+            "Millicent", "Gwendoline", "Berry", "Ashleigh", "Lilac", "Quin", "Vere", "Creighton", "Harriet", "Malvina",
+            "Lianne", "Pearle", "Kizzie", "Kara", "Petula", "Jeanie", "Maria", "Pacey", "Victoria", "Huey", "Toni",
+            "Rose", "Wallis", "Diggory", "Josiah", "Delma", "Keysha", "Channing", "Prue", "Lee", "Ryan", "Sidney",
+            "Valerie", "Clancy", "Ezra", "Gilbert", "Clare", "Laz", "Crofton", "Mike", "Annabella", "Tara", "Eldred",
+            "Arthur", "Jaylon", "Peronel", "Paden", "Dot", "Marian", "Amyas", "Alexus", "Esmond", "Abbie", "Stanley",
+            "Brittani", "Vickie", "Errol", "Kimberlee", "Uland", "Ebenezer", "Howie", "Eveline", "Andrea", "Trish",
+            "Hopkin", "Bryanna", "Temperance", "Valarie", "Femie", "Alix", "Terrell", "Lewin", "Lorrin", "Happy",
+            "Micah", "Rachyl", "Sloan", "Gertrude", "Elizabeth", "Dorris", "Andra", "Bram", "Gary", "Jeannine",
+            "Maurene", "Irene", "Yolonda", "Jonty", "Coleen", "Cecelia", "Chantal", "Stuart", "Caris", "Ros",
+            "Kaleigh", "Mirabelle", "Kolby", "Primrose", "Susannah", "Ginny", "Jinny", "Dolly", "Lettice", "Sonny",
+            "Melva", "Ernest", "Garret", "Reagan", "Trenton", "Gallagher", "Edwin", "Nikolas", "Corrie", "Lynette",
+            "Ettie", "Sly", "Debbi", "Eudora", "Brittney", "Tacey", "Marius", "Anima", "Gordon", "Olivia", "Kortney",
+            "Shantel", "Kolleen", "Nevaeh", "Buck", "Sera", "Liliana", "Aric", "Kalyn", "Mick", "Libby", "Ingram",
+            "Alexandria", "Darleen", "Jacklyn", "Hughie", "Tyler", "Aida", "Ronda", "Deemer", "Taryn", "Laureen",
+            "Samantha", "Dave", "Hardy", "Baldric", "Montgomery", "Gus", "Ellis", "Titania", "Luke", "Chase", "Haidee",
+            "Mayra", "Isabell", "Trinity", "Milo", "Abigail", "Tacita", "Meg", "Hervey", "Natasha", "Sadie", "Holden",
+            "Dee", "Mansel", "Perry", "Randi", "Frederica", "Georgina", "Kolour", "Debbie", "Seraphina", "Elspet",
+            "Julyan", "Raven", "Zavia", "Jarvis", "Jaymes", "Grover", "Cairo", "Alea", "Jordon", "Braxton", "Donny",
+            "Rhoda", "Tonya", "Bee", "Alyssia", "Ashlyn", "Reanna", "Lonny", "Arlene", "Deb", "Jane", "Nikole",
+            "Bettina", "Harrison", "Tamzen", "Arielle", "Adelaide", "Faith", "Bridie", "Wilburn", "Fern", "Nan",
+            "Shaw", "Zeke", "Alan", "Dene", "Gina", "Alexa", "Bailey", "Sal", "Tammy", "Maximillian", "America",
+            "Sylvana", "Fitz", "Mo", "Marissa", "Cass", "Eldon", "Wilfrid", "Tel", "Joann", "Kendra", "Tolly",
+            "Leanne", "Ferdie", "Haven", "Lucas", "Marlee", "Cyrilla", "Red", "Phoenix", "Jazmin", "Carin", "Gena",
+            "Lashonda", "Tucker", "Genette", "Kizzy", "Winifred", "Melody", "Keely", "Kaylyn", "Radcliff", "Lettie",
+            "Foster", "Lyndsey", "Nicholas", "Farley", "Louisa", "Dana", "Dortha", "Francine", "Doran", "Bonita",
+            "Hal", "Sawyer", "Reginald", "Aislin", "Nathan", "Baylee", "Abilene", "Ladonna", "Maurine", "Shelly",
+            "Deandre", "Jasmin", "Roderic", "Tiffany", "Amanda", "Verity", "Wilford", "Gayelord", "Whitney", "Demelza",
+            "Kenton", "Alberta", "Kyra", "Tabitha", "Sampson", "Korey", "Lillian", "Edison", "Clayton", "Steph",
+            "Maya", "Dusty", "Jim", "Ronny", "Adrianne", "Bernard", "Harris", "Kiley", "Alexander", "Kisha", "Ethalyn",
+            "Patience", "Briony", "Indigo", "Aureole", "Makenzie", "Molly", "Sherilyn", "Barry", "Laverne", "Hunter",
+            "Rocky", "Tyreek", "Madalyn", "Phyliss", "Chet", "Beatrice", "Faye", "Lavina", "Madelyn", "Tracey",
+            "Gyles", "Patti", "Carlyn", "Stephanie", "Jackalyn", "Larrie", "Kimmy", "Isolda", "Emelina", "Lis",
+            "Zillah", "Cody", "Sheard", "Rufus", "Paget", "Mae", "Rexanne", "Luvinia", "Tamsen", "Rosanna", "Greig",
+            "Stacia", "Mabelle", "Quianna", "Lotus", "Delice", "Bradford", "Angus", "Cosmo", "Earlene", "Adrian",
+            "Arlie", "Noelle", "Sabella", "Isa", "Adelle", "Innocent", "Kirby", "Trixie", "Kenelm", "Nelda", "Melia",
+            "Kendal", "Dorinda", "Placid", "Linette", "Kam", "Sherisse", "Evan", "Ewart", "Janice", "Linton",
+            "Jacaline", "Charissa", "Douglas", "Aileen", "Kemp", "Oli", "Amethyst", "Rosie", "Nigella", "Sherill",
+            "Anderson", "Alanna", "Eric", "Claudia", "Jennifer", "Boniface", "Harriet", "Vernon", "Lucy", "Shawnee",
+            "Gerard", "Cecily", "Romey", "Randall", "Wade", "Lux", "Dawson", "Gregg", "Kade", "Roxanne", "Melinda",
+            "Rolland", "Rowanne", "Fannie", "Isidore", "Melia", "Harvie", "Salal", "Eleonor", "Jacquette", "Lavone",
+            "Shanika", "Tarquin", "Janet", "Josslyn", "Maegan", "Augusta", "Aubree", "Francene", "Martie", "Marisa",
+            "Tyreek", "Tatianna", "Caleb", "Sheridan", "Nellie", "Barbara", "Wat", "Jayla", "Esmaralda", "Graeme",
+            "Lavena", "Jemima", "Nikolas", "Triston", "Portia", "Kyla", "Marcus", "Raeburn", "Jamison", "Earl", "Wren",
+            "Leighton", "Lagina", "Lucasta", "Dina", "Amaranta", "Jessika", "Claud", "Bernard", "Winifred", "Ebba",
+            "Sammi", "Gall", "Chloe", "Ottoline", "Herbert", "Janice", "Gareth", "Channing", "Caleigh", "Kailee",
+            "Ralphie", "Tamzen", "Quincy", "Beaumont", "Albert", "Jadyn", "Violet", "Luanna", "Moriah", "Humbert",
+            "Jed", "Leona", "Hale", "Mitch", "Marlin", "Nivek", "Darwin", "Dirk", "Liliana", "Meadow", "Bernadine",
+            "Jorie", "Peyton", "Astra", "Roscoe", "Gina", "Lovell", "Jewel", "Romayne", "Rosy", "Imogene",
+            "Margaretta", "Lorinda", "Hopkin", "Bobby", "Flossie", "Bennie", "Horatio", "Jonah", "Lyn", "Deana",
+            "Juliana", "Blanch", "Wright", "Kendal", "Woodrow", "Tania", "Austyn", "Val", "Mona", "Charla", "Rudyard",
+            "Pamela", "Raven", "Zena", "Nicola", "Kaelea", "Conor", "Virgil", "Sonnie", "Goodwin", "Christianne",
+            "Linford", "Myron", "Denton", "Charita", "Brody", "Ginnie", "Harrison", "Jeanine", "Quin", "Isolda",
+            "Zoie", "Pearce", "Margie", "Larrie", "Angelina", "Marcia", "Jessamine", "Delilah", "Dick", "Luana",
+            "Delicia", "Lake", "Luvenia", "Vaughan", "Concordia", "Gayelord", "Cheyenne", "Felix", "Dorris", "Pen",
+            "Kristeen", "Parris", "Everitt", "Josephina", "Amy", "Tommie", "Adrian", "April", "Rosaline", "Zachery",
+            "Trace", "Phoebe", "Jenelle", "Kameron", "Katharine", "Media", "Colton", "Tad", "Quianna", "Kerenza",
+            "Greta", "Luvinia", "Pete", "Tonya", "Beckah", "Barbra", "Jon", "Tetty", "Corey", "Sylvana", "Kizzy",
+            "Korey", "Trey", "Haydee", "Penny", "Mandy", "Panda", "Coline", "Ramsey", "Sukie", "Annabel", "Sarina",
+            "Corbin", "Suzanna", "Rob", "Duana", "Shell", "Jason", "Eddy", "Rube", "Roseann", "Celia", "Brianne",
+            "Nerissa", "Jera", "Humphry", "Ashlynn", "Terrence", "Philippina", "Coreen", "Kolour", "Indiana", "Paget",
+            "Marlyn", "Hester", "Isbel", "Ocean", "Harris", "Leslie", "Vere", "Monroe", "Isabelle", "Bertie", "Clitus",
+            "Dave", "Alethea", "Lessie", "Louiza", "Madlyn", "Garland", "Wolf", "Lalo", "Donny", "Amabel", "Tianna",
+            "Louie", "Susie", "Mackenzie", "Renie", "Tess", "Marmaduke", "Gwendolen", "Bettina", "Beatrix", "Esmund",
+            "Minnie", "Carlie", "Barnabas", "Ruthie", "Honour", "Haylie", "Xavior", "Freddie", "Ericka", "Aretha",
+            "Edie", "Madelina", "Anson", "Tabby", "Derrick", "Jocosa", "Deirdre", "Aislin", "Chastity", "Abigail",
+            "Wynonna", "Zo", "Eldon", "Krystine", "Ghislaine", "Zavia", "Nolene", "Marigold", "Kelley", "Sylvester",
+            "Odell", "George", "Laurene", "Franklyn", "Clarice", "Mo", "Dustin", "Debbi", "Lina", "Tony", "Acacia",
+            "Hettie", "Natalee", "Marcie", "Brittany", "Elnora", "Rachel", "Dawn", "Basil", "Christal", "Anjelica",
+            "Fran", "Tawny", "Delroy", "Tameka", "Lillie", "Ceara", "Deanna", "Deshaun", "Ken", "Bradford", "Justina",
+            "Merle", "Draven", "Gretta", "Harriette", "Webster", "Nathaniel", "Anemone", "Coleen", "Ruth", "Chryssa",
+            "Hortensia", "Saffie", "Deonne", "Leopold", "Harlan", "Lea", "Eppie", "Lucinda", "Tilda", "Fanny", "Titty",
+            "Lockie", "Jepson", "Sherisse", "Maralyn", "Ethel", "Sly", "Ebenezer", "Canute", "Ella", "Freeman",
+            "Reuben", "Olivette", "Nona", "Rik", "Amice", "Kristine", "Kathie", "Jayne", "Jeri", "Mckenna", "Bertram",
+            "Kaylee", "Livia", "Gil", "Wallace", "Maryann", "Keeleigh", "Laurinda", "Doran", "Khloe", "Dakota",
+            "Yaron", "Kimberleigh", "Gytha", "Doris", "Marylyn", "Benton", "Linnette", "Esther", "Jakki", "Rowina",
+            "Marian", "Roselyn", "Norbert", "Maggie", "Caesar", "Phinehas", "Jerry", "Jasmine", "Antonette", "Miriam",
+            "Monna", "Maryvonne", "Jacquetta", "Bernetta", "Napier", "Annie", "Gladwin", "Sheldon", "Aric", "Elouise",
+            "Gawain", "Kristia", "Gabe", "Kyra", "Red", "Tod", "Dudley", "Lorraine", "Ryley", "Sabina", "Poppy",
+            "Leland", "Aileen", "Eglantine", "Alicia", "Jeni", "Addy", "Tiffany", "Geffrey", "Lavina", "Collin",
+            "Clover", "Vin", "Jerome", "Doug", "Vincent", "Florence", "Scarlet", "Celeste", "Desdemona", "Tiphanie",
+            "Kassandra", "Ashton", "Madison", "Art", "Magdalene", "Iona", "Josepha", "Anise", "Ferne", "Derek",
+            "Huffie", "Qiana", "Ysabel", "Tami", "Shannah", "Xavier", "Willard", "Winthrop", "Vickie", "Maura",
+            "Placid", "Tiara", "Reggie", "Elissa", "Isa", "Chrysanta", "Jeff", "Bessie", "Terri", "Amilia", "Brett",
+            "Daniella", "Damion", "Carolina", "Maximillian", "Travers", "Benjamin", "Oprah", "Darcy", "Yolanda",
+            "Nicolina", "Crofton", "Jarrett", "Kaitlin", "Shauna", "Keren", "Bevis", "Kalysta", "Sharron", "Alyssa",
+            "Blythe", "Zelma", "Caelie", "Norwood", "Billie", "Patrick", "Gary", "Cambria", "Tylar", "Mason", "Helen",
+            "Melyssa", "Gene", "Gilberta", "Carter", "Herbie", "Harmonie", "Leola", "Eugenia", "Clint", "Pauletta",
+            "Edwyna", "Georgina", "Teal", "Harper", "Izzy", "Dillon", "Kezia", "Evangeline", "Colene", "Madelaine",
+            "Zilla", "Rudy", "Dottie", "Caris", "Morton", "Marge", "Tacey", "Parker", "Troy", "Liza", "Lewin",
+            "Tracie", "Justine", "Dallas", "Linden", "Ray", "Loretta", "Teri", "Elvis", "Diane", "Julianna", "Manfred",
+            "Denise", "Eireen", "Ann", "Kenith", "Linwood", "Kathlyn", "Bernice", "Shelley", "Oswald", "Amedeus",
+            "Homer", "Tanzi", "Ted", "Ralphina", "Hyacinth", "Lotus", "Matthias", "Arlette", "Clark", "Cecil",
+            "Elspeth", "Alvena", "Noah", "Millard", "Brenden", "Cole", "Philipa", "Nina", "Thelma", "Iantha", "Reid",
+            "Jefferson", "Meg", "Elsie", "Shirlee", "Nathan", "Nancy", "Simona", "Racheal", "Carin", "Emory", "Delice",
+            "Kristi", "Karaugh", "Kaety", "Tilly", "Em", "Alanis", "Darrin", "Jerrie", "Hollis", "Cary", "Marly",
+            "Carita", "Jody", "Farley", "Hervey", "Rosalin", "Cuthbert", "Stewart", "Jodene", "Caileigh", "Briscoe",
+            "Dolores", "Sheree", "Eustace", "Nigel", "Detta", "Barret", "Rowland", "Kenny", "Githa", "Zoey", "Adela",
+            "Petronella", "Opal", "Coleman", "Niles", "Cyril", "Dona", "Alberic", "Allannah", "Jules", "Avalon",
+            "Hadley", "Thomas", "Renita", "Calanthe", "Heron", "Shawnda", "Chet", "Malina", "Manny", "Rina", "Frieda",
+            "Eveleen", "Deshawn", "Amos", "Raelene", "Paige", "Molly", "Nannie", "Ileen", "Brendon", "Milford",
+            "Unice", "Rebeccah", "Caedmon", "Gae", "Doreen", "Vivian", "Louis", "Raphael", "Vergil", "Lise", "Glenn",
+            "Karyn", "Terance", "Reina", "Jake", "Gordon", "Wisdom", "Isiah", "Gervase", "Fern", "Marylou", "Roddy",
+            "Justy", "Derick", "Shantelle", "Adam", "Chantel", "Madoline", "Emmerson", "Lexie", "Mickey", "Stephen",
+            "Dane", "Stacee", "Elwin", "Tracey", "Alexandra", "Ricky", "Ian", "Kasey", "Rita", "Alanna", "Georgene",
+            "Deon", "Zavier", "Ophelia", "Deforest", "Lowell", "Zubin", "Hardy", "Osmund", "Tabatha", "Debby",
+            "Katlyn", "Tallulah", "Priscilla", "Braden", "Wil", "Keziah", "Jen", "Aggie", "Korbin", "Lemoine",
+            "Barnaby", "Tranter", "Goldie", "Roderick", "Trina", "Emery", "Pris", "Sidony", "Adelle", "Tate", "Wilf",
+            "Zola", "Brande", "Chris", "Calanthia", "Lilly", "Kaycee", "Lashonda", "Jasmin", "Elijah", "Shantel",
+            "Simon", "Rosalind", "Jarod", "Kaylie", "Corrine", "Joselyn", "Archibald", "Mariabella", "Winton",
+            "Merlin", "Chad", "Ursula", "Kristopher", "Hewie", "Adrianna", "Lyndsay", "Jasmyn", "Tim", "Evette",
+            "Margaret", "Samson", "Bronte", "Terence", "Leila", "Candice", "Tori", "Jamey", "Coriander", "Conrad",
+            "Floyd", "Karen", "Lorin", "Maximilian", "Cairo", "Emily", "Yasmin", "Karolyn", "Bryan", "Lanny",
+            "Kimberly", "Rick", "Chaz", "Krystle", "Lyric", "Laura", "Garrick", "Flip", "Monty", "Brendan",
+            "Ermintrude", "Rayner", "Merla", "Titus", "Marva", "Patricia", "Leone", "Tracy", "Jaqueline", "Hallam",
+            "Delores", "Cressida", "Carlyle", "Leann", "Kelcey", "Laurence", "Ryan", "Reynold", "Mark", "Collyn",
+            "Audie", "Sammy", "Ellery", "Sallie", "Pamelia", "Adolph", "Lydia", "Titania", "Ron", "Bridger", "Aline",
+            "Read", "Kelleigh", "Weldon", "Irving", "Garey", "Diggory", "Evander", "Kylee", "Deidre", "Ormond",
+            "Laurine", "Reannon", "Arline", "Pat"
+
+    };
+
+    public static String[] jargon = { "wireless", "signal", "network", "3G", "plan", "touch-screen",
+            "customer-service", "reachability", "voice-command", "shortcut-menu", "customization", "platform", "speed",
+            "voice-clarity", "voicemail-service" };
+
+    public static String[] vendors = { "at&t", "verizon", "t-mobile", "sprint", "motorola", "samsung", "iphone" };
+
+    public static String[] org_list = { "Latsonity", "ganjalax", "Zuncan", "Lexitechno", "Hot-tech", "subtam",
+            "Coneflex", "Ganjatax", "physcane", "Tranzap", "Qvohouse", "Zununoing", "jaydax", "Keytech", "goldendexon",
+            "Villa-tech", "Trustbam", "Newcom", "Voltlane", "Ontohothex", "Ranhotfan", "Alphadax", "Transhigh",
+            "kin-ron", "Doublezone", "Solophase", "Vivaace", "silfind", "Basecone", "sonstreet", "Freshfix",
+            "Techitechi", "Kanelectrics", "linedexon", "Goldcity", "Newfase", "Technohow", "Zimcone", "Salthex",
+            "U-ron", "Solfix", "whitestreet", "Xx-technology", "Hexviafind", "over-it", "Strongtone", "Tripplelane",
+            "geomedia", "Scotcity", "Inchex", "Vaiatech", "Striptaxon", "Hatcom", "tresline", "Sanjodax", "freshdox",
+            "Sumlane", "Quadlane", "Newphase", "overtech", "Voltbam", "Icerunin", "Fixdintex", "Hexsanhex", "Statcode",
+            "Greencare", "U-electrics", "Zamcorporation", "Ontotanin", "Tanzimcare", "Groovetex", "Ganjastrip",
+            "Redelectronics", "Dandamace", "Whitemedia", "strongex", "Streettax", "highfax", "Mathtech", "Xx-drill",
+            "Sublamdox", "Unijobam", "Rungozoom", "Fixelectrics", "Villa-dox", "Ransaofan", "Plexlane", "itlab",
+            "Lexicone", "Fax-fax", "Viatechi", "Inchdox", "Kongreen", "Doncare", "Y-geohex", "Opeelectronics",
+            "Medflex", "Dancode", "Roundhex", "Labzatron", "Newhotplus", "Sancone", "Ronholdings", "Quoline",
+            "zoomplus", "Fix-touch", "Codetechno", "Tanzumbam", "Indiex", "Canline" };
+}
\ No newline at end of file

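The name, jargon, vendor, and organization arrays above are the vocabulary a synthetic data generator draws from when fabricating feed records for tests. A minimal sampling sketch (the class and method names here are illustrative and not part of this change):

    import java.util.Random;

    public class NameSamplerSketch {
        // Illustrative only: draws a random full name and a random jargon term
        // from arrays shaped like the ones defined above.
        private final Random random = new Random();

        public String randomFullName(String[] firstNames, String[] lastNames) {
            return firstNames[random.nextInt(firstNames.length)] + " "
                    + lastNames[random.nextInt(lastNames.length)];
        }

        public String randomJargon(String[] jargon) {
            return jargon[random.nextInt(jargon.length)];
        }
    }
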
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
index ea13f25..90c74e1 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
@@ -24,6 +24,7 @@ import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.parse.ITupleForwarder;
 import org.apache.asterix.common.parse.ITupleForwarder.TupleForwardPolicy;
 import org.apache.asterix.external.dataflow.CounterTimerTupleForwarder;
+import org.apache.asterix.external.dataflow.FeedTupleForwarder;
 import org.apache.asterix.external.dataflow.FrameFullTupleForwarder;
 import org.apache.asterix.external.dataflow.RateControlledTupleForwarder;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -37,7 +38,7 @@ public class DataflowUtils {
         if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
             appender.flush(writer, true);
             if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                throw new IllegalStateException();
+                throw new HyracksDataException("Tuple is too large for a frame");
             }
         }
     }
@@ -46,12 +47,18 @@ public class DataflowUtils {
         ITupleForwarder policy = null;
         ITupleForwarder.TupleForwardPolicy policyType = null;
         String propValue = configuration.get(ITupleForwarder.FORWARD_POLICY);
-        if (propValue == null) {
+        if (ExternalDataUtils.isFeed(configuration)) {
+            //TODO pass this value in the configuration and avoid this check for feeds
+            policyType = TupleForwardPolicy.FEED;
+        } else if (propValue == null) {
             policyType = TupleForwardPolicy.FRAME_FULL;
         } else {
             policyType = TupleForwardPolicy.valueOf(propValue.trim().toUpperCase());
         }
         switch (policyType) {
+            case FEED:
+                policy = new FeedTupleForwarder();
+                break;
             case FRAME_FULL:
                 policy = new FrameFullTupleForwarder();
                 break;

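The hunk above short-circuits the forward-policy lookup for feeds: whenever the configuration describes a feed, the new FeedTupleForwarder is used regardless of any configured policy. Restated in isolation, the selection reads roughly as follows (a sketch; the stand-alone class and the isFeed flag, which stands in for ExternalDataUtils.isFeed(configuration), are illustrative):

    import java.util.Map;

    import org.apache.asterix.common.parse.ITupleForwarder;
    import org.apache.asterix.common.parse.ITupleForwarder.TupleForwardPolicy;

    public class ForwardPolicySketch {
        // Mirrors the selection logic in the hunk above; not part of this patch.
        public static TupleForwardPolicy resolvePolicy(Map<String, String> configuration, boolean isFeed) {
            String propValue = configuration.get(ITupleForwarder.FORWARD_POLICY);
            if (isFeed) {
                // feeds always use the FEED forwarder, regardless of any configured policy
                return TupleForwardPolicy.FEED;
            }
            if (propValue == null) {
                return TupleForwardPolicy.FRAME_FULL;
            }
            return TupleForwardPolicy.valueOf(propValue.trim().toUpperCase());
        }
    }
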
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/Datatypes.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/Datatypes.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/Datatypes.java
index a544638..17b194c 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/Datatypes.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/Datatypes.java
@@ -22,7 +22,6 @@ public class Datatypes {
 
     /*
         The following assumes this DDL (but ignoring the field name orders):
-
         create type TwitterUser if not exists as open{
             screen_name: string,
             language: string,
@@ -31,7 +30,6 @@ public class Datatypes {
             name: string,
             followers_count: int32
         };
-
         create type Tweet if not exists as open{
             id: string,
             user: TwitterUser,
@@ -40,7 +38,6 @@ public class Datatypes {
             created_at:string,
             message_text:string
         };
-
     */
     public static class Tweet {
         public static final String ID = "id";
@@ -62,10 +59,8 @@ public class Datatypes {
 
     }
 
-
     /*
         The following assumes this DDL (but ignoring the field name orders):
-
         create type ProcessedTweet if not exists as open {
             id: string,
             user_name:string,
@@ -75,7 +70,6 @@ public class Datatypes {
             country: string,
             topics: [string]
         };
-
     */
     public static final class ProcessedTweet {
         public static final String USER_NAME = "user_name";
@@ -83,5 +77,4 @@ public class Datatypes {
         public static final String TOPICS = "topics";
     }
 
-
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
index 7f91a2b..4e6401a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
@@ -49,10 +49,11 @@ public class ExternalDataCompatibilityUtils {
     }
 
     //TODO:Add remaining aliases
-    public static void addCompatabilityParameters(String adapterClassname, ARecordType itemType,
+    public static void addCompatabilityParameters(String adapterName, ARecordType itemType,
             Map<String, String> configuration) throws AsterixException {
-        if (adapterClassname.equals(ExternalDataConstants.ALIAS_HDFS_ADAPTER)
-                || adapterClassname.equalsIgnoreCase(ExternalDataConstants.ADAPTER_HDFS_CLASSNAME)) {
+        // HDFS
+        if (adapterName.equals(ExternalDataConstants.ALIAS_HDFS_ADAPTER)
+                || adapterName.equalsIgnoreCase(ExternalDataConstants.ADAPTER_HDFS_CLASSNAME)) {
             if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
                 throw new AsterixException("Unspecified format parameter for HDFS adapter");
             }
@@ -65,21 +66,45 @@ public class ExternalDataCompatibilityUtils {
                 configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.ALIAS_HDFS_ADAPTER);
             }
         }
-        if (adapterClassname.equals(ExternalDataConstants.ALIAS_LOCALFS_ADAPTER)
-                || adapterClassname.contains(ExternalDataConstants.ADAPTER_LOCALFS_CLASSNAME)) {
+
+        // Local Filesystem
+        if (adapterName.equals(ExternalDataConstants.ALIAS_LOCALFS_ADAPTER)
+                || adapterName.contains(ExternalDataConstants.ADAPTER_LOCALFS_CLASSNAME)
+                || adapterName.contains(ExternalDataConstants.ALIAS_LOCALFS_PUSH_ADAPTER)) {
             if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
                 throw new AsterixException("Unspecified format parameter for local file system adapter");
             }
             configuration.put(ExternalDataConstants.KEY_READER, configuration.get(ExternalDataConstants.KEY_FORMAT));
             configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.ALIAS_LOCALFS_ADAPTER);
         }
+
+        // Twitter (Pull)
+        if (adapterName.equals(ExternalDataConstants.ALIAS_TWITTER_PULL_ADAPTER)) {
+            configuration.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_TWITTER_PULL);
+            configuration.put(ExternalDataConstants.KEY_PULL, ExternalDataConstants.TRUE);
+            ExternalDataUtils.setRecordFormat(configuration, ExternalDataConstants.FORMAT_TWEET);
+        }
+
+        // Twitter (Push)
+        if (adapterName.equals(ExternalDataConstants.ALIAS_TWITTER_PUSH_ADAPTER)) {
+            configuration.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_TWITTER_PUSH);
+            configuration.put(ExternalDataConstants.KEY_PUSH, ExternalDataConstants.TRUE);
+            ExternalDataUtils.setRecordFormat(configuration, ExternalDataConstants.FORMAT_TWEET);
+        }
+
+        // Hive Parser
         if (configuration.get(ExternalDataConstants.KEY_PARSER) != null
                 && configuration.get(ExternalDataConstants.KEY_PARSER).equals(ExternalDataConstants.PARSER_HIVE)) {
             configuration.put(ExternalDataConstants.KEY_PARSER, ExternalDataConstants.FORMAT_HIVE);
         }
+
+        // FileSystem for Feed adapter
         if (configuration.get(ExternalDataConstants.KEY_FILESYSTEM) != null) {
             configuration.put(ExternalDataConstants.KEY_STREAM,
                     configuration.get(ExternalDataConstants.KEY_FILESYSTEM));
+            if (adapterName.equalsIgnoreCase(ExternalDataConstants.ALIAS_FILE_FEED_ADAPTER)) {
+                configuration.put(ExternalDataConstants.KEY_WAIT_FOR_DATA, ExternalDataConstants.FALSE);
+            }
         }
     }
 }

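With the new branches above, a feed declared with the pull_twitter or push_twitter alias gets its reader, pull/push flag, and record format filled in automatically. For the pull case, the configuration map ends up with roughly these entries (an illustrative sketch; only the ExternalDataConstants names come from this patch):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.external.util.ExternalDataConstants;

    public class TwitterPullConfigSketch {
        // Illustrative only: the entries the pull_twitter branch above adds
        // to a feed's configuration map.
        public static Map<String, String> pullTwitterDefaults() {
            Map<String, String> configuration = new HashMap<>();
            configuration.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_TWITTER_PULL);
            configuration.put(ExternalDataConstants.KEY_PULL, ExternalDataConstants.TRUE);
            // ExternalDataUtils.setRecordFormat(...) additionally records FORMAT_TWEET
            // as the record format unless one was already supplied.
            return configuration;
        }
    }
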
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index 2050e6a..fb2688f 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 
 public class ExternalDataConstants {
-    //TODO: Remove unused variables.
+    // TODO: Remove unused variables.
     /**
      * Keys
      */
@@ -70,6 +70,9 @@ public class ExternalDataConstants {
     public static final String KEY_INTERVAL = "interval";
     public static final String KEY_PULL = "pull";
     public static final String KEY_PUSH = "push";
+    public static final String KEY_IS_FEED = "is-feed";
+    public static final String KEY_WAIT_FOR_DATA = "wait-for-data";
+    public static final String KEY_FEED_NAME = "feed";
     /**
      * HDFS class names
      */
@@ -94,6 +97,8 @@ public class ExternalDataConstants {
     public static final String READER_ADM = "adm";
     public static final String READER_SEMISTRUCTURED = "semi-structured";
     public static final String READER_DELIMITED = "delimited-text";
+    public static final String READER_TWITTER_PUSH = "twitter-push";
+    public static final String READER_TWITTER_PULL = "twitter-pull";
 
     public static final String CLUSTER_LOCATIONS = "cluster-locations";
     public static final String SCHEDULER = "hdfs-scheduler";
@@ -128,6 +133,7 @@ public class ExternalDataConstants {
      */
     public static final String ALIAS_GENERIC_ADAPTER = "adapter";
     public static final String ALIAS_LOCALFS_ADAPTER = "localfs";
+    public static final String ALIAS_LOCALFS_PUSH_ADAPTER = "push_localfs";
     public static final String ALIAS_HDFS_ADAPTER = "hdfs";
     public static final String ALIAS_SOCKET_ADAPTER = "socket_adapter";
     public static final String ALIAS_TWITTER_FIREHOSE_ADAPTER = "twitter_firehose";
@@ -136,7 +142,6 @@ public class ExternalDataConstants {
     public static final String ALIAS_FILE_FEED_ADAPTER = "file_feed";
     public static final String ALIAS_TWITTER_PUSH_ADAPTER = "push_twitter";
     public static final String ALIAS_TWITTER_PULL_ADAPTER = "pull_twitter";
-    public static final String ALIAS_TWITTER_AZURE_ADAPTER = "azure_twitter";
     public static final String ALIAS_CNN_ADAPTER = "cnn_feed";
 
     /**
@@ -146,6 +151,12 @@ public class ExternalDataConstants {
     public static final String ADAPTER_HDFS_CLASSNAME = "org.apache.asterix.external.dataset.adapter.HDFSAdapter";
 
     /**
+     * Constant String values
+     */
+    public static final String TRUE = "true";
+    public static final String FALSE = "false";
+
+    /**
      * Constant characters
      */
     public static final char ESCAPE = '\\';
@@ -160,7 +171,7 @@ public class ExternalDataConstants {
     /**
      * Constant byte characters
      */
-    public static final byte EOL = '\n';
+    public static final byte BYTE_LF = '\n';
     public static final byte BYTE_CR = '\r';
     /**
      * Size default values
@@ -172,5 +183,4 @@ public class ExternalDataConstants {
      * Expected parameter values
      */
     public static final String PARAMETER_OF_SIZE_ONE = "Value of size 1";
-
 }

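Renaming EOL to BYTE_LF (alongside BYTE_CR) makes explicit that the constant is the line-feed byte rather than a platform-dependent line terminator. A reader scanning for record boundaries might use the pair along these lines (an illustrative sketch, not code from this patch):

    import org.apache.asterix.external.util.ExternalDataConstants;

    public class LineBoundarySketch {
        // Illustrative only: returns the length of the first line in buf, excluding
        // trailing CR/LF bytes, or -1 if no line feed has been seen yet.
        public static int firstLineLength(byte[] buf, int limit) {
            for (int i = 0; i < limit; i++) {
                if (buf[i] == ExternalDataConstants.BYTE_LF) {
                    // strip a preceding carriage return so CRLF and LF input behave the same
                    return (i > 0 && buf[i - 1] == ExternalDataConstants.BYTE_CR) ? i - 1 : i;
                }
            }
            return -1;
        }
    }
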


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryManager.java
new file mode 100644
index 0000000..fb81373
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryManager.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.api.IFeedMemoryComponent.Type;
+
+/**
+ * Provides management of memory allocated for handling feed data flow through the node controller
+ */
+public interface IFeedMemoryManager {
+
+    public static final int START_COLLECTION_SIZE = 20;
+    public static final int START_POOL_SIZE = 10;
+
+    /**
+     * Gets a memory component allocated from the feed memory budget
+     * 
+     * @param type
+     *            the kind of memory component that needs to be allocated
+     * @return the allocated memory component
+     * @see Type
+     */
+    public IFeedMemoryComponent getMemoryComponent(Type type);
+
+    /**
+     * Expand a memory component by the default increment
+     * 
+     * @param memoryComponent
+     * @return true if the expansion succeeded
+     *         false if the requested expansion violates the configured budget
+     */
+    public boolean expandMemoryComponent(IFeedMemoryComponent memoryComponent);
+
+    /**
+     * Releases the given memory component to reclaim the memory allocated for the component
+     * 
+     * @param memoryComponent
+     *            the memory component that is being reclaimed/released
+     */
+    public void releaseMemoryComponent(IFeedMemoryComponent memoryComponent);
+
+}
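
For readers tracking the new memory-budgeting API, a minimal usage sketch follows. The class name and the null check on an exhausted budget are assumptions for illustration; the interface itself does not prescribe them.

    import org.apache.asterix.external.feed.api.IFeedMemoryComponent;
    import org.apache.asterix.external.feed.api.IFeedMemoryComponent.Type;
    import org.apache.asterix.external.feed.api.IFeedMemoryManager;

    public class MemoryBudgetExample {
        public static void useBudget(IFeedMemoryManager memoryManager) {
            // Ask the budget for a pool-type component.
            IFeedMemoryComponent pool = memoryManager.getMemoryComponent(Type.POOL);
            if (pool == null) {
                return; // assumed behavior when the budget is exhausted
            }
            // Grow the component by the default increment; false means the
            // requested expansion would violate the configured budget.
            if (!memoryManager.expandMemoryComponent(pool)) {
                // degrade gracefully, e.g. spill or discard frames
            }
            // Hand the memory back once the component is no longer needed.
            memoryManager.releaseMemoryComponent(pool);
        }
    }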

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessage.java
new file mode 100644
index 0000000..aa1af3a
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessage.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.io.Serializable;
+
+import org.apache.hyracks.api.dataflow.value.JSONSerializable;
+
+/**
+ * A control message exchanged between {@link IFeedManager} and {@link CentralFeedManager} that requests an action or reports an event.
+ */
+public interface IFeedMessage extends Serializable, JSONSerializable {
+
+    public enum MessageType {
+        END,
+        XAQL,
+        FEED_REPORT,
+        NODE_REPORT,
+        STORAGE_REPORT,
+        CONGESTION,
+        PREPARE_STALL,
+        TERMINATE_FLOW,
+        SCALE_IN_REQUEST,
+        COMMIT_ACK,
+        COMMIT_ACK_RESPONSE,
+        THROTTLING_ENABLED
+    }
+
+    /**
+     * Gets the type associated with this message
+     * 
+     * @return MessageType type associated with this message
+     */
+    public MessageType getMessageType();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessageService.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessageService.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessageService.java
new file mode 100644
index 0000000..42f71a7
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMessageService.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+/**
+ * Provides the functionality of sending a message ({@code IFeedMessage}) to the {@code CentralFeedManager}.
+ */
+public interface IFeedMessageService extends IFeedService {
+
+    /**
+     * Sends a message ({@code IFeedMessage}) to the {@code CentralFeedManager} running at the CC.
+     * The message is sent asynchronously.
+     * 
+     * @param message
+     *            the message to be sent
+     */
+    public void sendMessage(IFeedMessage message);
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetadataManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetadataManager.java
new file mode 100644
index 0000000..3712678
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetadataManager.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+
+public interface IFeedMetadataManager {
+
+    /**
+     * @param feedConnectionId
+     *            connection id corresponding to the feed connection
+     * @param tuple
+     *            the erroneous tuple that raised an exception
+     * @param message
+     *            the message corresponding to the exception being raised
+     * @param feedManager
+     * @throws AsterixException
+     */
+    public void logTuple(FeedConnectionId feedConnectionId, String tuple, String message, IFeedManager feedManager)
+            throws AsterixException;
+
+}
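
As a rough illustration of the intended call site (the wrapper class and method below are hypothetical), a parser that encounters a bad record could persist it like this:

    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.feed.api.IFeedManager;
    import org.apache.asterix.external.feed.api.IFeedMetadataManager;
    import org.apache.asterix.external.feed.management.FeedConnectionId;

    public class ErroneousTupleLogger {
        public static void log(IFeedMetadataManager metadataManager, FeedConnectionId connectionId,
                String rawTuple, Exception cause, IFeedManager feedManager) {
            try {
                // Record the offending tuple together with the error message.
                metadataManager.logTuple(connectionId, rawTuple, cause.getMessage(), feedManager);
            } catch (AsterixException e) {
                // A logging failure should not take the feed down; report it elsewhere.
            }
        }
    }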

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetricCollector.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetricCollector.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetricCollector.java
new file mode 100644
index 0000000..c35587c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMetricCollector.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+
+public interface IFeedMetricCollector {
+
+    public enum ValueType {
+        CPU_USAGE,
+        INFLOW_RATE,
+        OUTFLOW_RATE
+    }
+
+    public enum MetricType {
+        AVG,
+        RATE
+    }
+
+    public boolean sendReport(int senderId, int value);
+
+    public int getMetric(int senderId);
+
+    public int getMetric(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType);
+
+    int createReportSender(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType,
+            MetricType metricType);
+
+    public void removeReportSender(int senderId);
+
+    public void resetReportSender(int senderId);
+
+}
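
A sketch of the report-sender lifecycle implied by this interface is below; the class name is made up, the reported value is arbitrary, and the connection and runtime ids are assumed to come from the surrounding operator.

    import org.apache.asterix.external.feed.api.IFeedMetricCollector;
    import org.apache.asterix.external.feed.api.IFeedMetricCollector.MetricType;
    import org.apache.asterix.external.feed.api.IFeedMetricCollector.ValueType;
    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.runtime.FeedRuntimeId;

    public class OutflowRateProbe {
        public static int probe(IFeedMetricCollector collector, FeedConnectionId connectionId,
                FeedRuntimeId runtimeId) {
            // Register a sender that reports the outflow rate for this runtime.
            int senderId = collector.createReportSender(connectionId, runtimeId, ValueType.OUTFLOW_RATE,
                    MetricType.RATE);
            collector.sendReport(senderId, 42);          // report an observed value (42 is arbitrary)
            int current = collector.getMetric(senderId); // read back the aggregated metric
            collector.resetReportSender(senderId);       // clear accumulated state
            collector.removeReportSender(senderId);      // deregister when the runtime closes
            return current;
        }
    }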

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedOperatorOutputSideHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedOperatorOutputSideHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedOperatorOutputSideHandler.java
new file mode 100644
index 0000000..a8d0552
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedOperatorOutputSideHandler.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+
+/**
+ * Provides output-side buffering for a feed runtime.
+ * Right now, we only have a single output side handler
+ * and we can probably remove it completely.
+ *              ______
+ *             |      |
+ * ============|core  |============
+ * ============| op   |============
+ *             |______|^^^^^^^^^^^^
+ *                     Output Side
+ *                       Handler
+ *
+ **/
+public interface IFeedOperatorOutputSideHandler extends IFrameWriter {
+
+    public enum Type {
+        BASIC_FEED_OUTPUT_HANDLER,
+        DISTRIBUTE_FEED_OUTPUT_HANDLER,
+        COLLECT_TRANSFORM_FEED_OUTPUT_HANDLER
+    }
+
+    public FeedId getFeedId();
+
+    public Type getType();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedProvider.java
new file mode 100644
index 0000000..9eced07
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedProvider.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.management.FeedId;
+
+public interface IFeedProvider {
+
+    public void subscribeFeed(FeedId sourceFeedId);
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedRuntime.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedRuntime.java
new file mode 100644
index 0000000..269725d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedRuntime.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+
+public interface IFeedRuntime {
+
+    public enum FeedRuntimeType {
+        INTAKE,
+        COLLECT,
+        COMPUTE_COLLECT,
+        COMPUTE,
+        STORE,
+        OTHER,
+        ETS,
+        JOIN
+    }
+
+    public enum Mode {
+        PROCESS,
+        SPILL,
+        PROCESS_SPILL,
+        DISCARD,
+        POST_SPILL_DISCARD,
+        PROCESS_BACKLOG,
+        STALL,
+        FAIL,
+        END
+    }
+
+    /**
+     * @return the unique runtime id associated with the feedRuntime
+     */
+    public FeedRuntimeId getRuntimeId();
+
+    /**
+     * @return the frame writer associated with the feed runtime.
+     */
+    public IFrameWriter getFeedFrameWriter();
+
+    public FeedRuntimeInputHandler getInputHandler();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedService.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedService.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedService.java
new file mode 100644
index 0000000..3d3e0e5
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedService.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+public interface IFeedService {
+
+    public void start() throws Exception;
+
+    public void stop();
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedSubscriptionManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedSubscriptionManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedSubscriptionManager.java
new file mode 100644
index 0000000..ec4c396
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedSubscriptionManager.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+
+public interface IFeedSubscriptionManager {
+
+    /**
+     * @param subscribableRuntime
+     */
+    public void registerFeedSubscribableRuntime(ISubscribableRuntime subscribableRuntime);
+
+    /**
+     * @param subscribableRuntimeId
+     */
+    public void deregisterFeedSubscribableRuntime(SubscribableFeedRuntimeId subscribableRuntimeId);
+
+    /**
+     * @param subscribableRuntimeId
+     * @return
+     */
+    public ISubscribableRuntime getSubscribableRuntime(SubscribableFeedRuntimeId subscribableRuntimeId);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedTrackingManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedTrackingManager.java
new file mode 100644
index 0000000..6576e09
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedTrackingManager.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
+
+public interface IFeedTrackingManager {
+
+    public void submitAckReport(FeedTupleCommitAckMessage ackMessage);
+
+    public void disableAcking(FeedConnectionId connectionId);
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWork.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWork.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWork.java
new file mode 100644
index 0000000..2f5379b
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWork.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+/**
+ * Represents a feed management task. The task is executed asynchronously.
+ */
+public interface IFeedWork {
+
+    public Runnable getRunnable();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkEventListener.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkEventListener.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkEventListener.java
new file mode 100644
index 0000000..e5797d0
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkEventListener.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+/**
+ * Provides a callback mechanism that is invoked for events related to
+ * the execution of a feed management task.
+ */
+public interface IFeedWorkEventListener {
+
+    /**
+     * A callback that is invoked after the successful completion of a feed
+     * management task.
+     */
+    public void workCompleted(IFeedWork work);
+
+    /**
+     * A callback that is invoked after a failed execution of a feed
+     * management task.
+     * 
+     * @param e
+     *            exception encountered during execution of the task.
+     */
+    public void workFailed(IFeedWork work, Exception e);
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkManager.java
new file mode 100644
index 0000000..37661b7
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedWorkManager.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+public interface IFeedWorkManager {
+
+    public void submitWork(IFeedWork work, IFeedWorkEventListener listener);
+
+}
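
Taken together with IFeedWork and IFeedWorkEventListener above, a caller would submit an asynchronous feed-management task roughly as sketched here (class names are illustrative):

    import org.apache.asterix.external.feed.api.IFeedWork;
    import org.apache.asterix.external.feed.api.IFeedWorkEventListener;
    import org.apache.asterix.external.feed.api.IFeedWorkManager;

    public class FeedWorkExample {
        public static void submit(IFeedWorkManager workManager) {
            // A trivial task wrapped as IFeedWork.
            IFeedWork work = new IFeedWork() {
                @Override
                public Runnable getRunnable() {
                    return new Runnable() {
                        @Override
                        public void run() {
                            System.out.println("feed management task ran");
                        }
                    };
                }
            };
            workManager.submitWork(work, new IFeedWorkEventListener() {
                @Override
                public void workCompleted(IFeedWork w) {
                    // invoked after the task finishes successfully
                }

                @Override
                public void workFailed(IFeedWork w, Exception e) {
                    // invoked if the task fails with an exception
                }
            });
        }
    }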

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFrameEventCallback.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFrameEventCallback.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFrameEventCallback.java
new file mode 100644
index 0000000..647d847
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFrameEventCallback.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+public interface IFrameEventCallback {
+
+    public enum FrameEvent {
+        FINISHED_PROCESSING,
+        PENDING_WORK_THRESHOLD_REACHED,
+        PENDING_WORK_DONE,
+        NO_OP,
+        FINISHED_PROCESSING_SPILLAGE
+    }
+
+    public void frameEvent(FrameEvent frameEvent);
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePostProcessor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePostProcessor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePostProcessor.java
new file mode 100644
index 0000000..eab7a64
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePostProcessor.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public interface IFramePostProcessor {
+
+    public void postProcessFrame(ByteBuffer frame, FrameTupleAccessor frameAccessor);
+}
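
One plausible (purely illustrative) post-processor simply counts the tuples in each delivered frame:

    import java.nio.ByteBuffer;

    import org.apache.asterix.external.feed.api.IFramePostProcessor;
    import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;

    public class TupleCountingPostProcessor implements IFramePostProcessor {

        private long tuplesSeen;

        @Override
        public void postProcessFrame(ByteBuffer frame, FrameTupleAccessor frameAccessor) {
            frameAccessor.reset(frame);                  // point the accessor at this frame
            tuplesSeen += frameAccessor.getTupleCount(); // accumulate the tuple count
        }

        public long getTuplesSeen() {
            return tuplesSeen;
        }
    }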

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePreprocessor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePreprocessor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePreprocessor.java
new file mode 100644
index 0000000..55461b7
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFramePreprocessor.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.nio.ByteBuffer;
+
+public interface IFramePreprocessor {
+
+    public void preProcess(ByteBuffer frame) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IIntakeProgressTracker.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IIntakeProgressTracker.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IIntakeProgressTracker.java
new file mode 100644
index 0000000..4848ed8
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IIntakeProgressTracker.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.util.Map;
+
+public interface IIntakeProgressTracker {
+
+    public void configure(Map<String, String> configuration);
+
+    public void notifyIngestedTupleTimestamp(long timestamp);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IMessageReceiver.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IMessageReceiver.java
new file mode 100644
index 0000000..bdfbfdb
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IMessageReceiver.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+public interface IMessageReceiver<T> {
+
+    public void sendMessage(T message);
+
+    public void close(boolean processPending);
+
+    public void start();
+}
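
A toy implementation (illustrative only) that prints each message shows the expected lifecycle of start, sendMessage, and close:

    import org.apache.asterix.external.feed.api.IMessageReceiver;

    public class PrintingMessageReceiver implements IMessageReceiver<String> {

        @Override
        public void start() {
            // set up any resources needed before messages arrive
        }

        @Override
        public void sendMessage(String message) {
            System.out.println("received: " + message);
        }

        @Override
        public void close(boolean processPending) {
            // when processPending is true, drain queued messages before shutting down
        }
    }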

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscribableRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscribableRuntime.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscribableRuntime.java
new file mode 100644
index 0000000..ee07188
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscribableRuntime.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.util.List;
+
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.CollectionRuntime;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+
+/**
+ * Represents a feed runtime whose output can be routed along other parallel path(s).
+ */
+public interface ISubscribableRuntime extends IFeedRuntime {
+
+    /**
+     * @param collectionRuntime
+     * @throws Exception
+     */
+    public void subscribeFeed(FeedPolicyAccessor fpa, CollectionRuntime collectionRuntime) throws Exception;
+
+    /**
+     * @param collectionRuntime
+     * @throws Exception
+     */
+    public void unsubscribeFeed(CollectionRuntime collectionRuntime) throws Exception;
+
+    /**
+     * @return
+     * @throws Exception
+     */
+    public List<ISubscriberRuntime> getSubscribers();
+
+    /**
+     * @return
+     */
+    public DistributeFeedFrameWriter getFeedFrameWriter();
+
+    /**
+     * @return
+     */
+    public RecordDescriptor getRecordDescriptor();
+
+}
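
The subscribe/unsubscribe handshake is easiest to see in a short, hypothetical snippet; the policy accessor and collection runtime are assumed to be created by the caller:

    import org.apache.asterix.external.feed.api.ISubscribableRuntime;
    import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
    import org.apache.asterix.external.feed.runtime.CollectionRuntime;

    public class SubscriptionExample {
        public static void connect(ISubscribableRuntime sourceRuntime, FeedPolicyAccessor fpa,
                CollectionRuntime collector) throws Exception {
            sourceRuntime.subscribeFeed(fpa, collector); // route the source's output to the collector
            // ... frames flow to the collector while the connection is live ...
            sourceRuntime.unsubscribeFeed(collector);    // detach when the connection ends
        }
    }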

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriberRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriberRuntime.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriberRuntime.java
new file mode 100644
index 0000000..4d3e607
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriberRuntime.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.util.Map;
+
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector;
+
+public interface ISubscriberRuntime {
+
+    public Map<String, String> getFeedPolicy();
+
+    public FeedFrameCollector getFrameCollector();
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriptionProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriptionProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriptionProvider.java
new file mode 100644
index 0000000..b94a52e
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ISubscriptionProvider.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.external.feed.management.FeedId;
+
+public interface ISubscriptionProvider {
+
+    public void subscribeFeed(FeedId sourceFeedId, FeedId recipientFeedId);
+
+    public void unsubscribeFeed(FeedId sourceFeedId, FeedId recipientFeedId);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/CollectTransformFeedFrameWriter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/CollectTransformFeedFrameWriter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/CollectTransformFeedFrameWriter.java
new file mode 100644
index 0000000..18b6ec0
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/CollectTransformFeedFrameWriter.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.external.feed.api.IFeedOperatorOutputSideHandler;
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.hyracks.api.comm.IFrame;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.comm.VSizeFrame;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+
+// Simply a delivery frame writer; we can probably get rid of this at some point. {TODO}
+public class CollectTransformFeedFrameWriter implements IFeedOperatorOutputSideHandler {
+
+    private final FeedConnectionId connectionId;                // [Dataverse - Feed - Dataset]
+    private IFrameWriter downstreamWriter;                      // Writer to next (Operator/Connector)
+    private final FrameTupleAccessor inputFrameTupleAccessor;   // Accessing input frame tuples
+    private final FrameTupleAppender tupleAppender;             // Append tuples to output frame
+    private final IFrame frame;                                 // Output frame
+
+    private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
+
+    public CollectTransformFeedFrameWriter(IHyracksTaskContext ctx, IFrameWriter downstreamWriter,
+            ISubscribableRuntime sourceRuntime, RecordDescriptor outputRecordDescriptor, FeedConnectionId connectionId)
+                    throws HyracksDataException {
+        this.connectionId = connectionId;
+        this.downstreamWriter = downstreamWriter;
+        inputFrameTupleAccessor = new FrameTupleAccessor(sourceRuntime.getRecordDescriptor());
+        frame = new VSizeFrame(ctx);
+        tupleAppender = new FrameTupleAppender(frame);
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        downstreamWriter.open();
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        inputFrameTupleAccessor.reset(buffer);
+        int nTuple = inputFrameTupleAccessor.getTupleCount();
+        for (int t = 0; t < nTuple; t++) {
+            tupleBuilder.addField(inputFrameTupleAccessor, t, 0);
+            appendTupleToFrame();
+            tupleBuilder.reset();
+        }
+    }
+
+    private void appendTupleToFrame() throws HyracksDataException {
+        if (!tupleAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
+                tupleBuilder.getSize())) {
+            FrameUtils.flushFrame(frame.getBuffer(), downstreamWriter);
+            tupleAppender.reset(frame, true);
+            if (!tupleAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
+                    tupleBuilder.getSize())) {
+                throw new IllegalStateException();
+            }
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        downstreamWriter.fail();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        downstreamWriter.close();
+    }
+
+    @Override
+    public FeedId getFeedId() {
+        return connectionId.getFeedId();
+    }
+
+    @Override
+    public Type getType() {
+        return Type.COLLECT_TRANSFORM_FEED_OUTPUT_HANDLER;
+    }
+
+    public IFrameWriter getDownstreamWriter() {
+        return downstreamWriter;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public void reset(IFrameWriter writer) {
+        this.downstreamWriter = writer;
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucket.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucket.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucket.java
new file mode 100644
index 0000000..3943ced
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucket.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * A {@link DataBucket} is a wrapper around a {@link ByteBuffer} that expects a certain number of recipients
+ */
+public class DataBucket {
+
+    private static final AtomicInteger globalBucketId = new AtomicInteger(0);
+
+    private final ByteBuffer content;       // Content
+    private final AtomicInteger readCount;  // How many reads?
+    private final int bucketId;             // Id
+    private int desiredReadCount;           // Number of expected readers
+    private ContentType contentType;        // Data, End of stream, or End of spilled data
+    private final DataBucketPool pool;      // Pool of buckets
+
+    public enum ContentType {
+        DATA, // data (feed tuple)
+        EOD, // A signal indicating that there shall be no more data
+        EOSD // End of processing of spilled data
+    }
+
+    public DataBucket(DataBucketPool pool) {
+        this.content = ByteBuffer.allocate(pool.getFrameSize());
+        this.readCount = new AtomicInteger(0);
+        this.pool = pool;
+        this.contentType = ContentType.DATA;
+        this.bucketId = globalBucketId.incrementAndGet();
+    }
+
+    public synchronized void reset(ByteBuffer frame) {
+        if (frame != null) {
+            content.flip();
+            System.arraycopy(frame.array(), 0, content.array(), 0, frame.limit());
+            content.limit(frame.limit());
+            content.position(0);
+        }
+    }
+
+    public synchronized void doneReading() {
+        if (readCount.incrementAndGet() == desiredReadCount) {
+            readCount.set(0);
+            pool.returnDataBucket(this);
+        }
+    }
+
+    public void setDesiredReadCount(int rCount) {
+        this.desiredReadCount = rCount;
+    }
+
+    public ContentType getContentType() {
+        return contentType;
+    }
+
+    public void setContentType(ContentType contentType) {
+        this.contentType = contentType;
+    }
+
+    public synchronized ByteBuffer getContent() {
+        return content;
+    }
+
+    @Override
+    public String toString() {
+        return "DataBucket [" + bucketId + "]" + " (" + readCount + "," + desiredReadCount + ")";
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucketPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucketPool.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucketPool.java
new file mode 100644
index 0000000..339469e
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DataBucketPool.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.util.Stack;
+
+import org.apache.asterix.external.feed.api.IFeedMemoryComponent;
+import org.apache.asterix.external.feed.api.IFeedMemoryManager;
+
+/**
+ * Represents a pool of reusable {@link DataBucket}
+ */
+public class DataBucketPool implements IFeedMemoryComponent {
+
+    /** A unique identifier for the memory component **/
+    private final int componentId;
+
+    /** The {@link IFeedMemoryManager} for the NodeController **/
+    private final IFeedMemoryManager memoryManager;
+
+    /** A collection of available data buckets {@link DataBucket} **/
+    private final Stack<DataBucket> pool;
+
+    /** The total number of data buckets {@link DataBucket} allocated **/
+    private int totalAllocation;
+
+    /** The fixed frame size as configured for the asterix runtime **/
+    private final int frameSize;
+
+    public DataBucketPool(int componentId, IFeedMemoryManager memoryManager, int size, int frameSize) {
+        this.componentId = componentId;
+        this.memoryManager = memoryManager;
+        this.pool = new Stack<DataBucket>();
+        this.frameSize = frameSize;
+        expand(size);
+    }
+
+    public synchronized void returnDataBucket(DataBucket bucket) {
+        pool.push(bucket);
+    }
+
+    public synchronized DataBucket getDataBucket() {
+        if (pool.size() == 0) {
+            if (!memoryManager.expandMemoryComponent(this)) {
+                return null;
+            }
+        }
+        return pool.pop();
+    }
+
+    @Override
+    public Type getType() {
+        return Type.POOL;
+    }
+
+    @Override
+    public int getTotalAllocation() {
+        return totalAllocation;
+    }
+
+    @Override
+    public int getComponentId() {
+        return componentId;
+    }
+
+    @Override
+    public void expand(int delta) {
+        for (int i = 0; i < delta; i++) {
+            DataBucket bucket = new DataBucket(this);
+            pool.add(bucket);
+        }
+        totalAllocation += delta;
+    }
+
+    @Override
+    public void reset() {
+        totalAllocation -= pool.size();
+        pool.clear();
+    }
+
+    @Override
+    public String toString() {
+        return "DataBucketPool" + "[" + componentId + "]" + "(" + totalAllocation + ")";
+    }
+
+    public int getSize() {
+        return pool.size();
+    }
+
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+}
\ No newline at end of file
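
Reading DataBucket and DataBucketPool together, the intended reference-counted handoff looks roughly like the sketch below; the frame argument and reader count are placeholders supplied by the caller.

    import java.nio.ByteBuffer;

    import org.apache.asterix.external.feed.dataflow.DataBucket;
    import org.apache.asterix.external.feed.dataflow.DataBucketPool;

    public class DataBucketHandoffExample {
        public static void handOff(DataBucketPool pool, ByteBuffer frame, int numReaders) {
            DataBucket bucket = pool.getDataBucket();
            if (bucket == null) {
                return; // the pool could not grow within the memory budget
            }
            bucket.reset(frame);                    // copy the frame's contents into the bucket
            bucket.setDesiredReadCount(numReaders); // the bucket returns to the pool after this many reads
            // each reader consumes bucket.getContent() and then calls:
            bucket.doneReading();
        }
    }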

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DistributeFeedFrameWriter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DistributeFeedFrameWriter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DistributeFeedFrameWriter.java
new file mode 100644
index 0000000..7367d5a
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/DistributeFeedFrameWriter.java
@@ -0,0 +1,159 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedOperatorOutputSideHandler;
+import org.apache.asterix.external.feed.api.IFeedOperatorOutputSideHandler.Type;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+/**
+ * Provides a mechanism for distributing the frames received from an operator to a
+ * set of registered readers. Each reader typically operates at a different pace. Readers
+ * are isolated from each other to ensure that a slow reader does not impact the progress of
+ * others.
+ **/
+public class DistributeFeedFrameWriter implements IFrameWriter {
+
+    private static final Logger LOGGER = Logger.getLogger(DistributeFeedFrameWriter.class.getName());
+
+    /** A unique identifier for the feed to which the incoming tuples belong. **/
+    private final FeedId feedId;
+
+    /**
+     * An instance of FrameDistributor that provides the mechanism for distributing a frame to multiple readers, each
+     * operating in isolation.
+     **/
+    private final FrameDistributor frameDistributor;
+
+    /** The original frame writer instantiated as part of job creation **/
+    private IFrameWriter writer;
+
+    /** The feed operation whose output is being distributed by the DistributeFeedFrameWriter **/
+    private final FeedRuntimeType feedRuntimeType;
+
+    /** The value of the partition 'i' if this is the i'th instance of the associated operator **/
+    private final int partition;
+
+    public DistributeFeedFrameWriter(IHyracksTaskContext ctx, FeedId feedId, IFrameWriter writer,
+            FeedRuntimeType feedRuntimeType, int partition, FrameTupleAccessor fta, IFeedManager feedManager)
+                    throws IOException {
+        this.feedId = feedId;
+        this.frameDistributor = new FrameDistributor(feedId, feedRuntimeType, partition, true,
+                feedManager.getFeedMemoryManager(), fta);
+        this.feedRuntimeType = feedRuntimeType;
+        this.partition = partition;
+        this.writer = writer;
+    }
+
+    /**
+     * @param fpa
+     *            Feed policy accessor
+     * @param frameWriter
+     *            the writer that will receive the distributed frames
+     * @param connectionId
+     *            (Dataverse - Dataset - Feed)
+     * @return A frame collector.
+     * @throws Exception
+     */
+    public FeedFrameCollector subscribeFeed(FeedPolicyAccessor fpa, IFrameWriter frameWriter,
+            FeedConnectionId connectionId) throws Exception {
+        FeedFrameCollector collector = null;
+        if (!frameDistributor.isRegistered(frameWriter)) {
+            collector = new FeedFrameCollector(frameDistributor, fpa, frameWriter, connectionId);
+            frameDistributor.registerFrameCollector(collector);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered subscriber, new mode " + frameDistributor.getMode());
+            }
+            return collector;
+        } else {
+            throw new IllegalStateException("subscriber " + feedId + " already registered");
+        }
+    }
+
+    public void unsubscribeFeed(IFrameWriter recipientFeedFrameWriter) throws Exception {
+        boolean success = frameDistributor.deregisterFrameCollector(recipientFeedFrameWriter);
+        if (!success) {
+            throw new IllegalStateException(
+                    "Invalid attempt to unregister FeedFrameWriter " + recipientFeedFrameWriter + " not registered.");
+        }
+    }
+
+    public void notifyEndOfFeed() {
+        frameDistributor.notifyEndOfFeed();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        try {
+            frameDistributor.close();
+        } finally {
+            writer.close();
+        }
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        writer.fail();
+    }
+
+    @Override
+    public void nextFrame(ByteBuffer frame) throws HyracksDataException {
+        frameDistributor.nextFrame(frame);
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        writer.open();
+    }
+
+    public Map<IFrameWriter, FeedFrameCollector> getRegisteredReaders() {
+        return frameDistributor.getRegisteredReaders();
+    }
+
+    public void setWriter(IFrameWriter writer) {
+        this.writer = writer;
+    }
+
+    public Type getType() {
+        return IFeedOperatorOutputSideHandler.Type.DISTRIBUTE_FEED_OUTPUT_HANDLER;
+    }
+
+    @Override
+    public String toString() {
+        return feedId.toString() + feedRuntimeType + "[" + partition + "]";
+    }
+
+    public FrameDistributor.DistributionMode getDistributionMode() {
+        return frameDistributor.getDistributionMode();
+    }
+}
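
A rough sketch of the subscribe/unsubscribe contract above (illustrative only; distributeWriter, fpa, readerWriter and connectionId are assumed to be in scope in the caller):

    // register a reader; a duplicate registration throws IllegalStateException
    FeedFrameCollector collector = distributeWriter.subscribeFeed(fpa, readerWriter, connectionId);
    // frames pushed through distributeWriter.nextFrame(buffer) now also reach readerWriter
    // deregister the reader; unsubscribing a writer that was never registered also throws
    distributeWriter.unsubscribeFeed(readerWriter);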

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedCollectRuntimeInputHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedCollectRuntimeInputHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedCollectRuntimeInputHandler.java
new file mode 100644
index 0000000..6761d9a
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedCollectRuntimeInputHandler.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class FeedCollectRuntimeInputHandler extends FeedRuntimeInputHandler {
+
+    private final FeedFrameCache feedFrameCache;
+
+    public FeedCollectRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            IFrameWriter coreOperator, FeedPolicyAccessor fpa, boolean bufferingEnabled,
+            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedManager feedManager, int nPartitions)
+            throws IOException {
+        super(ctx, connectionId, runtimeId, coreOperator, fpa, bufferingEnabled, fta, recordDesc, feedManager,
+                nPartitions);
+        this.feedFrameCache = new FeedFrameCache(ctx, fta, coreOperator);
+    }
+
+    public void process(ByteBuffer frame) throws HyracksDataException {
+        feedFrameCache.sendMessage(frame);
+        super.process(frame);
+    }
+
+    public void replayFrom(int recordId) throws HyracksDataException {
+        feedFrameCache.replayRecords(recordId);
+    }
+
+    public void dropTill(int recordId) {
+        feedFrameCache.dropTillRecordId(recordId);
+    }
+
+    public void replayCached() throws HyracksDataException {
+        feedFrameCache.replayAll();
+    }
+
+}
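
The handler above simply caches a copy of every frame (via FeedFrameCache) before delegating to the base input handler. A sketch of how the ack/replay hooks are meant to be driven (illustrative; assumes record ids are assigned contiguously at the intake side):

    collectHandler.dropTill(ackedRecordId);        // storage acked up to ackedRecordId: release those frames
    collectHandler.replayFrom(ackedRecordId + 1);  // on failure, re-send everything after the last ack
    collectHandler.replayCached();                 // or re-send the entire cached backlog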

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
new file mode 100644
index 0000000..483ba19
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.exceptions.FrameDataException;
+import org.apache.asterix.external.feed.api.IExceptionHandler;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.util.FeedFrameUtil;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class FeedExceptionHandler implements IExceptionHandler {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedExceptionHandler.class.getName());
+
+    //TODO: Enable logging
+    private final IHyracksTaskContext ctx;
+    private final FrameTupleAccessor fta;
+
+    public FeedExceptionHandler(IHyracksTaskContext ctx, FrameTupleAccessor fta, RecordDescriptor recordDesc,
+            IFeedManager feedManager, FeedConnectionId connectionId) {
+        this.ctx = ctx;
+        this.fta = fta;
+    }
+
+    @Override
+    public ByteBuffer handleException(Exception e, ByteBuffer frame) {
+        try {
+            if (e instanceof FrameDataException) {
+                fta.reset(frame);
+                FrameDataException fde = (FrameDataException) e;
+                int tupleIndex = fde.getTupleIndex();
+                try {
+                    logExceptionCausingTuple(tupleIndex, e);
+                } catch (Exception ex) {
+                    ex.addSuppressed(e);
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Unable to log exception causing tuple due to..." + ex.getMessage());
+                    }
+                }
+                return FeedFrameUtil.removeBadTuple(ctx, tupleIndex, fta);
+            } else {
+                return null;
+            }
+        } catch (Exception exception) {
+            exception.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to handle exception " + exception.getMessage());
+            }
+            return null;
+        }
+    }
+
+    private void logExceptionCausingTuple(int tupleIndex, Exception e) throws HyracksDataException, AsterixException {
+    }
+}
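
The contract of handleException() above: for a FrameDataException it returns a new frame with the offending tuple removed, otherwise (or on any secondary failure) it returns null. A sketch of a retry loop built on that contract (illustrative only; not the actual call site):

    ByteBuffer current = frame;
    while (true) {
        try {
            coreOperator.nextFrame(current);
            break;
        } catch (Exception e) {
            ByteBuffer repaired = exceptionHandler.handleException(e, current);
            if (repaired == null) {
                throw new HyracksDataException(e); // not recoverable here
            }
            current = repaired; // retry without the offending tuple
        }
    }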

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCache.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCache.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCache.java
new file mode 100644
index 0000000..cd040c9
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCache.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.asterix.external.feed.message.MessageReceiver;
+import org.apache.asterix.external.util.FeedConstants.StatisticsConstants;
+import org.apache.hyracks.api.comm.IFrame;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.comm.VSizeFrame;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+
+/**
+ * Allows caching of feed frames. This class is used to provide upstream backup.
+ * Tuples at the intake layer are held in this cache until they are acknowledged by
+ * the storage layer after being persisted. On receiving an ack, the corresponding
+ * tuples (recordId <= ackedRecordId) are dropped from the cache.
+ */
+public class FeedFrameCache extends MessageReceiver<ByteBuffer> {
+
+    /**
+     * Key is the largest record id in the frame; value is a cached copy of the feed frame.
+     * At the intake side, the largest record id corresponds to the last record in the frame.
+     **/
+    private final Map<Integer, ByteBuffer> orderedCache;
+    private final FrameTupleAccessor tupleAccessor;
+    private final IFrameWriter frameWriter;
+    private final IHyracksTaskContext ctx;
+
+    public FeedFrameCache(IHyracksTaskContext ctx, FrameTupleAccessor tupleAccessor, IFrameWriter frameWriter) {
+        this.tupleAccessor = tupleAccessor;
+        this.frameWriter = frameWriter;
+        /** A LinkedHashMap ensures entries are retrieved in order of their insertion **/
+        this.orderedCache = new LinkedHashMap<Integer, ByteBuffer>();
+        this.ctx = ctx;
+    }
+
+    @Override
+    public void processMessage(ByteBuffer frame) throws Exception {
+        int lastRecordId = getLastRecordId(frame);
+        ByteBuffer clone = cloneFrame(frame);
+        orderedCache.put(lastRecordId, clone);
+    }
+
+    public void dropTillRecordId(int recordId) {
+        List<Integer> dropRecordIds = new ArrayList<Integer>();
+        for (Entry<Integer, ByteBuffer> entry : orderedCache.entrySet()) {
+            int recId = entry.getKey();
+            if (recId <= recordId) {
+                dropRecordIds.add(recId);
+            } else {
+                break;
+            }
+        }
+        for (Integer r : dropRecordIds) {
+            orderedCache.remove(r);
+        }
+    }
+
+    public void replayRecords(int startingRecordId) throws HyracksDataException {
+        boolean replayPositionReached = false;
+        for (Entry<Integer, ByteBuffer> entry : orderedCache.entrySet()) {
+            // the key (max record id per frame) increases monotonically
+            int maxRecordIdInFrame = entry.getKey();
+            if (!replayPositionReached) {
+                if (startingRecordId < maxRecordIdInFrame) {
+                    // first frame extending beyond the replay point: replay it from the requested record onwards
+                    replayFrame(startingRecordId, entry.getValue());
+                    replayPositionReached = true;
+                }
+            } else {
+                // frames beyond the replay point are replayed in full
+                replayFrame(entry.getValue());
+            }
+        }
+    }
+
+    /**
+     * Replays the frame starting from (and including) the tuple whose record id equals the specified recordId.
+     *
+     * @param recordId
+     * @param frame
+     * @throws HyracksDataException
+     */
+    private void replayFrame(int recordId, ByteBuffer frame) throws HyracksDataException {
+        tupleAccessor.reset(frame);
+        int nTuples = tupleAccessor.getTupleCount();
+        for (int i = 0; i < nTuples; i++) {
+            int rid = getRecordIdAtTupleIndex(i, frame);
+            if (rid == recordId) {
+                ByteBuffer slicedFrame = splitFrame(i, frame);
+                replayFrame(slicedFrame);
+                break;
+            }
+        }
+    }
+
+    private ByteBuffer splitFrame(int beginTupleIndex, ByteBuffer frame) throws HyracksDataException {
+        IFrame slicedFrame = new VSizeFrame(ctx);
+        FrameTupleAppender appender = new FrameTupleAppender();
+        appender.reset(slicedFrame, true);
+        int totalTuples = tupleAccessor.getTupleCount();
+        for (int ti = beginTupleIndex; ti < totalTuples; ti++) {
+            appender.append(tupleAccessor, ti);
+        }
+        return slicedFrame.getBuffer();
+    }
+
+    /**
+     * Replay the frame
+     * 
+     * @param frame
+     * @throws HyracksDataException
+     */
+    private void replayFrame(ByteBuffer frame) throws HyracksDataException {
+        frameWriter.nextFrame(frame);
+    }
+
+    private int getLastRecordId(ByteBuffer frame) {
+        tupleAccessor.reset(frame);
+        int nTuples = tupleAccessor.getTupleCount();
+        return getRecordIdAtTupleIndex(nTuples - 1, frame);
+    }
+
+    private int getRecordIdAtTupleIndex(int tupleIndex, ByteBuffer frame) {
+        tupleAccessor.reset(frame);
+        int recordStart = tupleAccessor.getTupleStartOffset(tupleIndex) + tupleAccessor.getFieldSlotsLength();
+        int openPartOffset = frame.getInt(recordStart + 6);
+        int numOpenFields = frame.getInt(recordStart + openPartOffset);
+        int recordIdOffset = frame.getInt(recordStart + openPartOffset + 4 + numOpenFields * 8
+                + StatisticsConstants.INTAKE_TUPLEID.length() + 2 + 1);
+        int lastRecordId = frame.getInt(recordStart + recordIdOffset);
+        return lastRecordId;
+    }
+
+    private ByteBuffer cloneFrame(ByteBuffer frame) {
+        ByteBuffer clone = ByteBuffer.allocate(frame.capacity());
+        System.arraycopy(frame.array(), 0, clone.array(), 0, frame.limit());
+        return clone;
+    }
+
+    public void replayAll() throws HyracksDataException {
+        for (Entry<Integer, ByteBuffer> entry : orderedCache.entrySet()) {
+            ByteBuffer frame = entry.getValue();
+            frameWriter.nextFrame(frame);
+        }
+    }
+}
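
Putting the pieces above together, the upstream-backup flow looks roughly like this (illustrative; the record ids are made up):

    feedFrameCache.sendMessage(frame);     // intake side: processMessage() clones and caches the frame,
                                           // keyed by the largest record id it contains
    feedFrameCache.dropTillRecordId(4999); // storage acked persistence up to record 4999: release those frames
    feedFrameCache.replayRecords(5000);    // downstream pipeline rebuilt: resume from the first unacked record
    feedFrameCache.replayAll();            // or replay the whole cached backlog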

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCollector.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCollector.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCollector.java
new file mode 100644
index 0000000..0d53524
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameCollector.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.MessageReceiver;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedFrameCollector extends MessageReceiver<DataBucket> {
+
+    private final FeedConnectionId connectionId;
+    private final FrameDistributor frameDistributor;
+    private FeedPolicyAccessor fpa;
+    private IFrameWriter frameWriter;
+    private State state;
+
+    public enum State {
+        ACTIVE,
+        FINISHED,
+        TRANSITION,
+        HANDOVER
+    }
+
+    public FeedFrameCollector(FrameDistributor frameDistributor, FeedPolicyAccessor feedPolicyAccessor,
+            IFrameWriter frameWriter, FeedConnectionId connectionId) {
+        super();
+        this.frameDistributor = frameDistributor;
+        this.fpa = feedPolicyAccessor;
+        this.connectionId = connectionId;
+        this.frameWriter = frameWriter;
+        this.state = State.ACTIVE;
+    }
+
+    @Override
+    public void processMessage(DataBucket bucket) throws Exception {
+        try {
+            ByteBuffer frame = bucket.getContent();
+            switch (bucket.getContentType()) {
+                case DATA:
+                    frameWriter.nextFrame(frame);
+                    break;
+                case EOD:
+                    closeCollector();
+                    break;
+                case EOSD:
+                    throw new AsterixException("Received data bucket with content of type " + bucket.getContentType());
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to process data bucket " + bucket + ", encountered exception " + e.getMessage());
+            }
+        } finally {
+            bucket.doneReading();
+        }
+    }
+
+    public void closeCollector() {
+        if (state.equals(State.TRANSITION)) {
+            super.close(true);
+            setState(State.ACTIVE);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(this + " is now in " + State.ACTIVE + " mode, processing frames synchronously");
+            }
+        } else {
+            flushPendingMessages();
+            setState(State.FINISHED);
+            synchronized (frameDistributor.getRegisteredCollectors()) {
+                frameDistributor.getRegisteredCollectors().notifyAll();
+            }
+            disconnect();
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Closed collector " + this);
+        }
+    }
+
+    public synchronized void disconnect() {
+        setState(State.FINISHED);
+    }
+
+    public synchronized void nextFrame(ByteBuffer frame) throws HyracksDataException {
+        frameWriter.nextFrame(frame);
+    }
+
+    public FeedPolicyAccessor getFeedPolicyAccessor() {
+        return fpa;
+    }
+
+    public synchronized State getState() {
+        return state;
+    }
+
+    public synchronized void setState(State state) {
+        this.state = state;
+        switch (state) {
+            case FINISHED:
+            case HANDOVER:
+                notifyAll();
+                break;
+            default:
+                break;
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Frame Collector " + this.frameDistributor.getFeedRuntimeType() + " switched to " + state);
+        }
+    }
+
+    public IFrameWriter getFrameWriter() {
+        return frameWriter;
+    }
+
+    public void setFrameWriter(IFrameWriter frameWriter) {
+        this.frameWriter = frameWriter;
+    }
+
+    @Override
+    public String toString() {
+        return "FrameCollector " + connectionId + "," + state + "]";
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o instanceof FeedFrameCollector) {
+            return connectionId.equals(((FeedFrameCollector) o).connectionId);
+        }
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        return connectionId.toString().hashCode();
+    }
+
+}
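
On the producer side the collector is driven as a plain MessageReceiver: the FrameDistributor enqueues DataBucket instances with sendMessage(), and processMessage() forwards DATA buckets to the wrapped IFrameWriter, treats EOD as the signal to close the collector, and rejects EOSD. A sketch (illustrative; construction of the buckets is not shown in this file):

    collector.sendMessage(dataBucket); // content type DATA: frame is handed to the wrapped IFrameWriter
    collector.sendMessage(eodBucket);  // content type EOD: closeCollector() runs and the collector finishes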

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameDiscarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameDiscarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameDiscarder.java
new file mode 100644
index 0000000..53ee475
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameDiscarder.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedFrameDiscarder {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedFrameDiscarder.class.getName());
+
+    private final FeedRuntimeInputHandler inputHandler;
+    private final FeedConnectionId connectionId;
+    private final FeedRuntimeId runtimeId;
+    private final FeedPolicyAccessor policyAccessor;
+    private final float maxFractionDiscard;
+    private int nDiscarded;
+
+    public FeedFrameDiscarder(FeedConnectionId connectionId, FeedRuntimeId runtimeId, FeedPolicyAccessor policyAccessor,
+            FeedRuntimeInputHandler inputHandler) throws HyracksDataException {
+        this.connectionId = connectionId;
+        this.runtimeId = runtimeId;
+        this.policyAccessor = policyAccessor;
+        this.inputHandler = inputHandler;
+        this.maxFractionDiscard = policyAccessor.getMaxFractionDiscard();
+    }
+
+    public boolean processMessage(ByteBuffer message) {
+        if (policyAccessor.getMaxFractionDiscard() != 0) {
+            long nProcessed = inputHandler.getProcessed();
+            long discardLimit = (long) (nProcessed * maxFractionDiscard);
+            if (nDiscarded >= discardLimit) {
+                return false;
+            }
+            nDiscarded++;
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Discarded frame by " + connectionId + " (" + runtimeId + ")" + " count so far  ("
+                        + nDiscarded + ") Limit [" + discardLimit + "]");
+            }
+            return true;
+        }
+        return false;
+    }
+
+}
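
As a worked example of the discard budget above: with maxFractionDiscard = 0.1 and 2000 frames already processed, the limit is (long) (2000 * 0.1) = 200; once nDiscarded reaches 200, processMessage() returns false and the input handler must fall back to spilling or stalling as its feed policy dictates. A policy with maxFractionDiscard == 0 never discards.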



http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
deleted file mode 100644
index 5085087..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.feeds.CollectionRuntime;
-import org.apache.asterix.common.feeds.DistributeFeedFrameWriter;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.IngestionRuntime;
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IAdapterRuntimeManager;
-import org.apache.asterix.common.feeds.api.IAdapterRuntimeManager.State;
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedSubscriptionManager;
-import org.apache.asterix.common.feeds.api.IIntakeProgressTracker;
-import org.apache.asterix.common.feeds.api.ISubscriberRuntime;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-/**
- * The runtime for @see{FeedIntakeOperationDescriptor}.
- * Provides the core functionality to set up the artifacts for ingestion of a feed.
- * The artifacts are lazily activated when a feed receives a subscription request.
- */
-public class FeedIntakeOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-
-    private static Logger LOGGER = Logger.getLogger(FeedIntakeOperatorNodePushable.class.getName());
-
-    private final FeedId feedId;
-    private final int partition;
-    private final IFeedSubscriptionManager feedSubscriptionManager;
-    private final IFeedManager feedManager;
-    private final IHyracksTaskContext ctx;
-    private final IAdapterFactory adapterFactory;
-
-    private IngestionRuntime ingestionRuntime;
-    private IDataSourceAdapter adapter;
-    private IIntakeProgressTracker tracker;
-    private DistributeFeedFrameWriter feedFrameWriter;
-
-    public FeedIntakeOperatorNodePushable(IHyracksTaskContext ctx, FeedId feedId, IAdapterFactory adapterFactory,
-            int partition, IngestionRuntime ingestionRuntime, FeedPolicyAccessor policyAccessor) {
-        this.ctx = ctx;
-        this.feedId = feedId;
-        this.partition = partition;
-        this.ingestionRuntime = ingestionRuntime;
-        this.adapterFactory = adapterFactory;
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.feedSubscriptionManager = runtimeCtx.getFeedManager().getFeedSubscriptionManager();
-        this.feedManager = runtimeCtx.getFeedManager();
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        IAdapterRuntimeManager adapterRuntimeManager = null;
-        try {
-            if (ingestionRuntime == null) {
-                try {
-                    adapter = adapterFactory.createAdapter(ctx, partition);
-                    //TODO: Fix record tracking
-                    //                    if (adapterFactory.isRecordTrackingEnabled()) {
-                    //                        tracker = adapterFactory.createIntakeProgressTracker();
-                    //                    }
-                } catch (Exception e) {
-                    LOGGER.severe("Unable to create adapter : " + adapterFactory.getAlias() + "[" + partition + "]"
-                            + " Exception " + e);
-                    throw new HyracksDataException(e);
-                }
-                FrameTupleAccessor fta = new FrameTupleAccessor(recordDesc);
-                feedFrameWriter = new DistributeFeedFrameWriter(ctx, feedId, writer, FeedRuntimeType.INTAKE, partition,
-                        fta, feedManager);
-                adapterRuntimeManager = new AdapterRuntimeManager(feedId, adapter, tracker, feedFrameWriter, partition);
-                SubscribableFeedRuntimeId runtimeId = new SubscribableFeedRuntimeId(feedId, FeedRuntimeType.INTAKE,
-                        partition);
-                ingestionRuntime = new IngestionRuntime(feedId, runtimeId, feedFrameWriter, recordDesc,
-                        adapterRuntimeManager);
-                feedSubscriptionManager.registerFeedSubscribableRuntime(ingestionRuntime);
-                feedFrameWriter.open();
-            } else {
-                if (ingestionRuntime.getAdapterRuntimeManager().getState().equals(State.INACTIVE_INGESTION)) {
-                    ingestionRuntime.getAdapterRuntimeManager().setState(State.ACTIVE_INGESTION);
-                    adapter = ingestionRuntime.getAdapterRuntimeManager().getFeedAdapter();
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info(" Switching to " + State.ACTIVE_INGESTION + " for ingestion runtime "
-                                + ingestionRuntime);
-                        LOGGER.info(" Adaptor " + adapter.getClass().getName() + "[" + partition + "]"
-                                + " connected to backend for feed " + feedId);
-                    }
-                    feedFrameWriter = ingestionRuntime.getFeedFrameWriter();
-                } else {
-                    String message = "Feed Ingestion Runtime for feed " + feedId
-                            + " is already registered and is active!.";
-                    LOGGER.severe(message);
-                    throw new IllegalStateException(message);
-                }
-            }
-
-            waitTillIngestionIsOver(adapterRuntimeManager);
-            feedSubscriptionManager
-                    .deregisterFeedSubscribableRuntime((SubscribableFeedRuntimeId) ingestionRuntime.getRuntimeId());
-            if (adapterRuntimeManager.getState().equals(IAdapterRuntimeManager.State.FAILED_INGESTION)) {
-                throw new HyracksDataException("Unable to ingest data");
-            }
-
-        } catch (InterruptedException ie) {
-            /*
-             * An Interrupted Exception is thrown if the Intake job cannot progress further due to failure of another node involved in the Hyracks job.
-             * As the Intake job involves only the intake operator, the exception is indicative of a failure at the sibling intake operator location.
-             * The surviving intake partitions must continue to live and receive data from the external source.
-             */
-            List<ISubscriberRuntime> subscribers = ingestionRuntime.getSubscribers();
-            FeedPolicyAccessor policyAccessor = new FeedPolicyAccessor(new HashMap<String, String>());
-            boolean needToHandleFailure = false;
-            List<ISubscriberRuntime> failingSubscribers = new ArrayList<ISubscriberRuntime>();
-            for (ISubscriberRuntime subscriber : subscribers) {
-                policyAccessor.reset(subscriber.getFeedPolicy());
-                if (!policyAccessor.continueOnHardwareFailure()) {
-                    failingSubscribers.add(subscriber);
-                } else {
-                    needToHandleFailure = true;
-                }
-            }
-
-            for (ISubscriberRuntime failingSubscriber : failingSubscribers) {
-                try {
-                    ingestionRuntime.unsubscribeFeed((CollectionRuntime) failingSubscriber);
-                } catch (Exception e) {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning(
-                                "Excpetion in unsubscribing " + failingSubscriber + " message " + e.getMessage());
-                    }
-                }
-            }
-
-            if (needToHandleFailure) {
-                ingestionRuntime.getAdapterRuntimeManager().setState(State.INACTIVE_INGESTION);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Switching to " + State.INACTIVE_INGESTION + " on occurrence of failure.");
-                }
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(
-                            "Interrupted Exception. None of the subscribers need to handle failures. Shutting down feed ingestion");
-                }
-                feedSubscriptionManager
-                        .deregisterFeedSubscribableRuntime((SubscribableFeedRuntimeId) ingestionRuntime.getRuntimeId());
-                throw new HyracksDataException(ie);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        } finally {
-            if (ingestionRuntime != null
-                    && !ingestionRuntime.getAdapterRuntimeManager().getState().equals(State.INACTIVE_INGESTION)) {
-                feedFrameWriter.close();
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Closed Frame Writer " + feedFrameWriter + " adapter state "
-                            + ingestionRuntime.getAdapterRuntimeManager().getState());
-                }
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Ending intake operator node pushable in state " + State.INACTIVE_INGESTION
-                            + " Will resume after correcting failure");
-                }
-            }
-
-        }
-    }
-
-    private void waitTillIngestionIsOver(IAdapterRuntimeManager adapterRuntimeManager) throws InterruptedException {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Waiting for adaptor [" + partition + "]" + "to be done with ingestion of feed " + feedId);
-        }
-        synchronized (adapterRuntimeManager) {
-            while (!(adapterRuntimeManager.getState().equals(IAdapterRuntimeManager.State.FINISHED_INGESTION)
-                    || (adapterRuntimeManager.getState().equals(IAdapterRuntimeManager.State.FAILED_INGESTION)))) {
-                adapterRuntimeManager.wait();
-            }
-        }
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(" Adaptor " + adapter.getClass().getName() + "[" + partition + "]"
-                    + " done with ingestion of feed " + feedId);
-        }
-    }
-
-}
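
For context on the wait loop in waitTillIngestionIsOver() above: it parks on the adapter runtime manager's monitor, so whichever component moves the adapter into FINISHED_INGESTION or FAILED_INGESTION is expected to notify that monitor. The notifying side is not part of this file; the sketch below only illustrates the pattern:

    synchronized (adapterRuntimeManager) {
        adapterRuntimeManager.setState(IAdapterRuntimeManager.State.FINISHED_INGESTION);
        adapterRuntimeManager.notifyAll(); // wakes waitTillIngestionIsOver()
    }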

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedLifecycleEventSubscriber.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedLifecycleEventSubscriber.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedLifecycleEventSubscriber.java
deleted file mode 100644
index 2add90d..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedLifecycleEventSubscriber.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Iterator;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
-
-public class FeedLifecycleEventSubscriber implements IFeedLifecycleEventSubscriber {
-
-    private LinkedBlockingQueue<FeedLifecycleEvent> inbox;
-
-    public FeedLifecycleEventSubscriber() {
-        this.inbox = new LinkedBlockingQueue<FeedLifecycleEvent>();
-    }
-
-    @Override
-    public void handleFeedEvent(FeedLifecycleEvent event) {
-        inbox.add(event);
-    }
-
-    @Override
-    public void assertEvent(FeedLifecycleEvent event) throws AsterixException, InterruptedException {
-        boolean eventOccurred = false;
-        FeedLifecycleEvent e = null;
-        Iterator<FeedLifecycleEvent> eventsSoFar = inbox.iterator();
-        while (eventsSoFar.hasNext()) {
-            e = eventsSoFar.next();
-            assertNoFailure(e);
-            eventOccurred = e.equals(event);
-        }
-
-        while (!eventOccurred) {
-            e = inbox.take();
-            eventOccurred = e.equals(event);
-            if (!eventOccurred) {
-                assertNoFailure(e);
-            }
-        }
-    }
-
-    private void assertNoFailure(FeedLifecycleEvent e) throws AsterixException {
-        if (e.equals(FeedLifecycleEvent.FEED_INTAKE_FAILURE) || e.equals(FeedLifecycleEvent.FEED_COLLECT_FAILURE)) {
-            throw new AsterixException("Failure in feed");
-        }
-    }
-
-}
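
The subscriber above is essentially a blocking assertion helper: one thread delivers lifecycle events through handleFeedEvent(), while another blocks in assertEvent() until the expected event is observed, and an AsterixException is raised as soon as an intake or collect failure event is seen instead. A sketch (illustrative; expectedEvent stands for whatever lifecycle event the caller is waiting on):

    FeedLifecycleEventSubscriber subscriber = new FeedLifecycleEventSubscriber();
    // event-listener thread: subscriber.handleFeedEvent(event) for each lifecycle event
    // waiting thread:
    subscriber.assertEvent(expectedEvent); // returns once seen, throws AsterixException on failure events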

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorDescriptor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorDescriptor.java
deleted file mode 100644
index a1e9917..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorDescriptor.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.api.IFeedMessage;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-/**
- * Sends a control message to the registered message queue for feed specified by its feedId.
- */
-public class FeedMessageOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-    private final IFeedMessage feedMessage;
-
-    public FeedMessageOperatorDescriptor(JobSpecification spec, FeedConnectionId connectionId,
-            IFeedMessage feedMessage) {
-        super(spec, 0, 1);
-        this.connectionId = connectionId;
-        this.feedMessage = feedMessage;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        return new FeedMessageOperatorNodePushable(ctx, connectionId, feedMessage, partition, nPartitions);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorNodePushable.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorNodePushable.java
deleted file mode 100644
index 313fa1a..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMessageOperatorNodePushable.java
+++ /dev/null
@@ -1,302 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.feeds.CollectionRuntime;
-import org.apache.asterix.common.feeds.DistributeFeedFrameWriter;
-import org.apache.asterix.common.feeds.FeedCollectRuntimeInputHandler;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedFrameCollector;
-import org.apache.asterix.common.feeds.FeedFrameCollector.State;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedRuntime;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeInputHandler;
-import org.apache.asterix.common.feeds.FeedRuntimeManager;
-import org.apache.asterix.common.feeds.FeedTupleCommitResponseMessage;
-import org.apache.asterix.common.feeds.IngestionRuntime;
-import org.apache.asterix.common.feeds.IntakePartitionStatistics;
-import org.apache.asterix.common.feeds.MonitoredBufferTimerTasks.MonitoredBufferStorageTimerTask;
-import org.apache.asterix.common.feeds.StorageSideMonitoredBuffer;
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IAdapterRuntimeManager;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedMessage;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.common.feeds.api.IIntakeProgressTracker;
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-import org.apache.asterix.common.feeds.message.EndFeedMessage;
-import org.apache.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-/**
- * Runtime for the FeedMessageOpertorDescriptor. This operator is responsible for communicating
- * a feed message to the local feed manager on the host node controller.
- *
- * @see FeedMessageOperatorDescriptor
- *      IFeedMessage
- *      IFeedManager
- */
-public class FeedMessageOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMessageOperatorNodePushable.class.getName());
-
-    private final FeedConnectionId connectionId;
-    private final IFeedMessage message;
-    private final IFeedManager feedManager;
-    private final int partition;
-
-    public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx, FeedConnectionId connectionId,
-            IFeedMessage feedMessage, int partition, int nPartitions) {
-        this.connectionId = connectionId;
-        this.message = feedMessage;
-        this.partition = partition;
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.feedManager = runtimeCtx.getFeedManager();
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        try {
-            writer.open();
-            switch (message.getMessageType()) {
-                case END:
-                    EndFeedMessage endFeedMessage = (EndFeedMessage) message;
-                    switch (endFeedMessage.getEndMessageType()) {
-                        case DISCONNECT_FEED:
-                            hanldeDisconnectFeedTypeMessage(endFeedMessage);
-                            break;
-                        case DISCONTINUE_SOURCE:
-                            handleDiscontinueFeedTypeMessage(endFeedMessage);
-                            break;
-                    }
-                    break;
-                case PREPARE_STALL: {
-                    handlePrepareStallMessage((PrepareStallMessage) message);
-                    break;
-                }
-                case TERMINATE_FLOW: {
-                    FeedConnectionId connectionId = ((TerminateDataFlowMessage) message).getConnectionId();
-                    handleTerminateFlowMessage(connectionId);
-                    break;
-                }
-                case COMMIT_ACK_RESPONSE: {
-                    handleFeedTupleCommitResponseMessage((FeedTupleCommitResponseMessage) message);
-                    break;
-                }
-                case THROTTLING_ENABLED: {
-                    handleThrottlingEnabledMessage((ThrottlingEnabledFeedMessage) message);
-                    break;
-                }
-                default:
-                    break;
-
-            }
-
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        } finally {
-            writer.close();
-        }
-    }
-
-    private void handleThrottlingEnabledMessage(ThrottlingEnabledFeedMessage throttlingMessage) {
-        FeedConnectionId connectionId = throttlingMessage.getConnectionId();
-        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
-        Set<FeedRuntimeId> runtimes = runtimeManager.getFeedRuntimes();
-        for (FeedRuntimeId runtimeId : runtimes) {
-            if (runtimeId.getFeedRuntimeType().equals(FeedRuntimeType.STORE)) {
-                FeedRuntime storeRuntime = runtimeManager.getFeedRuntime(runtimeId);
-                ((StorageSideMonitoredBuffer) (storeRuntime.getInputHandler().getmBuffer())).setAcking(false);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Acking Disabled in view of throttling that has been activted upfron in the pipeline "
-                            + connectionId);
-                }
-            }
-        }
-    }
-
-    private void handleFeedTupleCommitResponseMessage(FeedTupleCommitResponseMessage commitResponseMessage) {
-        FeedConnectionId connectionId = commitResponseMessage.getConnectionId();
-        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
-        Set<FeedRuntimeId> runtimes = runtimeManager.getFeedRuntimes();
-        for (FeedRuntimeId runtimeId : runtimes) {
-            FeedRuntime runtime = runtimeManager.getFeedRuntime(runtimeId);
-            switch (runtimeId.getFeedRuntimeType()) {
-                case COLLECT:
-                    FeedCollectRuntimeInputHandler inputHandler = (FeedCollectRuntimeInputHandler) runtime
-                            .getInputHandler();
-                    int maxBasePersisted = commitResponseMessage.getMaxWindowAcked();
-                    inputHandler.dropTill(IntakePartitionStatistics.ACK_WINDOW_SIZE * (maxBasePersisted + 1));
-                    break;
-                case STORE:
-                    MonitoredBufferStorageTimerTask sTask = runtime.getInputHandler().getmBuffer()
-                            .getStorageTimeTrackingRateTask();
-                    sTask.receiveCommitAckResponse(commitResponseMessage);
-                    break;
-                default:
-                    break;
-            }
-        }
-
-        commitResponseMessage.getIntakePartition();
-        SubscribableFeedRuntimeId sid = new SubscribableFeedRuntimeId(connectionId.getFeedId(), FeedRuntimeType.INTAKE,
-                partition);
-        IngestionRuntime ingestionRuntime = (IngestionRuntime) feedManager.getFeedSubscriptionManager()
-                .getSubscribableRuntime(sid);
-        if (ingestionRuntime != null) {
-            IIntakeProgressTracker tracker = ingestionRuntime.getAdapterRuntimeManager().getProgressTracker();
-            if (tracker != null) {
-                tracker.notifyIngestedTupleTimestamp(System.currentTimeMillis());
-            }
-        }
-    }
-
-    private void handleTerminateFlowMessage(FeedConnectionId connectionId) throws HyracksDataException {
-        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
-        Set<FeedRuntimeId> feedRuntimes = runtimeManager.getFeedRuntimes();
-
-        boolean found = false;
-        for (FeedRuntimeId runtimeId : feedRuntimes) {
-            FeedRuntime runtime = runtimeManager.getFeedRuntime(runtimeId);
-            if (runtime.getRuntimeId().getRuntimeType().equals(FeedRuntimeType.COLLECT)) {
-                ((CollectionRuntime) runtime).getFrameCollector().setState(State.HANDOVER);
-                found = true;
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Switched " + runtime + " to Hand Over stage");
-                }
-            }
-        }
-        if (!found) {
-            throw new HyracksDataException("COLLECT Runtime  not found!");
-        }
-    }
-
-    private void handlePrepareStallMessage(PrepareStallMessage prepareStallMessage) throws HyracksDataException {
-        FeedConnectionId connectionId = prepareStallMessage.getConnectionId();
-        int computePartitionsRetainLimit = prepareStallMessage.getComputePartitionsRetainLimit();
-        FeedRuntimeManager runtimeManager = feedManager.getFeedConnectionManager().getFeedRuntimeManager(connectionId);
-        Set<FeedRuntimeId> feedRuntimes = runtimeManager.getFeedRuntimes();
-        for (FeedRuntimeId runtimeId : feedRuntimes) {
-            FeedRuntime runtime = runtimeManager.getFeedRuntime(runtimeId);
-            switch (runtimeId.getFeedRuntimeType()) {
-                case COMPUTE:
-                    Mode requiredMode = runtimeId.getPartition() <= computePartitionsRetainLimit ? Mode.STALL
-                            : Mode.END;
-                    runtime.setMode(requiredMode);
-                    break;
-                default:
-                    runtime.setMode(Mode.STALL);
-                    break;
-            }
-        }
-    }
-
-    private void handleDiscontinueFeedTypeMessage(EndFeedMessage endFeedMessage) throws Exception {
-        FeedId sourceFeedId = endFeedMessage.getSourceFeedId();
-        SubscribableFeedRuntimeId subscribableRuntimeId = new SubscribableFeedRuntimeId(sourceFeedId,
-                FeedRuntimeType.INTAKE, partition);
-        ISubscribableRuntime feedRuntime = feedManager.getFeedSubscriptionManager()
-                .getSubscribableRuntime(subscribableRuntimeId);
-        IAdapterRuntimeManager adapterRuntimeManager = ((IngestionRuntime) feedRuntime).getAdapterRuntimeManager();
-        adapterRuntimeManager.stop();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Stopped Adapter " + adapterRuntimeManager);
-        }
-    }
-
-    private void hanldeDisconnectFeedTypeMessage(EndFeedMessage endFeedMessage) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Ending feed:" + endFeedMessage.getFeedConnectionId());
-        }
-        FeedRuntimeId runtimeId = null;
-        FeedRuntimeType subscribableRuntimeType = ((EndFeedMessage) message).getSourceRuntimeType();
-        if (endFeedMessage.isCompleteDisconnection()) {
-            // subscribableRuntimeType represents the location at which the feed connection receives data
-            FeedRuntimeType runtimeType = null;
-            switch (subscribableRuntimeType) {
-                case INTAKE:
-                    runtimeType = FeedRuntimeType.COLLECT;
-                    break;
-                case COMPUTE:
-                    runtimeType = FeedRuntimeType.COMPUTE_COLLECT;
-                    break;
-                default:
-                    throw new IllegalStateException("Invalid subscribable runtime type " + subscribableRuntimeType);
-            }
-
-            runtimeId = new FeedRuntimeId(runtimeType, partition, FeedRuntimeId.DEFAULT_OPERAND_ID);
-            CollectionRuntime feedRuntime = (CollectionRuntime) feedManager.getFeedConnectionManager()
-                    .getFeedRuntime(connectionId, runtimeId);
-            feedRuntime.getSourceRuntime().unsubscribeFeed(feedRuntime);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Complete Unsubscription of " + endFeedMessage.getFeedConnectionId());
-            }
-        } else {
-            // subscribableRuntimeType represents the location for data hand-off in the presence of subscribers
-            switch (subscribableRuntimeType) {
-                case INTAKE:
-                    // illegal state as data hand-off from one feed to another does not happen at intake
-                    throw new IllegalStateException("Illegal state: invalid runtime type " + subscribableRuntimeType);
-                case COMPUTE:
-                    // feed could be primary or secondary, doesn't matter
-                    SubscribableFeedRuntimeId feedSubscribableRuntimeId = new SubscribableFeedRuntimeId(
-                            connectionId.getFeedId(), FeedRuntimeType.COMPUTE, partition);
-                    ISubscribableRuntime feedRuntime = feedManager.getFeedSubscriptionManager()
-                            .getSubscribableRuntime(feedSubscribableRuntimeId);
-                    DistributeFeedFrameWriter dWriter = feedRuntime.getFeedFrameWriter();
-                    Map<IFrameWriter, FeedFrameCollector> registeredCollectors = dWriter.getRegisteredReaders();
-
-                    IFrameWriter unsubscribingWriter = null;
-                    for (Entry<IFrameWriter, FeedFrameCollector> entry : registeredCollectors.entrySet()) {
-                        IFrameWriter frameWriter = entry.getKey();
-                        FeedRuntimeInputHandler feedFrameWriter = (FeedRuntimeInputHandler) frameWriter;
-                        if (feedFrameWriter.getConnectionId().equals(endFeedMessage.getFeedConnectionId())) {
-                            unsubscribingWriter = feedFrameWriter;
-                            dWriter.unsubscribeFeed(unsubscribingWriter);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Partial Unsubscription of " + unsubscribingWriter);
-                            }
-                            break;
-                        }
-                    }
-                    break;
-                default:
-                    break;
-            }
-
-        }
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Unsubscribed from feed :" + connectionId);
-        }
-    }
-}
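
For reference, the stall-preparation policy applied in handlePrepareStallMessage above reduces to a small decision function: compute partitions whose index falls within the retain limit are stalled, the remaining compute partitions are ended, and every other runtime type is stalled. A minimal, self-contained sketch of that policy follows; the enum and class names are illustrative stand-ins, not the actual AsterixDB types.

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Stand-in types; the real FeedRuntimeType/Mode enums live in asterix-common.
    enum RuntimeType { COMPUTE, STORE, OTHER }
    enum Mode { PROCESS, STALL, END }

    public class PrepareStallSketch {

        // Mirrors the switch in handlePrepareStallMessage: compute partitions within
        // the retain limit keep running in STALL mode, the rest are ended, and all
        // non-compute runtimes are stalled.
        static Mode requiredMode(RuntimeType type, int partition, int computePartitionsRetainLimit) {
            if (type == RuntimeType.COMPUTE) {
                return partition <= computePartitionsRetainLimit ? Mode.STALL : Mode.END;
            }
            return Mode.STALL;
        }

        public static void main(String[] args) {
            Map<String, Mode> decisions = new LinkedHashMap<>();
            decisions.put("COMPUTE[0]", requiredMode(RuntimeType.COMPUTE, 0, 1));
            decisions.put("COMPUTE[3]", requiredMode(RuntimeType.COMPUTE, 3, 1));
            decisions.put("STORE[0]", requiredMode(RuntimeType.STORE, 0, 1));
            decisions.forEach((k, v) -> System.out.println(k + " -> " + v));
        }
    }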

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaComputeNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaComputeNodePushable.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaComputeNodePushable.java
deleted file mode 100644
index f833019..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaComputeNodePushable.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.feeds.DistributeFeedFrameWriter;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedRuntime;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeInputHandler;
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-import org.apache.asterix.common.feeds.SubscribableRuntime;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.external.feeds.FeedPolicyEnforcer;
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IActivity;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-/*
- * This IFrameWriter doesn't follow the contract
- */
-public class FeedMetaComputeNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMetaComputeNodePushable.class.getName());
-
-    /** Runtime node pushable corresponding to the core feed operator **/
-    private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
-
-    /**
-     * A policy enforcer that ensures dynamic decisions for a feed are taken
-     * in accordance with the associated ingestion policy
-     **/
-    private FeedPolicyEnforcer policyEnforcer;
-
-    /**
-     * The Feed Runtime instance associated with the operator. Feed Runtime
-     * captures the state of the operator while the feed is active.
-     */
-    private FeedRuntime feedRuntime;
-
-    /**
-     * A unique identifier for the feed instance. A feed instance represents
-     * the flow of data from a feed to a dataset.
-     **/
-    private FeedConnectionId connectionId;
-
-    /**
-     * Denotes the i'th operator instance in a setting where K operator
-     * instances are scheduled to run in parallel
-     **/
-    private int partition;
-
-    private int nPartitions;
-
-    /** The (singleton) instance of IFeedManager **/
-    private IFeedManager feedManager;
-
-    private FrameTupleAccessor fta;
-
-    private final IHyracksTaskContext ctx;
-
-    private final FeedRuntimeType runtimeType = FeedRuntimeType.COMPUTE;
-
-    private FeedRuntimeInputHandler inputSideHandler;
-
-    public FeedMetaComputeNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
-            int partition, int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
-            Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
-        this.ctx = ctx;
-        this.coreOperator = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
-                .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
-        this.policyEnforcer = new FeedPolicyEnforcer(feedConnectionId, feedPolicyProperties);
-        this.partition = partition;
-        this.nPartitions = nPartitions;
-        this.connectionId = feedConnectionId;
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.feedManager = runtimeCtx.getFeedManager();
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        FeedRuntimeId runtimeId = new SubscribableFeedRuntimeId(connectionId.getFeedId(), runtimeType, partition);
-        try {
-            feedRuntime = feedManager.getFeedConnectionManager().getFeedRuntime(connectionId, runtimeId);
-            if (feedRuntime == null) {
-                initializeNewFeedRuntime(runtimeId);
-            } else {
-                reviveOldFeedRuntime(runtimeId);
-            }
-            writer.open();
-            coreOperator.open();
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void initializeNewFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
-        this.fta = new FrameTupleAccessor(recordDesc);
-        this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, coreOperator,
-                policyEnforcer.getFeedPolicyAccessor(), true, fta, recordDesc, feedManager, nPartitions);
-
-        DistributeFeedFrameWriter distributeWriter = new DistributeFeedFrameWriter(ctx, connectionId.getFeedId(),
-                writer, runtimeType, partition, new FrameTupleAccessor(recordDesc), feedManager);
-        coreOperator.setOutputFrameWriter(0, distributeWriter, recordDesc);
-
-        feedRuntime = new SubscribableRuntime(connectionId.getFeedId(), runtimeId, inputSideHandler, distributeWriter,
-                recordDesc);
-        feedManager.getFeedSubscriptionManager().registerFeedSubscribableRuntime((ISubscribableRuntime) feedRuntime);
-        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, feedRuntime);
-
-        distributeWriter.subscribeFeed(policyEnforcer.getFeedPolicyAccessor(), writer, connectionId);
-    }
-
-    private void reviveOldFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
-        this.fta = new FrameTupleAccessor(recordDesc);
-        this.inputSideHandler = feedRuntime.getInputHandler();
-        this.inputSideHandler.setCoreOperator(coreOperator);
-
-        DistributeFeedFrameWriter distributeWriter = new DistributeFeedFrameWriter(ctx, connectionId.getFeedId(),
-                writer, runtimeType, partition, new FrameTupleAccessor(recordDesc), feedManager);
-        coreOperator.setOutputFrameWriter(0, distributeWriter, recordDesc);
-        distributeWriter.subscribeFeed(policyEnforcer.getFeedPolicyAccessor(), writer, connectionId);
-
-        inputSideHandler.reset(nPartitions);
-        feedRuntime.setMode(Mode.PROCESS);
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        try {
-            inputSideHandler.nextFrame(buffer);
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Core Op:" + coreOperator.getDisplayName() + " fail ");
-        }
-        feedRuntime.setMode(Mode.FAIL);
-        coreOperator.fail();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        boolean stalled = inputSideHandler.getMode().equals(Mode.STALL);
-        boolean end = inputSideHandler.getMode().equals(Mode.END);
-        try {
-            if (inputSideHandler != null) {
-                if (!(stalled || end)) {
-                    inputSideHandler.nextFrame(null); // signal end of data
-                    while (!inputSideHandler.isFinished()) {
-                        synchronized (coreOperator) {
-                            coreOperator.wait();
-                        }
-                    }
-                } else {
-                    inputSideHandler.setFinished(true);
-                }
-            }
-            coreOperator.close();
-            System.out.println("CLOSED " + coreOperator + " STALLED ?" + stalled + " ENDED " + end);
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            if (!stalled) {
-                deregister();
-                System.out.println("DEREGISTERING " + this.feedRuntime.getRuntimeId());
-            } else {
-                System.out.println("NOT DEREGISTERING " + this.feedRuntime.getRuntimeId());
-            }
-            if (inputSideHandler != null) {
-                inputSideHandler.close();
-            }
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ending Operator  " + this.feedRuntime.getRuntimeId());
-            }
-        }
-    }
-
-    private void deregister() {
-        if (feedRuntime != null) {
-            // deregister from subscription manager
-            SubscribableFeedRuntimeId runtimeId = (SubscribableFeedRuntimeId) feedRuntime.getRuntimeId();
-            feedManager.getFeedSubscriptionManager().deregisterFeedSubscribableRuntime(runtimeId);
-
-            // deregister from connection manager
-            feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId, feedRuntime.getRuntimeId());
-        }
-    }
-
-}
\ No newline at end of file
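
The close() path above signals end-of-data by pushing a null frame into the input-side handler and then waits on the core operator's monitor until the handler reports that it has drained its input. A self-contained sketch of that handshake, built from JDK types only (all class names below are stand-ins):

    import java.util.concurrent.LinkedBlockingQueue;

    public class EndOfDataHandshakeSketch {

        // Stands in for the null frame that close() uses as an end-of-data marker.
        static final Object POISON = new Object();

        static class CoreOperator {
            void nextFrame(Object frame) { System.out.println("processed " + frame); }
        }

        static class InputSideHandler implements Runnable {
            private final LinkedBlockingQueue<Object> inbox = new LinkedBlockingQueue<>();
            private final CoreOperator core;
            private volatile boolean finished;

            InputSideHandler(CoreOperator core) { this.core = core; }

            void nextFrame(Object frame) { inbox.add(frame == null ? POISON : frame); }

            boolean isFinished() { return finished; }

            @Override
            public void run() {
                try {
                    Object frame;
                    while ((frame = inbox.take()) != POISON) {
                        core.nextFrame(frame);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } finally {
                    synchronized (core) { // publish completion and wake up close()
                        finished = true;
                        core.notifyAll();
                    }
                }
            }
        }

        public static void main(String[] args) throws InterruptedException {
            CoreOperator core = new CoreOperator();
            InputSideHandler handler = new InputSideHandler(core);
            new Thread(handler).start();

            handler.nextFrame("frame-1");
            handler.nextFrame("frame-2");
            handler.nextFrame(null); // signal end of data, as in close() above

            synchronized (core) {
                while (!handler.isFinished()) {
                    core.wait();
                }
            }
            System.out.println("all frames drained; safe to close the core operator");
        }
    }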

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaNodePushable.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaNodePushable.java
deleted file mode 100644
index 86f8750..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaNodePushable.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedRuntime;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeInputHandler;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.external.feeds.FeedPolicyEnforcer;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IActivity;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-public class FeedMetaNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMetaNodePushable.class.getName());
-
-    /** Runtime node pushable corresponding to the core feed operator **/
-    private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
-
-    /**
-     * A policy enforcer that ensures dynamic decisions for a feed are taken
-     * in accordance with the associated ingestion policy
-     **/
-    private FeedPolicyEnforcer policyEnforcer;
-
-    /**
-     * The Feed Runtime instance associated with the operator. Feed Runtime
-     * captures the state of the operator while the feed is active.
-     */
-    private FeedRuntime feedRuntime;
-
-    /**
-     * A unique identifier for the feed instance. A feed instance represents
-     * the flow of data from a feed to a dataset.
-     **/
-    private FeedConnectionId connectionId;
-
-    /**
-     * Denotes the i'th operator instance in a setting where K operator
-     * instances are scheduled to run in parallel
-     **/
-    private int partition;
-
-    /** Total number of partitions available **/
-    private int nPartitions;
-
-    /** Type associated with the core feed operator **/
-    private final FeedRuntimeType runtimeType = FeedRuntimeType.OTHER;
-
-    /** The (singleton) instance of IFeedManager **/
-    private IFeedManager feedManager;
-
-    private FrameTupleAccessor fta;
-
-    private final IHyracksTaskContext ctx;
-
-    private final String operandId;
-
-    /** The pre-processor associated with this runtime **/
-    private FeedRuntimeInputHandler inputSideHandler;
-
-    public FeedMetaNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition,
-            int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
-            Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
-        this.ctx = ctx;
-        this.coreOperator = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
-                .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
-        this.policyEnforcer = new FeedPolicyEnforcer(feedConnectionId, feedPolicyProperties);
-        this.partition = partition;
-        this.nPartitions = nPartitions;
-        this.connectionId = feedConnectionId;
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.feedManager = runtimeCtx.getFeedManager();
-        this.operandId = operationId;
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
-        try {
-            feedRuntime = feedManager.getFeedConnectionManager().getFeedRuntime(connectionId, runtimeId);
-            if (feedRuntime == null) {
-                initializeNewFeedRuntime(runtimeId);
-            } else {
-                reviveOldFeedRuntime(runtimeId);
-            }
-            coreOperator.open();
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void initializeNewFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
-        this.fta = new FrameTupleAccessor(recordDesc);
-        this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId,
-                (AbstractUnaryInputUnaryOutputOperatorNodePushable) coreOperator,
-                policyEnforcer.getFeedPolicyAccessor(), false, fta, recordDesc, feedManager,
-                nPartitions);
-
-        setupBasicRuntime(inputSideHandler);
-    }
-
-    private void reviveOldFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
-        this.inputSideHandler = feedRuntime.getInputHandler();
-        this.fta = new FrameTupleAccessor(recordDesc);
-        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
-        feedRuntime.setMode(Mode.PROCESS);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Retreived state from the zombie instance " + runtimeType + " node.");
-        }
-    }
-
-    private void setupBasicRuntime(FeedRuntimeInputHandler inputHandler) throws Exception {
-        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
-        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
-        feedRuntime = new FeedRuntime(runtimeId, inputHandler, writer);
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        try {
-            inputSideHandler.nextFrame(buffer);
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.info("Core Op:" + coreOperator.getDisplayName() + " fail ");
-        }
-        feedRuntime.setMode(Mode.FAIL);
-        coreOperator.fail();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            coreOperator.close();
-        } catch (Exception e) {
-            e.printStackTrace();
-            // ignore
-        } finally {
-            if (inputSideHandler != null) {
-                inputSideHandler.close();
-            }
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ending Operator  " + this.feedRuntime.getRuntimeId());
-            }
-        }
-    }
-
-}
\ No newline at end of file
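
The open() methods of these FeedMeta*NodePushable classes share one pattern: look up a previously registered runtime for the connection and runtime id; if none exists, initialize a fresh one, otherwise revive the zombie instance left by the previous execution so its state is reused. A small sketch of that lookup-or-revive pattern; the registry and state types are illustrative stand-ins.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class RuntimeRevivalSketch {

        // Stand-in for the state a feed runtime retains across operator restarts.
        static class FeedRuntimeState {
            int framesSeen;
        }

        // Stand-in for the per-node registry kept by the feed connection manager.
        static final Map<String, FeedRuntimeState> REGISTRY = new ConcurrentHashMap<>();

        static FeedRuntimeState open(String runtimeId) {
            FeedRuntimeState existing = REGISTRY.get(runtimeId);
            if (existing == null) {
                // initializeNewFeedRuntime(...): register a fresh runtime
                FeedRuntimeState fresh = new FeedRuntimeState();
                REGISTRY.put(runtimeId, fresh);
                return fresh;
            }
            // reviveOldFeedRuntime(...): reuse the retained state instead of starting over
            return existing;
        }

        public static void main(String[] args) {
            FeedRuntimeState first = open("STORE[partition=0]");
            first.framesSeen = 42;
            FeedRuntimeState revived = open("STORE[partition=0]");
            System.out.println("revived with framesSeen = " + revived.framesSeen); // prints 42
        }
    }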

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaOperatorDescriptor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaOperatorDescriptor.java
deleted file mode 100644
index 5d88a9e..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaOperatorDescriptor.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Map;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-/**
- * FeedMetaOperatorDescriptor is a wrapper operator that provides a sandbox-like
- * environment for a Hyracks operator that is part of a feed ingestion
- * pipeline. The MetaFeed operator provides an interface identical to that
- * offered by the underlying wrapped operator, hereafter referred to as the core
- * operator. As seen by Hyracks, the altered pipeline is identical to the
- * earlier version formed from core operators. The MetaFeed operator enhances
- * each core operator with functionality for handling runtime exceptions,
- * saving state for future retrieval, and measuring/reporting performance
- * characteristics. This added functionality is what contributes to the
- * fault tolerance of a feed pipeline.
- */
-
-public class FeedMetaOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    /**
-     * The actual (Hyracks) operator that is wrapped around by the MetaFeed
-     * operator.
-     **/
-    private IOperatorDescriptor coreOperator;
-
-    /**
-     * A unique identifier for the feed instance. A feed instance represents the
-     * flow of data from a feed to a dataset.
-     **/
-    private final FeedConnectionId feedConnectionId;
-
-    /**
-     * The policy associated with the feed instance.
-     **/
-    private final Map<String, String> feedPolicyProperties;
-
-    /**
-     * type for the feed runtime associated with the operator.
-     * Possible values: COMPUTE, STORE, OTHER
-     **/
-    private final FeedRuntimeType runtimeType;
-
-    private final String operandId;
-
-    public FeedMetaOperatorDescriptor(JobSpecification spec, FeedConnectionId feedConnectionId,
-            IOperatorDescriptor coreOperatorDescriptor, Map<String, String> feedPolicyProperties,
-            FeedRuntimeType runtimeType, boolean enableSubscriptionMode, String operandId) {
-        super(spec, coreOperatorDescriptor.getInputArity(), coreOperatorDescriptor.getOutputArity());
-        this.feedConnectionId = feedConnectionId;
-        this.feedPolicyProperties = feedPolicyProperties;
-        if (coreOperatorDescriptor.getOutputRecordDescriptors().length == 1) {
-            recordDescriptors[0] = coreOperatorDescriptor.getOutputRecordDescriptors()[0];
-        }
-        this.coreOperator = coreOperatorDescriptor;
-        this.runtimeType = runtimeType;
-        this.operandId = operandId;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        IOperatorNodePushable nodePushable = null;
-        switch (runtimeType) {
-            case COMPUTE:
-                nodePushable = new FeedMetaComputeNodePushable(ctx, recordDescProvider, partition, nPartitions,
-                        coreOperator, feedConnectionId, feedPolicyProperties, operandId);
-                break;
-            case STORE:
-                nodePushable = new FeedMetaStoreNodePushable(ctx, recordDescProvider, partition, nPartitions,
-                        coreOperator, feedConnectionId, feedPolicyProperties, operandId);
-                break;
-            case OTHER:
-                nodePushable = new FeedMetaNodePushable(ctx, recordDescProvider, partition, nPartitions, coreOperator,
-                        feedConnectionId, feedPolicyProperties, operandId);
-                break;
-            case ETS:
-                nodePushable = ((AlgebricksMetaOperatorDescriptor) coreOperator).createPushRuntime(ctx,
-                        recordDescProvider, partition, nPartitions);
-                break;
-            case JOIN:
-                break;
-            default:
-                throw new HyracksDataException(new IllegalArgumentException("Invalid feed runtime: " + runtimeType));
-        }
-        return nodePushable;
-    }
-
-    @Override
-    public String toString() {
-        return "FeedMeta [" + coreOperator + " ]";
-    }
-
-    public IOperatorDescriptor getCoreOperator() {
-        return coreOperator;
-    }
-
-    public FeedRuntimeType getRuntimeType() {
-        return runtimeType;
-    }
-
-}
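
The descriptor above is essentially a decorator: it mirrors the core operator's input/output arity and output schema, and wraps the core push runtime with feed-side behavior selected by runtime type. A compact sketch of that decorator idea, using stand-in interfaces rather than the Hyracks API:

    public class OperatorDecoratorSketch {

        // Stand-in for IOperatorDescriptor; only the parts the sketch needs.
        interface OperatorDescriptor {
            int inputArity();
            int outputArity();
            Runnable createPushRuntime(int partition);
        }

        static class CoreOperator implements OperatorDescriptor {
            public int inputArity() { return 1; }
            public int outputArity() { return 1; }
            public Runnable createPushRuntime(int partition) {
                return () -> System.out.println("core runtime on partition " + partition);
            }
        }

        // Mirrors the core operator's shape and adds feed-side bookkeeping around it,
        // the way FeedMetaOperatorDescriptor wraps its core operator.
        static class FeedMetaDecorator implements OperatorDescriptor {
            private final OperatorDescriptor core;

            FeedMetaDecorator(OperatorDescriptor core) { this.core = core; }

            public int inputArity() { return core.inputArity(); }
            public int outputArity() { return core.outputArity(); }

            public Runnable createPushRuntime(int partition) {
                Runnable coreRuntime = core.createPushRuntime(partition);
                return () -> {
                    System.out.println("feed-side setup (policy, monitoring)");
                    coreRuntime.run();
                    System.out.println("feed-side teardown (deregistration)");
                };
            }
        }

        public static void main(String[] args) {
            new FeedMetaDecorator(new CoreOperator()).createPushRuntime(0).run();
        }
    }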

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaStoreNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaStoreNodePushable.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaStoreNodePushable.java
deleted file mode 100644
index b409745..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetaStoreNodePushable.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.dataflow.AsterixLSMInsertDeleteOperatorNodePushable;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedRuntime;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeInputHandler;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.external.feeds.FeedPolicyEnforcer;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IActivity;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMetaStoreNodePushable.class.getName());
-
-    /** Runtime node pushable corresponding to the core feed operator **/
-    private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
-
-    /**
-     * A policy enforcer that ensures dynamic decisions for a feed are taken
-     * in accordance with the associated ingestion policy
-     **/
-    private FeedPolicyEnforcer policyEnforcer;
-
-    /**
-     * The Feed Runtime instance associated with the operator. Feed Runtime
-     * captures the state of the operator while the feed is active.
-     */
-    private FeedRuntime feedRuntime;
-
-    /**
-     * A unique identifier for the feed instance. A feed instance represents
-     * the flow of data from a feed to a dataset.
-     **/
-    private FeedConnectionId connectionId;
-
-    /**
-     * Denotes the i'th operator instance in a setting where K operator
-     * instances are scheduled to run in parallel
-     **/
-    private int partition;
-
-    private int nPartitions;
-
-    /** Type associated with the core feed operator **/
-    private final FeedRuntimeType runtimeType = FeedRuntimeType.STORE;
-
-    /** The (singleton) instance of IFeedManager **/
-    private IFeedManager feedManager;
-
-    private FrameTupleAccessor fta;
-
-    private final IHyracksTaskContext ctx;
-
-    private final String operandId;
-
-    private FeedRuntimeInputHandler inputSideHandler;
-
-    public FeedMetaStoreNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
-            int partition, int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
-            Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
-        this.ctx = ctx;
-        this.coreOperator = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
-                .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
-        this.policyEnforcer = new FeedPolicyEnforcer(feedConnectionId, feedPolicyProperties);
-        this.partition = partition;
-        this.nPartitions = nPartitions;
-        this.connectionId = feedConnectionId;
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.feedManager = runtimeCtx.getFeedManager();
-        this.operandId = operationId;
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
-        try {
-            feedRuntime = feedManager.getFeedConnectionManager().getFeedRuntime(connectionId, runtimeId);
-            if (feedRuntime == null) {
-                initializeNewFeedRuntime(runtimeId);
-            } else {
-                reviveOldFeedRuntime(runtimeId);
-            }
-
-            coreOperator.open();
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void initializeNewFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Runtime not found for  " + runtimeId + " connection id " + connectionId);
-        }
-        this.fta = new FrameTupleAccessor(recordDesc);
-        this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, coreOperator,
-                policyEnforcer.getFeedPolicyAccessor(), true, fta, recordDesc, feedManager,
-                nPartitions);
-        if(coreOperator instanceof AsterixLSMInsertDeleteOperatorNodePushable){
-            AsterixLSMInsertDeleteOperatorNodePushable indexOp = (AsterixLSMInsertDeleteOperatorNodePushable) coreOperator;
-            if(!indexOp.isPrimary()){
-                inputSideHandler.setBufferingEnabled(false);
-            }
-        }
-        setupBasicRuntime(inputSideHandler);
-    }
-
-    private void reviveOldFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
-        this.inputSideHandler = feedRuntime.getInputHandler();
-        this.fta = new FrameTupleAccessor(recordDesc);
-        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
-        this.inputSideHandler.reset(nPartitions);
-        this.inputSideHandler.setCoreOperator(coreOperator);
-        feedRuntime.setMode(Mode.PROCESS);
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Retreived state from the zombie instance from previous execution for " + runtimeType
-                    + " node.");
-        }
-    }
-
-    private void setupBasicRuntime(FeedRuntimeInputHandler inputHandler) throws Exception {
-        coreOperator.setOutputFrameWriter(0, writer, recordDesc);
-        FeedRuntimeId runtimeId = new FeedRuntimeId(runtimeType, partition, operandId);
-        feedRuntime = new FeedRuntime(runtimeId, inputHandler, writer);
-        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, (FeedRuntime) feedRuntime);
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        try {
-            inputSideHandler.nextFrame(buffer);
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.info("Core Op:" + coreOperator.getDisplayName() + " fail ");
-        }
-        feedRuntime.setMode(Mode.FAIL);
-        coreOperator.fail();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        System.out.println("CLOSE CALLED FOR " + this.feedRuntime.getRuntimeId());
-        boolean stalled = inputSideHandler.getMode().equals(Mode.STALL);
-        try {
-            if (!stalled) {
-                System.out.println("SIGNALLING END OF DATA for " + this.feedRuntime.getRuntimeId() + " mode is "
-                        + inputSideHandler.getMode() + " WAITING ON " + coreOperator);
-                inputSideHandler.nextFrame(null); // signal end of data
-                while (!inputSideHandler.isFinished()) {
-                    synchronized (coreOperator) {
-                        coreOperator.wait();
-                    }
-                }
-                System.out.println("ABOUT TO CLOSE OPERATOR  " + coreOperator);
-            }
-            coreOperator.close();
-        } catch (Exception e) {
-            e.printStackTrace();
-            // ignore
-        } finally {
-            if (!stalled) {
-                deregister();
-                System.out.println("DEREGISTERING " + this.feedRuntime.getRuntimeId());
-            } else {
-                System.out.println("NOT DEREGISTERING " + this.feedRuntime.getRuntimeId());
-            }
-            inputSideHandler.close();
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ending Operator  " + this.feedRuntime.getRuntimeId());
-            }
-        }
-    }
-
-    private void deregister() {
-        if (feedRuntime != null) {
-            feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId,
-                    ((FeedRuntime) feedRuntime).getRuntimeId());
-        }
-    }
-
-}
\ No newline at end of file
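
One STORE-specific detail in initializeNewFeedRuntime above: input-side buffering stays enabled only for the pushable that feeds the primary index; secondary-index inserters have it switched off. A tiny sketch of that decision (the operator type is a stand-in):

    public class StoreBufferingSketch {

        // Stand-in for AsterixLSMInsertDeleteOperatorNodePushable.
        static class InsertOperator {
            final boolean primary;
            InsertOperator(boolean primary) { this.primary = primary; }
            boolean isPrimary() { return primary; }
        }

        // Buffering is kept for the primary-index path and disabled only for
        // secondary-index inserters, as in the instanceof check above.
        static boolean bufferingEnabled(Object coreOperator) {
            if (coreOperator instanceof InsertOperator) {
                return ((InsertOperator) coreOperator).isPrimary();
            }
            return true;
        }

        public static void main(String[] args) {
            System.out.println(bufferingEnabled(new InsertOperator(true)));  // true
            System.out.println(bufferingEnabled(new InsertOperator(false))); // false
        }
    }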


[21/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedExceptionHandler.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedExceptionHandler.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedExceptionHandler.java
deleted file mode 100644
index f1728ce..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedExceptionHandler.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.DataInputStream;
-import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.exceptions.FrameDataException;
-import org.apache.asterix.common.feeds.api.IExceptionHandler;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
-
-public class FeedExceptionHandler implements IExceptionHandler {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedExceptionHandler.class.getName());
-
-    private final IHyracksTaskContext ctx;
-    private final FrameTupleAccessor fta;
-    private final RecordDescriptor recordDesc;
-    private final IFeedManager feedManager;
-    private final FeedConnectionId connectionId;
-
-    public FeedExceptionHandler(IHyracksTaskContext ctx, FrameTupleAccessor fta, RecordDescriptor recordDesc,
-            IFeedManager feedManager, FeedConnectionId connectionId) {
-        this.ctx = ctx;
-        this.fta = fta;
-        this.recordDesc = recordDesc;
-        this.feedManager = feedManager;
-        this.connectionId = connectionId;
-    }
-
-    public ByteBuffer handleException(Exception e, ByteBuffer frame) {
-        try {
-            if (e instanceof FrameDataException) {
-                fta.reset(frame);
-                FrameDataException fde = (FrameDataException) e;
-                int tupleIndex = fde.getTupleIndex();
-
-                // logging 
-                try {
-                    logExceptionCausingTuple(tupleIndex, e);
-                } catch (Exception ex) {
-                    ex.addSuppressed(e);
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Unable to log exception causing tuple due to..." + ex.getMessage());
-                    }
-                }
-                // slicing
-                return FeedFrameUtil.getSlicedFrame(ctx, tupleIndex, fta);
-            } else {
-                return null;
-            }
-        } catch (Exception exception) {
-            exception.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to handle exception " + exception.getMessage());
-            }
-            return null;
-        }
-    }
-
-    private void logExceptionCausingTuple(int tupleIndex, Exception e) throws HyracksDataException, AsterixException {
-
-        ByteBufferInputStream bbis = new ByteBufferInputStream();
-        DataInputStream di = new DataInputStream(bbis);
-
-        int start = fta.getTupleStartOffset(tupleIndex) + fta.getFieldSlotsLength();
-        bbis.setByteBuffer(fta.getBuffer(), start);
-
-        Object[] record = new Object[recordDesc.getFieldCount()];
-
-        for (int i = 0; i < record.length; ++i) {
-            Object instance = recordDesc.getFields()[i].deserialize(di);
-            if (i == 0) {
-                String tuple = String.valueOf(instance);
-                feedManager.getFeedMetadataManager().logTuple(connectionId, tuple, e.getMessage(), feedManager);
-            } else {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning(", " + String.valueOf(instance));
-                }
-            }
-        }
-
-    }
-}
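
handleException above recovers from a bad tuple by logging it and handing the downstream writer a sliced copy of the frame so the rest of the batch still flows. A self-contained sketch of that recover-by-slicing idea, simplified to dropping just the offending tuple and using a List<String> in place of the byte-level frame that FeedFrameUtil manipulates:

    import java.util.ArrayList;
    import java.util.List;

    public class SliceOnErrorSketch {

        // Log the bad tuple, then return a copy of the frame without it so the
        // remaining tuples can still be forwarded downstream.
        static List<String> handleBadTuple(List<String> frame, int badTupleIndex) {
            System.err.println("dropping bad tuple: " + frame.get(badTupleIndex)); // "logging"
            List<String> sliced = new ArrayList<>(frame);                          // "slicing"
            sliced.remove(badTupleIndex);
            return sliced;
        }

        public static void main(String[] args) {
            List<String> frame = new ArrayList<>();
            frame.add("{\"id\":1}");
            frame.add("not-a-record");
            frame.add("{\"id\":3}");
            System.out.println(handleBadTuple(frame, 1)); // [{"id":1}, {"id":3}]
        }
    }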

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCache.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCache.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCache.java
deleted file mode 100644
index 55a7fb8..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCache.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.asterix.common.feeds.FeedConstants.StatisticsConstants;
-import org.apache.hyracks.api.comm.IFrame;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.comm.VSizeFrame;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-
-/**
- * Caches feed frames to provide upstream backup. Tuples at the intake layer are
- * held in this cache until the storage layer acknowledges that they have been
- * persisted. On receiving an ack, the corresponding tuples (recordId <= ackedRecordId)
- * are dropped from the cache.
- */
-public class FeedFrameCache extends MessageReceiver<ByteBuffer> {
-
-    /**
-     * Value represents a cached feed frame.
-     * Key represents the largest record id in the frame.
-     * At the intake side, the largest record id corresponds to the last record in the frame.
-     **/
-    private final Map<Integer, ByteBuffer> orderedCache;
-    private final FrameTupleAccessor tupleAccessor;
-    private final IFrameWriter frameWriter;
-    private final IHyracksTaskContext ctx;
-
-    public FeedFrameCache(IHyracksTaskContext ctx, FrameTupleAccessor tupleAccessor, IFrameWriter frameWriter) {
-        this.tupleAccessor = tupleAccessor;
-        this.frameWriter = frameWriter;
-        /** A LinkedHashMap ensures entries are retrieved in order of their insertion **/
-        this.orderedCache = new LinkedHashMap<Integer, ByteBuffer>();
-        this.ctx = ctx;
-    }
-
-    @Override
-    public void processMessage(ByteBuffer frame) throws Exception {
-        int lastRecordId = getLastRecordId(frame);
-        ByteBuffer clone = cloneFrame(frame);
-        orderedCache.put(lastRecordId, clone);
-    }
-
-    public void dropTillRecordId(int recordId) {
-        List<Integer> dropRecordIds = new ArrayList<Integer>();
-        for (Entry<Integer, ByteBuffer> entry : orderedCache.entrySet()) {
-            int recId = entry.getKey();
-            if (recId <= recordId) {
-                dropRecordIds.add(recId);
-            } else {
-                break;
-            }
-        }
-        for (Integer r : dropRecordIds) {
-            orderedCache.remove(r);
-        }
-    }
-
-    public void replayRecords(int startingRecordId) throws HyracksDataException {
-        boolean replayPositionReached = false;
-        for (Entry<Integer, ByteBuffer> entry : orderedCache.entrySet()) {
-            // the key increases monotonically
-            int maxRecordIdInFrame = entry.getKey();
-            if (!replayPositionReached) {
-                if (startingRecordId < maxRecordIdInFrame) {
-                    replayFrame(startingRecordId, entry.getValue());
-                    break;
-                } else {
-                    continue;
-                }
-            }
-        }
-    }
-
-    /**
-     * Replay the frame from the tuple (inclusive) with recordId as specified.
-     * 
-     * @param recordId
-     * @param frame
-     * @throws HyracksDataException
-     */
-    private void replayFrame(int recordId, ByteBuffer frame) throws HyracksDataException {
-        tupleAccessor.reset(frame);
-        int nTuples = tupleAccessor.getTupleCount();
-        for (int i = 0; i < nTuples; i++) {
-            int rid = getRecordIdAtTupleIndex(i, frame);
-            if (rid == recordId) {
-                ByteBuffer slicedFrame = splitFrame(i, frame);
-                replayFrame(slicedFrame);
-                break;
-            }
-        }
-    }
-
-    private ByteBuffer splitFrame(int beginTupleIndex, ByteBuffer frame) throws HyracksDataException {
-        IFrame slicedFrame = new VSizeFrame(ctx);
-        FrameTupleAppender appender = new FrameTupleAppender();
-        appender.reset(slicedFrame, true);
-        int totalTuples = tupleAccessor.getTupleCount();
-        for (int ti = beginTupleIndex; ti < totalTuples; ti++) {
-            appender.append(tupleAccessor, ti);
-        }
-        return slicedFrame.getBuffer();
-    }
-
-    /**
-     * Replay the frame
-     * 
-     * @param frame
-     * @throws HyracksDataException
-     */
-    private void replayFrame(ByteBuffer frame) throws HyracksDataException {
-        frameWriter.nextFrame(frame);
-    }
-
-    private int getLastRecordId(ByteBuffer frame) {
-        tupleAccessor.reset(frame);
-        int nTuples = tupleAccessor.getTupleCount();
-        return getRecordIdAtTupleIndex(nTuples - 1, frame);
-    }
-
-    private int getRecordIdAtTupleIndex(int tupleIndex, ByteBuffer frame) {
-        tupleAccessor.reset(frame);
-        int recordStart = tupleAccessor.getTupleStartOffset(tupleIndex) + tupleAccessor.getFieldSlotsLength();
-        int openPartOffset = frame.getInt(recordStart + 6);
-        int numOpenFields = frame.getInt(recordStart + openPartOffset);
-        int recordIdOffset = frame.getInt(recordStart + openPartOffset + 4 + numOpenFields * 8
-                + StatisticsConstants.INTAKE_TUPLEID.length() + 2 + 1);
-        int lastRecordId = frame.getInt(recordStart + recordIdOffset);
-        return lastRecordId;
-    }
-
-    private ByteBuffer cloneFrame(ByteBuffer frame) {
-        ByteBuffer clone = ByteBuffer.allocate(frame.capacity());
-        System.arraycopy(frame.array(), 0, clone.array(), 0, frame.limit());
-        return clone;
-    }
-
-    public void replayAll() throws HyracksDataException {
-        for (Entry<Integer, ByteBuffer> entry : orderedCache.entrySet()) {
-            ByteBuffer frame = entry.getValue();
-            frameWriter.nextFrame(frame);
-        }
-    }
-}
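
The cache above keeps frames in an insertion-ordered map keyed by the largest record id in each frame; an ack from the storage layer drops every entry at or below the acked id, and the retained frames can be replayed in order after a failure. A self-contained sketch of that bookkeeping, with a String standing in for the frame payload:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class UpstreamBackupCacheSketch {

        // LinkedHashMap preserves arrival order, as in FeedFrameCache.orderedCache.
        private final Map<Integer, String> orderedCache = new LinkedHashMap<>();

        void cache(int lastRecordIdInFrame, String frame) {
            orderedCache.put(lastRecordIdInFrame, frame);
        }

        // Drop every frame whose largest record id has been acknowledged as persisted.
        void dropTillRecordId(int ackedRecordId) {
            orderedCache.keySet().removeIf(recId -> recId <= ackedRecordId);
        }

        // Frames still held after the last ack, in arrival order, ready for replay.
        List<String> replayAll() {
            return new ArrayList<>(orderedCache.values());
        }

        public static void main(String[] args) {
            UpstreamBackupCacheSketch cache = new UpstreamBackupCacheSketch();
            cache.cache(10, "frame-a");
            cache.cache(20, "frame-b");
            cache.cache(30, "frame-c");
            cache.dropTillRecordId(20);            // storage acked up to record 20
            System.out.println(cache.replayAll()); // [frame-c]
        }
    }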

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCollector.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCollector.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCollector.java
deleted file mode 100644
index a8c0e8f..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameCollector.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.logging.Level;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.api.IMessageReceiver;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public class FeedFrameCollector extends MessageReceiver<DataBucket> implements IMessageReceiver<DataBucket> {
-
-    private final FeedConnectionId connectionId;
-    private final FrameDistributor frameDistributor;
-    private FeedPolicyAccessor fpa;
-    private IFrameWriter frameWriter;
-    private State state;
-
-    public enum State {
-        ACTIVE,
-        FINISHED,
-        TRANSITION,
-        HANDOVER
-    }
-
-    public FeedFrameCollector(FrameDistributor frameDistributor, FeedPolicyAccessor feedPolicyAccessor,
-            IFrameWriter frameWriter, FeedConnectionId connectionId) {
-        super();
-        this.frameDistributor = frameDistributor;
-        this.fpa = feedPolicyAccessor;
-        this.connectionId = connectionId;
-        this.frameWriter = frameWriter;
-        this.state = State.ACTIVE;
-    }
-
-    @Override
-    public void processMessage(DataBucket bucket) throws Exception {
-        try {
-            ByteBuffer frame = bucket.getContent();
-            switch (bucket.getContentType()) {
-                case DATA:
-                    frameWriter.nextFrame(frame);
-                    break;
-                case EOD:
-                    closeCollector();
-                    break;
-                case EOSD:
-                    throw new AsterixException("Unexpected data bucket with content of type " + bucket.getContentType());
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to process data bucket " + bucket + ", encountered exception " + e.getMessage());
-            }
-        } finally {
-            bucket.doneReading();
-        }
-    }
-
-    public void closeCollector() {
-        if (state.equals(State.TRANSITION)) {
-            super.close(true);
-            setState(State.ACTIVE);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(this + " is now in " + State.ACTIVE + " mode, processing frames synchronously");
-            }
-        } else {
-            flushPendingMessages();
-            setState(State.FINISHED);
-            synchronized (frameDistributor.getRegisteredCollectors()) {
-                frameDistributor.getRegisteredCollectors().notifyAll();
-            }
-            disconnect();
-        }
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Closed collector " + this);
-        }
-    }
-
-    public synchronized void disconnect() {
-        setState(State.FINISHED);
-    }
-
-    public synchronized void nextFrame(ByteBuffer frame) throws HyracksDataException {
-        frameWriter.nextFrame(frame);
-    }
-
-    public FeedPolicyAccessor getFeedPolicyAccessor() {
-        return fpa;
-    }
-
-    public synchronized State getState() {
-        return state;
-    }
-
-    public synchronized void setState(State state) {
-        this.state = state;
-        switch (state) {
-            case FINISHED:
-            case HANDOVER:
-                notifyAll();
-                break;
-            default:
-                break;
-        }
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Frame Collector " + this.frameDistributor.getFeedRuntimeType() + " switched to " + state);
-        }
-    }
-
-    public IFrameWriter getFrameWriter() {
-        return frameWriter;
-    }
-
-    public void setFrameWriter(IFrameWriter frameWriter) {
-        this.frameWriter = frameWriter;
-    }
-
-    @Override
-    public String toString() {
-        return "FrameCollector [" + connectionId + "," + state + "]";
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (o instanceof FeedFrameCollector) {
-            return connectionId.equals(((FeedFrameCollector) o).connectionId);
-        }
-        return false;
-    }
-
-    @Override
-    public int hashCode() {
-        return connectionId.toString().hashCode();
-    }
-
-}
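
Because setState() calls notifyAll() on the collector when it reaches FINISHED (or HANDOVER), a caller can block on the collector's monitor until it drains. A small caller-side sketch of that hand-off; this usage is an assumption of the sketch, the actual wait sites live elsewhere in the feed runtime.

    // Hypothetical caller waiting for the collector above to finish.
    static void awaitFinished(FeedFrameCollector collector) throws InterruptedException {
        synchronized (collector) {
            while (collector.getState() != FeedFrameCollector.State.FINISHED) {
                collector.wait(); // released by notifyAll() inside setState(FINISHED/HANDOVER)
            }
        }
    }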

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameDiscarder.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameDiscarder.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameDiscarder.java
deleted file mode 100644
index 8609366..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameDiscarder.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-public class FeedFrameDiscarder {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedFrameSpiller.class.getName());
-
-    private final FeedRuntimeInputHandler inputHandler;
-    private final FeedConnectionId connectionId;
-    private final FeedRuntimeId runtimeId;
-    private final FeedPolicyAccessor policyAccessor;
-    private final float maxFractionDiscard;
-    private int nDiscarded;
-
-    public FeedFrameDiscarder(FeedConnectionId connectionId, FeedRuntimeId runtimeId, FeedPolicyAccessor policyAccessor,
-            FeedRuntimeInputHandler inputHandler) throws IOException {
-        this.connectionId = connectionId;
-        this.runtimeId = runtimeId;
-        this.policyAccessor = policyAccessor;
-        this.inputHandler = inputHandler;
-        this.maxFractionDiscard = policyAccessor.getMaxFractionDiscard();
-    }
-
-    public boolean processMessage(ByteBuffer message) {
-        if (policyAccessor.getMaxFractionDiscard() != 0) {
-            long nProcessed = inputHandler.getProcessed();
-            long discardLimit = (long) (nProcessed * maxFractionDiscard);
-            if (nDiscarded >= discardLimit) {
-                return false;
-            }
-            nDiscarded++;
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Discarded frame by " + connectionId + " (" + runtimeId + ")" + " count so far ("
-                        + nDiscarded + ") Limit [" + discardLimit + "]");
-            }
-            return true;
-        }
-        return false;
-    }
-
-}
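
The discard policy above is proportional: a frame may be dropped only while the number of frames discarded so far stays below maxFractionDiscard times the number of frames already processed. A tiny worked example with made-up numbers:

    long nProcessed = 1000;               // frames already processed by the input handler
    float maxFractionDiscard = 0.1f;      // taken from the connection's feed policy
    long discardLimit = (long) (nProcessed * maxFractionDiscard); // = 100
    int nDiscarded = 99;
    boolean mayDiscard = nDiscarded < discardLimit; // true: one more frame may still be dropped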

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameHandlers.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameHandlers.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameHandlers.java
deleted file mode 100644
index c4a2ce0..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameHandlers.java
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IFeedFrameHandler;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class FeedFrameHandlers {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedFrameHandlers.class.getName());
-
-    public enum RoutingMode {
-        IN_MEMORY_ROUTE,
-        SPILL_TO_DISK,
-        DISCARD
-    }
-
-    public static IFeedFrameHandler getFeedFrameHandler(FrameDistributor distributor, FeedId feedId,
-            RoutingMode routingMode, FeedRuntimeType runtimeType, int partition, int frameSize) throws IOException {
-        IFeedFrameHandler handler = null;
-        switch (routingMode) {
-            case IN_MEMORY_ROUTE:
-                handler = new InMemoryRouter(distributor.getRegisteredReaders().values(), runtimeType, partition);
-                break;
-            case SPILL_TO_DISK:
-                handler = new DiskSpiller(distributor, feedId, runtimeType, partition, frameSize);
-                break;
-            case DISCARD:
-                handler = new DiscardRouter(distributor, feedId, runtimeType, partition);
-                break;
-            default:
-                throw new IllegalArgumentException("Invalid routing mode " + routingMode);
-        }
-        return handler;
-    }
-
-    public static class DiscardRouter implements IFeedFrameHandler {
-
-        private final FeedId feedId;
-        private int nDiscarded;
-        private final FeedRuntimeType runtimeType;
-        private final int partition;
-        private final FrameDistributor distributor;
-
-        public DiscardRouter(FrameDistributor distributor, FeedId feedId, FeedRuntimeType runtimeType, int partition)
-                throws HyracksDataException {
-            this.distributor = distributor;
-            this.feedId = feedId;
-            this.nDiscarded = 0;
-            this.runtimeType = runtimeType;
-            this.partition = partition;
-        }
-
-        @Override
-        public void handleFrame(ByteBuffer frame) throws HyracksDataException {
-            FrameTupleAccessor fta = distributor.getFta();
-            fta.reset(frame);
-            int nTuples = fta.getTupleCount();
-            nDiscarded += nTuples;
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Discarded additional [" + runtimeType + "]" + "(" + partition + ") tuples: " + nTuples);
-            }
-        }
-
-        @Override
-        public void handleDataBucket(DataBucket bucket) {
-            nDiscarded++;
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Discard count " + nDiscarded);
-            }
-        }
-
-        @Override
-        public void close() {
-            // do nothing, no resource to relinquish
-        }
-
-        @Override
-        public Iterator<ByteBuffer> replayData() throws HyracksDataException {
-            throw new IllegalStateException("Invalid operation");
-        }
-
-        @Override
-        public String toString() {
-            return "DiscardRouter" + "[" + feedId + "]" + "(" + nDiscarded + ")";
-        }
-
-        @Override
-        public String getSummary() {
-            return "Number of discarded frames (since last reset)" + " feedId " + "[" + feedId + "]" + "("
-                    + nDiscarded + ")";
-        }
-
-    }
-
-    public static class InMemoryRouter implements IFeedFrameHandler {
-
-        private final Collection<FeedFrameCollector> frameCollectors;
-
-        public InMemoryRouter(Collection<FeedFrameCollector> frameCollectors, FeedRuntimeType runtimeType,
-                int partition) {
-            this.frameCollectors = frameCollectors;
-        }
-
-        @Override
-        public void handleFrame(ByteBuffer frame) throws HyracksDataException {
-            throw new IllegalStateException("Operation not supported");
-        }
-
-        @Override
-        public void handleDataBucket(DataBucket bucket) {
-            for (FeedFrameCollector collector : frameCollectors) {
-                collector.sendMessage(bucket); // asynchronous call
-            }
-        }
-
-        @Override
-        public void close() {
-            // do nothing
-        }
-
-        @Override
-        public Iterator<ByteBuffer> replayData() throws HyracksDataException {
-            throw new IllegalStateException("Operation not supported");
-        }
-
-        @Override
-        public String getSummary() {
-            return "InMemoryRouter Summary";
-        }
-    }
-
-    public static class DiskSpiller implements IFeedFrameHandler {
-
-        private FrameSpiller<ByteBuffer> receiver;
-        private Iterator<ByteBuffer> iterator;
-
-        public DiskSpiller(FrameDistributor distributor, FeedId feedId, FeedRuntimeType runtimeType, int partition,
-                int frameSize) throws IOException {
-            receiver = new FrameSpiller<ByteBuffer>(distributor, feedId, frameSize);
-        }
-
-        @Override
-        public void handleFrame(ByteBuffer frame) throws HyracksDataException {
-            receiver.sendMessage(frame);
-        }
-
-        private static class FrameSpiller<T> extends MessageReceiver<ByteBuffer> {
-
-            private final FeedId feedId;
-            private BufferedOutputStream bos;
-            private final ByteBuffer reusableLengthBuffer;
-            private final ByteBuffer reusableDataBuffer;
-            private long offset;
-            private File file;
-            private final FrameDistributor frameDistributor;
-            private boolean fileCreated = false;
-
-            public FrameSpiller(FrameDistributor distributor, FeedId feedId, int frameSize) throws IOException {
-                this.feedId = feedId;
-                this.frameDistributor = distributor;
-                reusableLengthBuffer = ByteBuffer.allocate(4);
-                reusableDataBuffer = ByteBuffer.allocate(frameSize);
-                this.offset = 0;
-            }
-
-            @Override
-            public void processMessage(ByteBuffer message) throws Exception {
-                if (!fileCreated) {
-                    createFile();
-                    fileCreated = true;
-                }
-                reusableLengthBuffer.clear();
-                reusableLengthBuffer.putInt(message.array().length);
-                bos.write(reusableLengthBuffer.array());
-                bos.write(message.array());
-            }
-
-            private void createFile() throws IOException {
-                Date date = new Date();
-                String dateSuffix = date.toString().replace(' ', '_');
-                String fileName = feedId.toString() + "_" + frameDistributor.getFeedRuntimeType() + "_"
-                        + frameDistributor.getPartition() + "_" + dateSuffix;
-
-                file = new File(fileName);
-                if (!file.exists()) {
-                    boolean success = file.createNewFile();
-                    if (!success) {
-                        throw new IOException("Unable to create spill file for feed " + feedId);
-                    }
-                }
-                bos = new BufferedOutputStream(new FileOutputStream(file));
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Created Spill File for feed " + feedId);
-                }
-            }
-
-            @SuppressWarnings("resource")
-            public Iterator<ByteBuffer> replayData() throws Exception {
-                final BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
-                bis.skip(offset);
-                return new Iterator<ByteBuffer>() {
-
-                    @Override
-                    public boolean hasNext() {
-                        boolean more = false;
-                        try {
-                            more = bis.available() > 0;
-                            if (!more) {
-                                bis.close();
-                            }
-                        } catch (IOException e) {
-                            e.printStackTrace();
-                        }
-
-                        return more;
-                    }
-
-                    @Override
-                    public ByteBuffer next() {
-                        reusableLengthBuffer.flip();
-                        try {
-                            bis.read(reusableLengthBuffer.array());
-                            reusableLengthBuffer.flip();
-                            int frameSize = reusableLengthBuffer.getInt();
-                            reusableDataBuffer.flip();
-                            bis.read(reusableDataBuffer.array(), 0, frameSize);
-                            offset += 4 + frameSize;
-                        } catch (IOException e) {
-                            e.printStackTrace();
-                        }
-                        return reusableDataBuffer;
-                    }
-
-                    @Override
-                    public void remove() {
-                    }
-
-                };
-            }
-
-        }
-
-        @Override
-        public void handleDataBucket(DataBucket bucket) {
-            throw new IllegalStateException("Operation not supported");
-        }
-
-        @Override
-        public void close() {
-            receiver.close(true);
-        }
-
-        @Override
-        public Iterator<ByteBuffer> replayData() throws HyracksDataException {
-            try {
-                iterator = receiver.replayData();
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-            return iterator;
-        }
-
-        //TODO: Form a summary that includes stats related to what has been spilled to disk
-        @Override
-        public String getSummary() {
-            return "Disk Spiller Summary";
-        }
-
-    }
-
-}
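
The DiskSpiller above persists every frame as a 4-byte length prefix followed by the frame bytes, and replays the file sequentially by reading a length and then that many payload bytes. Below is a self-contained sketch of the same on-disk framing, written with DataOutputStream/DataInputStream instead of the reusable ByteBuffers of the deleted class; file naming and error handling are omitted.

    import java.io.BufferedInputStream;
    import java.io.BufferedOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.util.function.Consumer;

    final class SpillFileSketch {
        static void spill(File file, ByteBuffer frame) throws IOException {
            try (DataOutputStream out = new DataOutputStream(
                    new BufferedOutputStream(new FileOutputStream(file, true)))) {
                out.writeInt(frame.limit());                 // 4-byte length prefix
                out.write(frame.array(), 0, frame.limit());  // frame payload
            }
        }

        static void replay(File file, Consumer<ByteBuffer> writer) throws IOException {
            try (DataInputStream in = new DataInputStream(
                    new BufferedInputStream(new FileInputStream(file)))) {
                while (in.available() > 0) {
                    int length = in.readInt();
                    byte[] payload = new byte[length];
                    in.readFully(payload);
                    writer.accept(ByteBuffer.wrap(payload)); // frames come back in spill order
                }
            }
        }
    }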

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameSpiller.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameSpiller.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameSpiller.java
deleted file mode 100644
index 86187b8..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameSpiller.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.hyracks.api.comm.IFrame;
-import org.apache.hyracks.api.comm.VSizeFrame;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-
-public class FeedFrameSpiller {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedFrameSpiller.class.getName());
-
-    private final IHyracksTaskContext ctx;
-    private final FeedConnectionId connectionId;
-    private final FeedRuntimeId runtimeId;
-    private final FeedPolicyAccessor policyAccessor;
-    private BufferedOutputStream bos;
-    private File file;
-    private boolean fileCreated = false;
-    private long bytesWritten = 0;
-    private int spilledFrameCount = 0;
-
-    public FeedFrameSpiller(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            FeedPolicyAccessor policyAccessor) throws IOException {
-        this.ctx = ctx;
-        this.connectionId = connectionId;
-        this.runtimeId = runtimeId;
-        this.policyAccessor = policyAccessor;
-    }
-
-    public boolean processMessage(ByteBuffer message) throws Exception {
-        if (!fileCreated) {
-            createFile();
-            fileCreated = true;
-        }
-        long maxAllowed = policyAccessor.getMaxSpillOnDisk();
-        if (maxAllowed != FeedPolicyAccessor.NO_LIMIT && bytesWritten + message.array().length > maxAllowed) {
-            return false;
-        } else {
-            bos.write(message.array());
-            bytesWritten += message.array().length;
-            spilledFrameCount++;
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Spilled frame by " + runtimeId + " spill count " + spilledFrameCount);
-            }
-            return true;
-        }
-    }
-
-    private void createFile() throws IOException {
-        Date date = new Date();
-        String dateSuffix = date.toString().replace(' ', '_');
-        String fileName = connectionId.getFeedId() + "_" + connectionId.getDatasetName() + "_"
-                + runtimeId.getFeedRuntimeType() + "_" + runtimeId.getPartition() + "_" + dateSuffix;
-
-        file = new File(fileName);
-        if (!file.exists()) {
-            boolean success = file.createNewFile();
-            if (!success) {
-                throw new IOException("Unable to create spill file " + fileName + " for feed " + runtimeId);
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Created spill file " + file.getAbsolutePath());
-                }
-            }
-        }
-        bos = new BufferedOutputStream(new FileOutputStream(file));
-
-    }
-
-    public Iterator<ByteBuffer> replayData() throws Exception {
-        bos.flush();
-        return new FrameIterator(ctx, file.getName());
-    }
-
-    private static class FrameIterator implements Iterator<ByteBuffer> {
-
-        private final BufferedInputStream bis;
-        private final IHyracksTaskContext ctx;
-        private int readFrameCount = 0;
-
-        public FrameIterator(IHyracksTaskContext ctx, String filename) throws FileNotFoundException {
-            bis = new BufferedInputStream(new FileInputStream(new File(filename)));
-            this.ctx = ctx;
-        }
-
-        @Override
-        public boolean hasNext() {
-            boolean more = false;
-            try {
-                more = bis.available() > 0;
-                if (!more) {
-                    bis.close();
-                }
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-
-            return more;
-        }
-
-        @Override
-        public ByteBuffer next() {
-            IFrame frame  = null;
-            try {
-                frame  = new VSizeFrame(ctx);
-                Arrays.fill(frame.getBuffer().array(), (byte) 0);
-                bis.read(frame.getBuffer().array(), 0, frame.getFrameSize());
-                readFrameCount++;
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Read spilled frame " + readFrameCount);
-                }
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-            return frame.getBuffer();
-        }
-
-        @Override
-        public void remove() {
-        }
-
-    }
-
-    public void reset() {
-        bytesWritten = 0;
-        //  file.delete();
-        fileCreated = false;
-        bos = null;
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Reset the FrameSpiller!");
-        }
-    }
-
-    public void close() {
-        if (bos != null) {
-            try {
-                bos.flush();
-                bos.close();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameTupleAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameTupleAccessor.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameTupleAccessor.java
deleted file mode 100644
index 9645bf9..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameTupleAccessor.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-
-import org.apache.asterix.common.feeds.FeedConstants.StatisticsConstants;
-import org.apache.hyracks.api.comm.IFrameTupleAccessor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class FeedFrameTupleAccessor implements IFrameTupleAccessor {
-
-    private final FrameTupleAccessor frameAccessor;
-    private final int numOpenFields;
-
-    public FeedFrameTupleAccessor(FrameTupleAccessor frameAccessor) {
-        this.frameAccessor = frameAccessor;
-        int firstRecordStart = frameAccessor.getTupleStartOffset(0) + frameAccessor.getFieldSlotsLength();
-        int openPartOffsetOrig = frameAccessor.getBuffer().getInt(firstRecordStart + 6);
-        numOpenFields = frameAccessor.getBuffer().getInt(firstRecordStart + openPartOffsetOrig);
-    }
-
-    public int getFeedIntakePartition(int tupleIndex) {
-        ByteBuffer buffer = frameAccessor.getBuffer();
-        int recordStart = frameAccessor.getTupleStartOffset(tupleIndex) + frameAccessor.getFieldSlotsLength();
-        int openPartOffsetOrig = buffer.getInt(recordStart + 6);
-        int partitionOffset = openPartOffsetOrig + 4 + 8 * numOpenFields
-                + StatisticsConstants.INTAKE_PARTITION.length() + 2 + 1;
-        return buffer.getInt(recordStart + partitionOffset);
-    }
-    
-    
-
-    @Override
-    public int getFieldCount() {
-        return frameAccessor.getFieldCount();
-    }
-
-    @Override
-    public int getFieldSlotsLength() {
-        return frameAccessor.getFieldSlotsLength();
-    }
-
-    @Override
-    public int getFieldEndOffset(int tupleIndex, int fIdx) {
-        return frameAccessor.getFieldEndOffset(tupleIndex, fIdx);
-    }
-
-    @Override
-    public int getFieldStartOffset(int tupleIndex, int fIdx) {
-        return frameAccessor.getFieldStartOffset(tupleIndex, fIdx);
-    }
-
-    @Override
-    public int getFieldLength(int tupleIndex, int fIdx) {
-        return frameAccessor.getFieldLength(tupleIndex, fIdx);
-    }
-
-    @Override
-    public int getTupleEndOffset(int tupleIndex) {
-        return frameAccessor.getTupleEndOffset(tupleIndex);
-    }
-
-    @Override
-    public int getTupleStartOffset(int tupleIndex) {
-        return frameAccessor.getTupleStartOffset(tupleIndex);
-    }
-
-    @Override
-    public int getTupleCount() {
-        return frameAccessor.getTupleCount();
-    }
-
-    @Override
-    public ByteBuffer getBuffer() {
-        return frameAccessor.getBuffer();
-    }
-
-    @Override
-    public void reset(ByteBuffer buffer) {
-        frameAccessor.reset(buffer);
-    }
-
-    @Override
-    public int getAbsoluteFieldStartOffset(int tupleIndex, int fIdx) {
-        return frameAccessor.getAbsoluteFieldStartOffset(tupleIndex, fIdx);
-    }
-
-    @Override
-    public int getTupleLength(int tupleIndex) {
-        return frameAccessor.getTupleLength(tupleIndex);
-    }
-
-}
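
FeedFrameTupleAccessor is a decorator: every IFrameTupleAccessor call is delegated to the wrapped FrameTupleAccessor, and getFeedIntakePartition() additionally decodes the intake partition stored in the record's open fields. A hedged usage sketch; the record descriptor and the incoming frame are assumed to come from the surrounding operator.

    // Assumed context: recordDescriptor and frame are supplied by the enclosing operator.
    FrameTupleAccessor fta = new FrameTupleAccessor(recordDescriptor);
    fta.reset(frame);
    FeedFrameTupleAccessor feedFta = new FeedFrameTupleAccessor(fta);
    for (int i = 0; i < feedFta.getTupleCount(); i++) {
        int intakePartition = feedFta.getFeedIntakePartition(i); // partition that ingested tuple i
    }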

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameUtil.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameUtil.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameUtil.java
deleted file mode 100644
index baf7a7c..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedFrameUtil.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.BitSet;
-import java.util.Random;
-
-import org.apache.hyracks.api.comm.IFrame;
-import org.apache.hyracks.api.comm.VSizeFrame;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-
-public class FeedFrameUtil {
-
-    public static ByteBuffer getSlicedFrame(IHyracksTaskContext ctx, int tupleIndex, FrameTupleAccessor fta) throws HyracksDataException {
-        FrameTupleAppender appender = new FrameTupleAppender();
-        IFrame slicedFrame = new VSizeFrame(ctx);
-        appender.reset(slicedFrame, true);
-        int startTupleIndex = tupleIndex + 1;
-        int totalTuples = fta.getTupleCount();
-        for (int ti = startTupleIndex; ti < totalTuples; ti++) {
-            appender.append(fta, ti);
-        }
-        return slicedFrame.getBuffer();
-    }
-
-    public static ByteBuffer getSampledFrame(IHyracksTaskContext ctx, FrameTupleAccessor fta, int sampleSize) throws HyracksDataException {
-        NChooseKIterator it = new NChooseKIterator(fta.getTupleCount(), sampleSize);
-        FrameTupleAppender appender = new FrameTupleAppender();
-        IFrame sampledFrame = new VSizeFrame(ctx);
-        appender.reset(sampledFrame, true);
-        int nextTupleIndex = 0;
-        while (it.hasNext()) {
-            nextTupleIndex = it.next();
-            appender.append(fta, nextTupleIndex);
-        }
-        return sampledFrame.getBuffer();
-    }
-    
-  
-
-    private static class NChooseKIterator {
-
-        private final int n;
-        private final int k;
-        private final BitSet bSet;
-        private final Random random;
-
-        private int traversed = 0;
-
-        public NChooseKIterator(int n, int k) {
-            this.n = n;
-            this.k = k;
-            this.bSet = new BitSet(n);
-            bSet.set(0, n);
-            this.random = new Random();
-        }
-
-        public boolean hasNext() {
-            return traversed < k;
-        }
-
-        public int next() {
-            if (hasNext()) {
-                traversed++;
-                int startOffset = random.nextInt(n);
-                int pos = -1;
-                while (pos < 0) {
-                    pos = bSet.nextSetBit(startOffset);
-                    if (pos < 0) {
-                        startOffset = 0;
-                    }
-                }
-                bSet.clear(pos);
-                return pos;
-            } else {
-                return -1;
-            }
-        }
-
-    }
-
-}
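
A hedged usage sketch of the two helpers above; ctx (the task context) and fta (an accessor positioned on an incoming frame) are assumed to come from the running operator.

    // Keep a random sample of 10 tuples from the frame.
    ByteBuffer sampled = FeedFrameUtil.getSampledFrame(ctx, fta, 10);
    // Keep only the tuples after index 5 (i.e., drop tuples 0..5).
    ByteBuffer remainder = FeedFrameUtil.getSlicedFrame(ctx, 5, fta);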

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedId.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedId.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedId.java
deleted file mode 100644
index 81b7c4e..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedId.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.Serializable;
-
-/**
- * A unique identifier for a data feed.
- */
-public class FeedId implements Serializable {
-
-    private static final long serialVersionUID = 1L;
-
-    private final String dataverse;
-    private final String feedName;
-
-    public FeedId(String dataverse, String feedName) {
-        this.dataverse = dataverse;
-        this.feedName = feedName;
-    }
-
-    public String getDataverse() {
-        return dataverse;
-    }
-
-    public String getFeedName() {
-        return feedName;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == null || !(o instanceof FeedId)) {
-            return false;
-        }
-        if (this == o || ((FeedId) o).getFeedName().equals(feedName) && ((FeedId) o).getDataverse().equals(dataverse)) {
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public int hashCode() {
-        return toString().hashCode();
-    }
-
-    @Override
-    public String toString() {
-        return dataverse + "." + feedName;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedIntakeInfo.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedIntakeInfo.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedIntakeInfo.java
deleted file mode 100644
index 0382b23..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedIntakeInfo.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.List;
-
-import org.apache.asterix.common.feeds.api.IFeedJoint;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedIntakeInfo extends FeedJobInfo {
-
-    private final FeedId feedId;
-    private final IFeedJoint intakeFeedJoint;
-    private final JobSpecification spec;
-    private List<String> intakeLocation;
-
-    public FeedIntakeInfo(JobId jobId, FeedJobState state, JobType jobType, FeedId feedId, IFeedJoint intakeFeedJoint,
-            JobSpecification spec) {
-        super(jobId, state, FeedJobInfo.JobType.INTAKE, spec);
-        this.feedId = feedId;
-        this.intakeFeedJoint = intakeFeedJoint;
-        this.spec = spec;
-    }
-
-    public FeedId getFeedId() {
-        return feedId;
-    }
-
-    public IFeedJoint getIntakeFeedJoint() {
-        return intakeFeedJoint;
-    }
-
-    public JobSpecification getSpec() {
-        return spec;
-    }
-
-    public List<String> getIntakeLocation() {
-        return intakeLocation;
-    }
-
-    public void setIntakeLocation(List<String> intakeLocation) {
-        this.intakeLocation = intakeLocation;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJobInfo.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJobInfo.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJobInfo.java
deleted file mode 100644
index 2db9955..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJobInfo.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedJobInfo {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJobInfo.class.getName());
-
-    public enum JobType {
-        INTAKE,
-        FEED_CONNECT
-    }
-
-    public enum FeedJobState {
-        CREATED,
-        ACTIVE,
-        UNDER_RECOVERY,
-        ENDED
-    }
-
-    protected final JobId jobId;
-    protected final JobType jobType;
-    protected FeedJobState state;
-    protected JobSpecification spec;
-
-    public FeedJobInfo(JobId jobId, FeedJobState state, JobType jobType, JobSpecification spec) {
-        this.jobId = jobId;
-        this.state = state;
-        this.jobType = jobType;
-        this.spec = spec;
-    }
-
-    public JobId getJobId() {
-        return jobId;
-    }
-
-    public FeedJobState getState() {
-        return state;
-    }
-
-    public void setState(FeedJobState state) {
-        this.state = state;
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(this + " is in " + state + " state.");
-        }
-    }
-
-    public JobType getJobType() {
-        return jobType;
-    }
-
-    public JobSpecification getSpec() {
-        return spec;
-    }
-
-    public void setSpec(JobSpecification spec) {
-        this.spec = spec;
-    }
-
-    public String toString() {
-        return jobId + " [" + jobType + "]";
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJointKey.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJointKey.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJointKey.java
deleted file mode 100644
index 8005967..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedJointKey.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.List;
-
-import org.apache.commons.lang3.StringUtils;
-
-/**
- * Represents a unique identifier for a Feed Joint. A Feed joint is a logical entity located
- * along a feed ingestion pipeline at a point where the tuples moving as part of data flow
- * constitute the feed. The feed joint acts as a network tap and allows the flowing data to be
- * routed to multiple paths.
- */
-public class FeedJointKey {
-
-    private final FeedId primaryFeedId;
-    private final List<String> appliedFunctions;
-    private final String stringRep;
-
-    public FeedJointKey(FeedId feedId, List<String> appliedFunctions) {
-        this.primaryFeedId = feedId;
-        this.appliedFunctions = appliedFunctions;
-        StringBuilder builder = new StringBuilder();
-        builder.append(feedId);
-        builder.append(":");
-        builder.append(StringUtils.join(appliedFunctions, ':'));
-        stringRep = builder.toString();
-    }
-
-    public FeedId getFeedId() {
-        return primaryFeedId;
-    }
-
-    public List<String> getAppliedFunctions() {
-        return appliedFunctions;
-    }
-
-    public String getStringRep() {
-        return stringRep;
-    }
-
-    @Override
-    public final String toString() {
-        return stringRep;
-    }
-
-    @Override
-    public int hashCode() {
-        return stringRep.hashCode();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o)
-            return true;
-        if (o == null || !(o instanceof FeedJointKey)) {
-            return false;
-        }
-        return stringRep.equals(((FeedJointKey) o).stringRep);
-    }
-
-}
\ No newline at end of file
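
Construction of the key follows directly from the code above: the string form is the feed id followed by the applied functions joined with ':'. A small sketch with made-up dataverse, feed, and function names:

    import java.util.Arrays;

    FeedId feedId = new FeedId("Marketing", "TwitterFeed");
    FeedJointKey key = new FeedJointKey(feedId, Arrays.asList("extractHashTags", "filterSpam"));
    // key.toString() and key.getStringRep() both yield
    // "Marketing.TwitterFeed:extractHashTags:filterSpam"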

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMemoryManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMemoryManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMemoryManager.java
deleted file mode 100644
index c39d82a..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMemoryManager.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.config.AsterixFeedProperties;
-import org.apache.asterix.common.feeds.api.IFeedMemoryComponent;
-import org.apache.asterix.common.feeds.api.IFeedMemoryComponent.Type;
-import org.apache.asterix.common.feeds.api.IFeedMemoryManager;
-
-public class FeedMemoryManager implements IFeedMemoryManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMemoryManager.class.getName());
-    private static final int ALLOCATION_INCREMENT = 10;
-
-    private final AtomicInteger componentId = new AtomicInteger(0);
-    private final String nodeId;
-    private final int budget;
-    private final int frameSize;
-
-    private int committed;
-
-    public FeedMemoryManager(String nodeId, AsterixFeedProperties feedProperties, int frameSize) {
-        this.nodeId = nodeId;
-        this.frameSize = frameSize;
-        budget = (int) feedProperties.getMemoryComponentGlobalBudget() / frameSize;
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Feed Memory budget " + budget + " frames (frame size=" + frameSize + ")");
-        }
-    }
-
-    @Override
-    public synchronized IFeedMemoryComponent getMemoryComponent(Type type) {
-        IFeedMemoryComponent memoryComponent = null;
-        boolean valid = false;
-        switch (type) {
-            case COLLECTION:
-                valid = committed + START_COLLECTION_SIZE <= budget;
-                if (valid) {
-                    memoryComponent = new FrameCollection(componentId.incrementAndGet(), this, START_COLLECTION_SIZE);
-                    committed += START_COLLECTION_SIZE;
-                }
-                break;
-            case POOL:
-                valid = committed + START_POOL_SIZE <= budget;
-                if (valid) {
-                    memoryComponent = new DataBucketPool(componentId.incrementAndGet(), this, START_POOL_SIZE,
-                            frameSize);
-                    committed += START_POOL_SIZE;
-                }
-                break;
-        }
-        if (!valid) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to allocate memory component of type " + type);
-            }
-        }
-        return valid ? memoryComponent : null;
-    }
-
-    @Override
-    public synchronized boolean expandMemoryComponent(IFeedMemoryComponent memoryComponent) {
-        if (committed + ALLOCATION_INCREMENT > budget) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Memory budget " + budget + " is exhausted. Space left: " + (budget - committed)
-                        + " frames.");
-            }
-            return false;
-        } else {
-            memoryComponent.expand(ALLOCATION_INCREMENT);
-            committed += ALLOCATION_INCREMENT;
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Expanded memory component " + memoryComponent + " by " + ALLOCATION_INCREMENT + " frames " + this);
-            }
-            return true;
-        }
-    }
-
-    @Override
-    public synchronized void releaseMemoryComponent(IFeedMemoryComponent memoryComponent) {
-        int delta = memoryComponent.getTotalAllocation();
-        committed -= delta;
-        memoryComponent.reset();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Reset " + memoryComponent + " and reclaimed " + delta + " frames " + this);
-        }
-    }
-
-    @Override
-    public String toString() {
-        return "FeedMemoryManager  [" + nodeId + "]" + "(" + committed + "/" + budget + ")";
-    }
-
-}
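
The frame budget is fixed once at construction time: the global byte budget from AsterixFeedProperties divided by the frame size, with components charged and credited against it in whole frames. A worked example with made-up numbers:

    long globalBudgetBytes = 64L * 1024 * 1024;            // hypothetical 64 MB feed memory budget
    int frameSize = 32 * 1024;                              // hypothetical 32 KB frames
    int budget = (int) (globalBudgetBytes / frameSize);     // 2048 frames shared by all components
    // a request for k frames succeeds only while committed + k <= budget, and
    // releaseMemoryComponent() returns the component's total allocation to the pool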

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMessageService.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMessageService.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMessageService.java
deleted file mode 100644
index 9582602..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMessageService.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.net.Socket;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.config.AsterixFeedProperties;
-import org.apache.asterix.common.feeds.api.IFeedMessage;
-import org.apache.asterix.common.feeds.api.IFeedMessageService;
-
-/**
- * Sends feed report messages on behalf of an operator instance
- * to the SuperFeedManager associated with the feed.
- */
-public class FeedMessageService implements IFeedMessageService {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMessageService.class.getName());
-
-    private final LinkedBlockingQueue<String> inbox;
-    private final FeedMessageHandler mesgHandler;
-    private final String nodeId;
-    private ExecutorService executor;
-
-    public FeedMessageService(AsterixFeedProperties feedProperties, String nodeId, String ccClusterIp) {
-        this.inbox = new LinkedBlockingQueue<String>();
-        this.mesgHandler = new FeedMessageHandler(inbox, ccClusterIp, feedProperties.getFeedCentralManagerPort());
-        this.nodeId = nodeId;
-        this.executor = Executors.newSingleThreadExecutor();
-    }
-
-    public void start() throws Exception {
-
-        executor.execute(mesgHandler);
-    }
-
-    public void stop() {
-        synchronized (mesgHandler.getLock()) {
-            executor.shutdownNow();
-        }
-        mesgHandler.stop();
-    }
-
-    @Override
-    public void sendMessage(IFeedMessage message) {
-        try {
-            JSONObject obj = message.toJSON();
-            obj.put(FeedConstants.MessageConstants.NODE_ID, nodeId);
-            obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, message.getMessageType().name());
-            inbox.add(obj.toString());
-        } catch (JSONException jse) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("JSON exception in parsing message " + message + " exception [" + jse.getMessage() + "]");
-            }
-        }
-    }
-
-    private static class FeedMessageHandler implements Runnable {
-
-        private final LinkedBlockingQueue<String> inbox;
-        private final String host;
-        private final int port;
-        private final Object lock;
-
-        private Socket cfmSocket;
-
-        private static final byte[] EOL = "\n".getBytes();
-
-        public FeedMessageHandler(LinkedBlockingQueue<String> inbox, String host, int port) {
-            this.inbox = inbox;
-            this.host = host;
-            this.port = port;
-            this.lock = new Object();
-        }
-
-        public void run() {
-            try {
-                cfmSocket = new Socket(host, port);
-                if (cfmSocket != null) {
-                    while (true) {
-                        String message = inbox.take();
-                        synchronized (lock) { // lock prevents message handler from sending incomplete message midst shutdown attempt
-                            cfmSocket.getOutputStream().write(message.getBytes());
-                            cfmSocket.getOutputStream().write(EOL);
-                        }
-                    }
-                } else {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Unable to start feed message service");
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Exception in handling incoming feed messages: " + e.getMessage());
-                }
-            } finally {
-                stop();
-            }
-
-        }
-
-        public void stop() {
-            if (cfmSocket != null) {
-                try {
-                    cfmSocket.close();
-                } catch (Exception e) {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Exception in closing socket " + e.getMessage());
-                    }
-                }
-            }
-        }
-
-        public Object getLock() {
-            return lock;
-        }
-
-    }
-
-}
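
The wire protocol of the service above is simply newline-delimited JSON written to the central feed manager's socket, with the node id and message type stamped onto each message before it is queued. A minimal, self-contained sketch of that exchange, using only the JDK and org.json (the host, port, and key/value strings below are placeholders, not taken from the code above):

    import java.io.OutputStream;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    import org.json.JSONObject;

    public class FeedReportSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder endpoint; the real port would come from AsterixFeedProperties.getFeedCentralManagerPort().
            try (Socket socket = new Socket("127.0.0.1", 4500)) {
                JSONObject obj = new JSONObject();
                obj.put("node-id", "nc1");          // stand-in for FeedConstants.MessageConstants.NODE_ID
                obj.put("message-type", "REPORT");  // stand-in for FeedConstants.MessageConstants.MESSAGE_TYPE
                OutputStream out = socket.getOutputStream();
                out.write(obj.toString().getBytes(StandardCharsets.UTF_8));
                out.write('\n'); // messages are newline-terminated, as in FeedMessageHandler above
                out.flush();
            }
        }
    }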

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMetricCollector.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMetricCollector.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMetricCollector.java
deleted file mode 100644
index 7b76692..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedMetricCollector.java
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-
-public class FeedMetricCollector implements IFeedMetricCollector {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMetricCollector.class.getName());
-
-    private static final int UNKNOWN = -1;
-
-    private final AtomicInteger globalSenderId = new AtomicInteger(1);
-    private final Map<Integer, Sender> senders = new HashMap<Integer, Sender>();
-    private final Map<Integer, Series> statHistory = new HashMap<Integer, Series>();
-    private final Map<String, Sender> sendersByName = new HashMap<String, Sender>();
-
-    public FeedMetricCollector(String nodeId) {
-    }
-
-    @Override
-    public synchronized int createReportSender(FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            ValueType valueType, MetricType metricType) {
-        Sender sender = new Sender(globalSenderId.getAndIncrement(), connectionId, runtimeId, valueType, metricType);
-        senders.put(sender.senderId, sender);
-        sendersByName.put(sender.getDisplayName(), sender);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Sender id " + sender.getSenderId() + " created for " + sender);
-        }
-        return sender.senderId;
-    }
-
-    @Override
-    public void removeReportSender(int senderId) {
-        Sender sender = senders.get(senderId);
-        if (sender != null) {
-            statHistory.remove(senderId);
-            senders.remove(senderId);
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to remove sender Id");
-            }
-            throw new IllegalStateException("Unable to remove sender Id " + senderId + " senders " + senders);
-        }
-    }
-
-    @Override
-    public boolean sendReport(int senderId, int value) {
-        Sender sender = senders.get(senderId);
-        if (sender != null) {
-            Series series = statHistory.get(sender.senderId);
-            if (series == null) {
-                switch (sender.mType) {
-                    case AVG:
-                        series = new SeriesAvg();
-                        break;
-                    case RATE:
-                        series = new SeriesRate();
-                        break;
-                }
-                statHistory.put(sender.senderId, series);
-            }
-            series.addValue(value);
-            return true;
-        }
-        throw new IllegalStateException("Unable to send report sender Id " + senderId + " senders " + senders);
-    }
-
-    @Override
-    public void resetReportSender(int senderId) {
-        Sender sender = senders.get(senderId);
-        if (sender != null) {
-            Series series = statHistory.get(sender.senderId);
-            if (series != null) {
-                series.reset();
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Sender with id " + senderId + " not found. Unable to reset!");
-            }
-            throw new IllegalStateException("Unable to reset sender Id " + senderId + " senders " + senders);
-        }
-    }
-
-    private static class Sender {
-
-        private final int senderId;
-        private final MetricType mType;
-        private final String displayName;
-
-        public Sender(int senderId, FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType,
-                MetricType mType) {
-            this.senderId = senderId;
-            this.mType = mType;
-            this.displayName = createDisplayName(connectionId, runtimeId, valueType);
-        }
-
-        @Override
-        public String toString() {
-            return displayName + "[" + senderId + "]" + "(" + mType + ")";
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) {
-                return true;
-            }
-            if (!(o instanceof Sender)) {
-                return false;
-            }
-            return ((Sender) o).senderId == senderId;
-        }
-
-        @Override
-        public int hashCode() {
-            return senderId;
-        }
-
-        public static String createDisplayName(FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-                ValueType valueType) {
-            return connectionId + " (" + runtimeId.getFeedRuntimeType() + " )" + "[" + runtimeId.getPartition() + "]"
-                    + "{" + valueType + "}";
-        }
-
-        public String getDisplayName() {
-            return displayName;
-        }
-
-        public int getSenderId() {
-            return senderId;
-        }
-    }
-
-    @Override
-    public int getMetric(int senderId) {
-        Sender sender = senders.get(senderId);
-        return getMetric(sender);
-    }
-
-    @Override
-    public int getMetric(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType) {
-        String displayName = Sender.createDisplayName(connectionId, runtimeId, valueType);
-        Sender sender = sendersByName.get(displayName);
-        return getMetric(sender);
-    }
-
-    private int getMetric(Sender sender) {
-        if (sender == null || statHistory.get(sender.getSenderId()) == null) {
-            return UNKNOWN;
-        }
-
-        float result = -1;
-        Series series = statHistory.get(sender.getSenderId());
-        switch (sender.mType) {
-            case AVG:
-                result = ((SeriesAvg) series).getAvg();
-                break;
-            case RATE:
-                result = ((SeriesRate) series).getRate();
-                break;
-        }
-        return (int) result;
-    }
-
-}
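
The collector maps each registered sender to a Series that accumulates either an average or a rate; the SeriesAvg and SeriesRate classes themselves live elsewhere in the codebase. A rough sketch of the two accumulation styles, using only the JDK (the class, field, and method names here are illustrative, not the actual Series API):

    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.concurrent.atomic.AtomicLong;

    // Average of all reported values.
    class AvgSeriesSketch {
        private final AtomicLong sum = new AtomicLong();
        private final AtomicInteger count = new AtomicInteger();

        void addValue(int value) {
            sum.addAndGet(value);
            count.incrementAndGet();
        }

        float getAvg() {
            int n = count.get();
            return n == 0 ? -1 : (float) sum.get() / n;
        }
    }

    // Values per second since the series was created.
    class RateSeriesSketch {
        private final AtomicLong total = new AtomicLong();
        private final long startMillis = System.currentTimeMillis();

        void addValue(int value) {
            total.addAndGet(value);
        }

        float getRate() {
            long elapsedMillis = Math.max(1, System.currentTimeMillis() - startMillis);
            return (total.get() * 1000f) / elapsedMillis;
        }
    }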

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedPolicyAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedPolicyAccessor.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedPolicyAccessor.java
deleted file mode 100644
index cd7d598..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedPolicyAccessor.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.Serializable;
-import java.util.Map;
-
-/**
- * A utility class to access the configuration parameters of a feed ingestion policy.
- */
-public class FeedPolicyAccessor implements Serializable {
-
-    private static final long serialVersionUID = 1L;
-
-    /** failure configuration **/
-    /** continue feed ingestion after a soft (runtime) failure **/
-    public static final String SOFT_FAILURE_CONTINUE = "soft.failure.continue";
-
-    /** log failed tuple to an asterixdb dataset for future reference **/
-    public static final String SOFT_FAILURE_LOG_DATA = "soft.failure.log.data";
-
-    /** continue feed ingestion after loss of one or more machines (hardware failure) **/
-    public static final String HARDWARE_FAILURE_CONTINUE = "hardware.failure.continue";
-
-    /** auto-start a loser feed when the asterixdb instance is restarted **/
-    public static final String CLUSTER_REBOOT_AUTO_RESTART = "cluster.reboot.auto.restart";
-
-    /** framework provides guarantee that each received feed record will be processed through the ingestion pipeline at least once **/
-    public static final String AT_LEAST_ONE_SEMANTICS = "atleast.once.semantics";
-
-    /** flow control configuration **/
-    /** spill excess tuples to disk if an operator cannot process incoming data at its arrival rate **/
-    public static final String SPILL_TO_DISK_ON_CONGESTION = "spill.to.disk.on.congestion";
-
-    /** the maximum size of data (tuples) that can be spilled to disk **/
-    public static final String MAX_SPILL_SIZE_ON_DISK = "max.spill.size.on.disk";
-
-    /** discard tuples altogether if an operator cannot process incoming data at its arrival rate **/
-    public static final String DISCARD_ON_CONGESTION = "discard.on.congestion";
-
-    /** maximum fraction of ingested data that can be discarded **/
-    public static final String MAX_FRACTION_DISCARD = "max.fraction.discard";
-
-    /** maximum end-to-end delay/latency in persisting a tuple through the feed ingestion pipeline **/
-    public static final String MAX_DELAY_RECORD_PERSISTENCE = "max.delay.record.persistence";
-
-    /** rate limit the inflow of tuples in accordance with the maximum capacity of the pipeline **/
-    public static final String THROTTLING_ENABLED = "throttling.enabled";
-
-    /** elasticity **/
-    public static final String ELASTIC = "elastic";
-
-    /** statistics **/
-    public static final String TIME_TRACKING = "time.tracking";
-
-    /** logging of statistics **/
-    public static final String LOGGING_STATISTICS = "logging.statistics";
-
-    public static final long NO_LIMIT = -1;
-
-    private Map<String, String> feedPolicy;
-
-    public Map<String, String> getFeedPolicy() {
-        return feedPolicy;
-    }
-
-    public FeedPolicyAccessor(Map<String, String> feedPolicy) {
-        this.feedPolicy = feedPolicy;
-    }
-
-    public void reset(Map<String, String> feedPolicy) {
-        this.feedPolicy = feedPolicy;
-    }
-
-    /** Failure recover/reporting **/
-
-    public boolean logDataOnSoftFailure() {
-        return getBooleanPropertyValue(SOFT_FAILURE_LOG_DATA, false);
-    }
-
-    public boolean continueOnSoftFailure() {
-        return getBooleanPropertyValue(SOFT_FAILURE_CONTINUE, false);
-    }
-
-    public boolean continueOnHardwareFailure() {
-        return getBooleanPropertyValue(HARDWARE_FAILURE_CONTINUE, false);
-    }
-
-    public boolean autoRestartOnClusterReboot() {
-        return getBooleanPropertyValue(CLUSTER_REBOOT_AUTO_RESTART, false);
-    }
-
-    public boolean atleastOnceSemantics() {
-        return getBooleanPropertyValue(AT_LEAST_ONE_SEMANTICS, false);
-    }
-
-    /** flow control **/
-    public boolean spillToDiskOnCongestion() {
-        return getBooleanPropertyValue(SPILL_TO_DISK_ON_CONGESTION, false);
-    }
-
-    public boolean discardOnCongestion() {
-        return getMaxFractionDiscard() > 0;
-    }
-
-    public boolean throttlingEnabled() {
-        return getBooleanPropertyValue(THROTTLING_ENABLED, false);
-    }
-
-    public long getMaxSpillOnDisk() {
-        return getLongPropertyValue(MAX_SPILL_SIZE_ON_DISK, NO_LIMIT);
-    }
-
-    public float getMaxFractionDiscard() {
-        return getFloatPropertyValue(MAX_FRACTION_DISCARD, 0);
-    }
-
-    public long getMaxDelayRecordPersistence() {
-        return getLongPropertyValue(MAX_DELAY_RECORD_PERSISTENCE, Long.MAX_VALUE);
-    }
-
-    /** Elasticity **/
-    public boolean isElastic() {
-        return getBooleanPropertyValue(ELASTIC, false);
-    }
-
-    /** Statistics **/
-    public boolean isTimeTrackingEnabled() {
-        return getBooleanPropertyValue(TIME_TRACKING, false);
-    }
-
-    /** Logging of statistics **/
-    public boolean isLoggingStatisticsEnabled() {
-        return getBooleanPropertyValue(LOGGING_STATISTICS, false);
-    }
-
-    private boolean getBooleanPropertyValue(String key, boolean defValue) {
-        String v = feedPolicy.get(key);
-        return v == null ? false : Boolean.valueOf(v);
-    }
-
-    private long getLongPropertyValue(String key, long defValue) {
-        String v = feedPolicy.get(key);
-        return v != null ? Long.parseLong(v) : defValue;
-    }
-
-    private float getFloatPropertyValue(String key, float defValue) {
-        String v = feedPolicy.get(key);
-        return v != null ? Float.parseFloat(v) : defValue;
-    }
-
-}
\ No newline at end of file
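
Since the accessor is just a typed view over the policy's key/value map, a connection policy can be built and inspected directly. A small sketch, assuming the class as defined above is on the classpath (the property values are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.common.feeds.FeedPolicyAccessor;

    public class PolicyAccessorSketch {
        public static void main(String[] args) {
            Map<String, String> policy = new HashMap<>();
            policy.put(FeedPolicyAccessor.SPILL_TO_DISK_ON_CONGESTION, "true");
            policy.put(FeedPolicyAccessor.MAX_SPILL_SIZE_ON_DISK, "1048576"); // illustrative byte limit
            policy.put(FeedPolicyAccessor.MAX_FRACTION_DISCARD, "0.1");

            FeedPolicyAccessor accessor = new FeedPolicyAccessor(policy);
            System.out.println(accessor.spillToDiskOnCongestion()); // true
            System.out.println(accessor.getMaxSpillOnDisk());       // 1048576
            System.out.println(accessor.discardOnCongestion());     // true, because max.fraction.discard > 0
        }
    }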

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntime.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntime.java
deleted file mode 100644
index 87276ec..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntime.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IFeedOperatorOutputSideHandler;
-import org.apache.asterix.common.feeds.api.IFeedRuntime;
-import org.apache.hyracks.api.comm.IFrameWriter;
-
-public class FeedRuntime implements IFeedRuntime {
-
-    /** A unique identifier for the runtime **/
-    protected final FeedRuntimeId runtimeId;
-
-    /** The output frame writer associated with the runtime **/
-    protected IFrameWriter frameWriter;
-
-    /** The pre-processor associated with the runtime **/
-    protected FeedRuntimeInputHandler inputHandler;
-
-    public FeedRuntime(FeedRuntimeId runtimeId, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter) {
-        this.runtimeId = runtimeId;
-        this.frameWriter = frameWriter;
-        this.inputHandler = inputHandler;
-    }
-
-    public void setFrameWriter(IFeedOperatorOutputSideHandler frameWriter) {
-        this.frameWriter = frameWriter;
-    }
-
-    @Override
-    public FeedRuntimeId getRuntimeId() {
-        return runtimeId;
-    }
-
-    @Override
-    public IFrameWriter getFeedFrameWriter() {
-        return frameWriter;
-    }
-
-    @Override
-    public String toString() {
-        return runtimeId.toString();
-    }
-
-    @Override
-    public FeedRuntimeInputHandler getInputHandler() {
-        return inputHandler;
-    }
-
-    public Mode getMode() {
-        return inputHandler != null ? inputHandler.getMode() : Mode.PROCESS;
-    }
-
-    public void setMode(Mode mode) {
-        this.inputHandler.setMode(mode);
-    }
-
-}


[15/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameHandlers.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameHandlers.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameHandlers.java
new file mode 100644
index 0000000..6ad00f1
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameHandlers.java
@@ -0,0 +1,305 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedFrameHandler;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.message.MessageReceiver;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class FeedFrameHandlers {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedFrameHandlers.class.getName());
+
+    public enum RoutingMode {
+        IN_MEMORY_ROUTE,
+        SPILL_TO_DISK,
+        DISCARD
+    }
+
+    public static IFeedFrameHandler getFeedFrameHandler(FrameDistributor distributor, FeedId feedId,
+            RoutingMode routingMode, FeedRuntimeType runtimeType, int partition, int frameSize) throws IOException {
+        IFeedFrameHandler handler = null;
+        switch (routingMode) {
+            case IN_MEMORY_ROUTE:
+                handler = new InMemoryRouter(distributor.getRegisteredReaders().values(), runtimeType, partition);
+                break;
+            case SPILL_TO_DISK:
+                handler = new DiskSpiller(distributor, feedId, runtimeType, partition, frameSize);
+                break;
+            case DISCARD:
+                handler = new DiscardRouter(distributor, feedId, runtimeType, partition);
+                break;
+            default:
+                throw new IllegalArgumentException("Invalid routing mode " + routingMode);
+        }
+        return handler;
+    }
+
+    public static class DiscardRouter implements IFeedFrameHandler {
+
+        private final FeedId feedId;
+        private int nDiscarded;
+        private final FeedRuntimeType runtimeType;
+        private final int partition;
+        private final FrameDistributor distributor;
+
+        public DiscardRouter(FrameDistributor distributor, FeedId feedId, FeedRuntimeType runtimeType, int partition)
+                throws HyracksDataException {
+            this.distributor = distributor;
+            this.feedId = feedId;
+            this.nDiscarded = 0;
+            this.runtimeType = runtimeType;
+            this.partition = partition;
+        }
+
+        @Override
+        public void handleFrame(ByteBuffer frame) throws HyracksDataException {
+            FrameTupleAccessor fta = distributor.getFta();
+            fta.reset(frame);
+            int nTuples = fta.getTupleCount();
+            nDiscarded += nTuples;
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Discarded additional [" + runtimeType + "]" + "(" + partition + ") tuples: " + nTuples);
+            }
+        }
+
+        @Override
+        public void handleDataBucket(DataBucket bucket) {
+            nDiscarded++;
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Discard count: " + nDiscarded);
+            }
+        }
+
+        @Override
+        public void close() {
+            // do nothing, no resource to relinquish
+        }
+
+        @Override
+        public Iterator<ByteBuffer> replayData() throws HyracksDataException {
+            throw new IllegalStateException("Invalid operation");
+        }
+
+        @Override
+        public String toString() {
+            return "DiscardRouter" + "[" + feedId + "]" + "(" + nDiscarded + ")";
+        }
+
+        @Override
+        public String getSummary() {
+            return "Number of discarded frames (since last reset)" + " feedId " + "[" + feedId + "]" + "("
+                    + nDiscarded + ")";
+        }
+
+    }
+
+    public static class InMemoryRouter implements IFeedFrameHandler {
+
+        private final Collection<FeedFrameCollector> frameCollectors;
+
+        public InMemoryRouter(Collection<FeedFrameCollector> frameCollectors, FeedRuntimeType runtimeType,
+                int partition) {
+            this.frameCollectors = frameCollectors;
+        }
+
+        @Override
+        public void handleFrame(ByteBuffer frame) throws HyracksDataException {
+            throw new IllegalStateException("Operation not supported");
+        }
+
+        @Override
+        public void handleDataBucket(DataBucket bucket) {
+            for (FeedFrameCollector collector : frameCollectors) {
+                collector.sendMessage(bucket); // asynchronous call
+            }
+        }
+
+        @Override
+        public void close() {
+            // do nothing
+        }
+
+        @Override
+        public Iterator<ByteBuffer> replayData() throws HyracksDataException {
+            throw new IllegalStateException("Operation not supported");
+        }
+
+        @Override
+        public String getSummary() {
+            return "InMemoryRouter Summary";
+        }
+    }
+
+    public static class DiskSpiller implements IFeedFrameHandler {
+
+        private FrameSpiller<ByteBuffer> receiver;
+        private Iterator<ByteBuffer> iterator;
+
+        public DiskSpiller(FrameDistributor distributor, FeedId feedId, FeedRuntimeType runtimeType, int partition,
+                int frameSize) throws IOException {
+            receiver = new FrameSpiller<ByteBuffer>(distributor, feedId, frameSize);
+        }
+
+        @Override
+        public void handleFrame(ByteBuffer frame) throws HyracksDataException {
+            receiver.sendMessage(frame);
+        }
+
+        private static class FrameSpiller<T> extends MessageReceiver<ByteBuffer> {
+
+            private final FeedId feedId;
+            private BufferedOutputStream bos;
+            private final ByteBuffer reusableLengthBuffer;
+            private final ByteBuffer reusableDataBuffer;
+            private long offset;
+            private File file;
+            private final FrameDistributor frameDistributor;
+            private boolean fileCreated = false;
+
+            public FrameSpiller(FrameDistributor distributor, FeedId feedId, int frameSize) throws IOException {
+                this.feedId = feedId;
+                this.frameDistributor = distributor;
+                reusableLengthBuffer = ByteBuffer.allocate(4);
+                reusableDataBuffer = ByteBuffer.allocate(frameSize);
+                this.offset = 0;
+            }
+
+            @Override
+            public void processMessage(ByteBuffer message) throws Exception {
+                if (!fileCreated) {
+                    createFile();
+                    fileCreated = true;
+                }
+                reusableLengthBuffer.clear(); // clear (not flip) so the 4-byte length prefix can be written
+                reusableLengthBuffer.putInt(message.array().length);
+                bos.write(reusableLengthBuffer.array());
+                bos.write(message.array());
+            }
+
+            private void createFile() throws IOException {
+                Date date = new Date();
+                String dateSuffix = date.toString().replace(' ', '_');
+                String fileName = feedId.toString() + "_" + frameDistributor.getFeedRuntimeType() + "_"
+                        + frameDistributor.getPartition() + "_" + dateSuffix;
+
+                file = new File(fileName);
+                if (!file.exists()) {
+                    boolean success = file.createNewFile();
+                    if (!success) {
+                        throw new IOException("Unable to create spill file for feed " + feedId);
+                    }
+                }
+                bos = new BufferedOutputStream(new FileOutputStream(file));
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Created Spill File for feed " + feedId);
+                }
+            }
+
+            @SuppressWarnings("resource")
+            public Iterator<ByteBuffer> replayData() throws Exception {
+                final BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
+                bis.skip(offset);
+                return new Iterator<ByteBuffer>() {
+
+                    @Override
+                    public boolean hasNext() {
+                        boolean more = false;
+                        try {
+                            more = bis.available() > 0;
+                            if (!more) {
+                                bis.close();
+                            }
+                        } catch (IOException e) {
+                            e.printStackTrace();
+                        }
+
+                        return more;
+                    }
+
+                    @Override
+                    public ByteBuffer next() {
+                        reusableLengthBuffer.clear(); // clear (not flip) before reading the length prefix
+                        try {
+                            bis.read(reusableLengthBuffer.array());
+                            reusableLengthBuffer.rewind();
+                            int frameSize = reusableLengthBuffer.getInt();
+                            reusableDataBuffer.clear();
+                            bis.read(reusableDataBuffer.array(), 0, frameSize);
+                            offset += 4 + frameSize;
+                        } catch (IOException e) {
+                            e.printStackTrace();
+                        }
+                        return reusableDataBuffer;
+                    }
+
+                    @Override
+                    public void remove() {
+                    }
+
+                };
+            }
+
+        }
+
+        @Override
+        public void handleDataBucket(DataBucket bucket) {
+            throw new IllegalStateException("Operation not supported");
+        }
+
+        @Override
+        public void close() {
+            receiver.close(true);
+        }
+
+        @Override
+        public Iterator<ByteBuffer> replayData() throws HyracksDataException {
+            try {
+                iterator = receiver.replayData();
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+            return iterator;
+        }
+
+        //TODO: Form a summary that includes stats related to what has been spilled to disk
+        @Override
+        public String getSummary() {
+            return "Disk Spiller Summary";
+        }
+
+    }
+
+}
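
The FrameSpiller above persists each frame as a 4-byte length prefix followed by the frame bytes, and replay walks the file in the same order. A self-contained sketch of that spill-and-replay format using plain java.io streams (the file name and frame contents are placeholders):

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.util.ArrayList;
    import java.util.List;

    public class SpillFormatSketch {
        public static void main(String[] args) throws Exception {
            String spillFile = "feed-spill.tmp"; // placeholder name

            // Spill: [int length][frame bytes], repeated.
            try (DataOutputStream out = new DataOutputStream(new FileOutputStream(spillFile))) {
                for (byte[] frame : new byte[][] { "frame-1".getBytes(), "frame-2".getBytes() }) {
                    out.writeInt(frame.length);
                    out.write(frame);
                }
            }

            // Replay: read each length prefix, then exactly that many bytes.
            List<byte[]> replayed = new ArrayList<>();
            try (DataInputStream in = new DataInputStream(new FileInputStream(spillFile))) {
                while (in.available() > 0) {
                    byte[] frame = new byte[in.readInt()];
                    in.readFully(frame);
                    replayed.add(frame);
                }
            }
            System.out.println(replayed.size() + " frames replayed");
        }
    }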

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameSpiller.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameSpiller.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameSpiller.java
new file mode 100644
index 0000000..c9a29ac
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameSpiller.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.hyracks.api.comm.IFrame;
+import org.apache.hyracks.api.comm.VSizeFrame;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedFrameSpiller {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedFrameSpiller.class.getName());
+
+    private final IHyracksTaskContext ctx;
+    private final FeedConnectionId connectionId;
+    private final FeedRuntimeId runtimeId;
+    private final FeedPolicyAccessor policyAccessor;
+    private BufferedOutputStream bos;
+    private File file;
+    private boolean fileCreated = false;
+    private long bytesWritten = 0;
+    private int spilledFrameCount = 0;
+
+    public FeedFrameSpiller(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            FeedPolicyAccessor policyAccessor) throws HyracksDataException {
+        this.ctx = ctx;
+        this.connectionId = connectionId;
+        this.runtimeId = runtimeId;
+        this.policyAccessor = policyAccessor;
+    }
+
+    public boolean processMessage(ByteBuffer message) throws HyracksDataException {
+        if (!fileCreated) {
+            createFile();
+            fileCreated = true;
+        }
+        long maxAllowed = policyAccessor.getMaxSpillOnDisk();
+        if (maxAllowed != FeedPolicyAccessor.NO_LIMIT && bytesWritten + message.array().length > maxAllowed) {
+            return false;
+        } else {
+            try {
+                bos.write(message.array());
+            } catch (IOException e) {
+                throw new HyracksDataException(e);
+            }
+            bytesWritten += message.array().length;
+            spilledFrameCount++;
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Spilled frame by " + runtimeId + " spill count " + spilledFrameCount);
+            }
+            return true;
+        }
+    }
+
+    private void createFile() throws HyracksDataException {
+        try {
+            Date date = new Date();
+            String dateSuffix = date.toString().replace(' ', '_');
+            String fileName = connectionId.getFeedId() + "_" + connectionId.getDatasetName() + "_"
+                    + runtimeId.getFeedRuntimeType() + "_" + runtimeId.getPartition() + "_" + dateSuffix;
+
+            file = new File(fileName);
+            if (!file.exists()) {
+                boolean success = file.createNewFile();
+                if (!success) {
+                    throw new HyracksDataException(
+                            "Unable to create spill file " + fileName + " for feed " + runtimeId);
+                } else {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Created spill file " + file.getAbsolutePath());
+                    }
+                }
+            }
+            bos = new BufferedOutputStream(new FileOutputStream(file));
+        } catch (Throwable th) {
+            throw new HyracksDataException(th);
+        }
+    }
+
+    public Iterator<ByteBuffer> replayData() throws Exception {
+        bos.flush();
+        return new FrameIterator(ctx, file.getName());
+    }
+
+    private static class FrameIterator implements Iterator<ByteBuffer> {
+
+        private final BufferedInputStream bis;
+        private final IHyracksTaskContext ctx;
+        private int readFrameCount = 0;
+
+        public FrameIterator(IHyracksTaskContext ctx, String filename) throws FileNotFoundException {
+            bis = new BufferedInputStream(new FileInputStream(new File(filename)));
+            this.ctx = ctx;
+        }
+
+        @Override
+        public boolean hasNext() {
+            boolean more = false;
+            try {
+                more = bis.available() > 0;
+                if (!more) {
+                    bis.close();
+                }
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+
+            return more;
+        }
+
+        @Override
+        public ByteBuffer next() {
+            IFrame frame = null;
+            try {
+                frame = new VSizeFrame(ctx);
+                Arrays.fill(frame.getBuffer().array(), (byte) 0);
+                bis.read(frame.getBuffer().array(), 0, frame.getFrameSize());
+                readFrameCount++;
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Read spilled frame " + readFrameCount);
+                }
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+            return frame.getBuffer();
+        }
+
+        @Override
+        public void remove() {
+        }
+
+    }
+
+    public void reset() {
+        bytesWritten = 0;
+        //  file.delete();
+        fileCreated = false;
+        bos = null;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Reset the FrameSpiller!");
+        }
+    }
+
+    public void close() {
+        if (bos != null) {
+            try {
+                bos.flush();
+                bos.close();
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleAccessor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleAccessor.java
new file mode 100644
index 0000000..f08243e
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleAccessor.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+
+import org.apache.asterix.external.util.FeedConstants.StatisticsConstants;
+import org.apache.hyracks.api.comm.IFrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class FeedFrameTupleAccessor implements IFrameTupleAccessor {
+
+    private final FrameTupleAccessor frameAccessor;
+    private final int numOpenFields;
+
+    public FeedFrameTupleAccessor(FrameTupleAccessor frameAccessor) {
+        this.frameAccessor = frameAccessor;
+        int firstRecordStart = frameAccessor.getTupleStartOffset(0) + frameAccessor.getFieldSlotsLength();
+        int openPartOffsetOrig = frameAccessor.getBuffer().getInt(firstRecordStart + 6);
+        numOpenFields = frameAccessor.getBuffer().getInt(firstRecordStart + openPartOffsetOrig);
+    }
+
+    public int getFeedIntakePartition(int tupleIndex) {
+        ByteBuffer buffer = frameAccessor.getBuffer();
+        int recordStart = frameAccessor.getTupleStartOffset(tupleIndex) + frameAccessor.getFieldSlotsLength();
+        int openPartOffsetOrig = buffer.getInt(recordStart + 6);
+        int partitionOffset = openPartOffsetOrig + 4 + 8 * numOpenFields
+                + StatisticsConstants.INTAKE_PARTITION.length() + 2 + 1;
+        return buffer.getInt(recordStart + partitionOffset);
+    }
+    
+    
+
+    @Override
+    public int getFieldCount() {
+        return frameAccessor.getFieldCount();
+    }
+
+    @Override
+    public int getFieldSlotsLength() {
+        return frameAccessor.getFieldSlotsLength();
+    }
+
+    @Override
+    public int getFieldEndOffset(int tupleIndex, int fIdx) {
+        return frameAccessor.getFieldEndOffset(tupleIndex, fIdx);
+    }
+
+    @Override
+    public int getFieldStartOffset(int tupleIndex, int fIdx) {
+        return frameAccessor.getFieldStartOffset(tupleIndex, fIdx);
+    }
+
+    @Override
+    public int getFieldLength(int tupleIndex, int fIdx) {
+        return frameAccessor.getFieldLength(tupleIndex, fIdx);
+    }
+
+    @Override
+    public int getTupleEndOffset(int tupleIndex) {
+        return frameAccessor.getTupleEndOffset(tupleIndex);
+    }
+
+    @Override
+    public int getTupleStartOffset(int tupleIndex) {
+        return frameAccessor.getTupleStartOffset(tupleIndex);
+    }
+
+    @Override
+    public int getTupleCount() {
+        return frameAccessor.getTupleCount();
+    }
+
+    @Override
+    public ByteBuffer getBuffer() {
+        return frameAccessor.getBuffer();
+    }
+
+    @Override
+    public void reset(ByteBuffer buffer) {
+        frameAccessor.reset(buffer);
+    }
+
+    @Override
+    public int getAbsoluteFieldStartOffset(int tupleIndex, int fIdx) {
+        return frameAccessor.getAbsoluteFieldStartOffset(tupleIndex, fIdx);
+    }
+
+    @Override
+    public int getTupleLength(int tupleIndex) {
+        return frameAccessor.getTupleLength(tupleIndex);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleDecorator.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleDecorator.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleDecorator.java
new file mode 100644
index 0000000..d43f90d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedFrameTupleDecorator.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.asterix.builders.IARecordBuilder;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.util.FeedConstants.StatisticsConstants;
+import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import org.apache.asterix.om.base.AInt32;
+import org.apache.asterix.om.base.AInt64;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+
+public class FeedFrameTupleDecorator {
+
+    private AMutableString aString = new AMutableString("");
+    private AMutableInt64 aInt64 = new AMutableInt64(0);
+    private AMutableInt32 aInt32 = new AMutableInt32(0);
+    private AtomicInteger tupleId;
+
+    @SuppressWarnings("unchecked")
+    private static ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ASTRING);
+    @SuppressWarnings("unchecked")
+    private static ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT32);
+    @SuppressWarnings("unchecked")
+    private static ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT64);
+
+    private final int partition;
+    private final ArrayBackedValueStorage attrNameStorage;
+    private final ArrayBackedValueStorage attrValueStorage;
+
+    public FeedFrameTupleDecorator(int partition) {
+        this.tupleId = new AtomicInteger(0);
+        this.partition = partition;
+        this.attrNameStorage = new ArrayBackedValueStorage();
+        this.attrValueStorage = new ArrayBackedValueStorage();
+    }
+
+    public void addLongAttribute(String attrName, long attrValue, IARecordBuilder recordBuilder)
+            throws HyracksDataException, AsterixException {
+        attrNameStorage.reset();
+        aString.setValue(attrName);
+        stringSerde.serialize(aString, attrNameStorage.getDataOutput());
+
+        attrValueStorage.reset();
+        aInt64.setValue(attrValue);
+        int64Serde.serialize(aInt64, attrValueStorage.getDataOutput());
+
+        recordBuilder.addField(attrNameStorage, attrValueStorage);
+    }
+
+    public void addIntegerAttribute(String attrName, int attrValue, IARecordBuilder recordBuilder)
+            throws HyracksDataException, AsterixException {
+        attrNameStorage.reset();
+        aString.setValue(attrName);
+        stringSerde.serialize(aString, attrNameStorage.getDataOutput());
+
+        attrValueStorage.reset();
+        aInt32.setValue(attrValue);
+        int32Serde.serialize(aInt32, attrValueStorage.getDataOutput());
+
+        recordBuilder.addField(attrNameStorage, attrValueStorage);
+    }
+
+    public void addTupleId(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
+        addIntegerAttribute(StatisticsConstants.INTAKE_TUPLEID, tupleId.incrementAndGet(), recordBuilder);
+    }
+
+    public void addIntakePartition(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
+        addIntegerAttribute(StatisticsConstants.INTAKE_PARTITION, partition, recordBuilder);
+    }
+
+    public void addIntakeTimestamp(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
+        addLongAttribute(StatisticsConstants.INTAKE_TIMESTAMP, System.currentTimeMillis(), recordBuilder);
+    }
+
+    public void addStoreTimestamp(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
+        addLongAttribute(StatisticsConstants.STORE_TIMESTAMP, System.currentTimeMillis(), recordBuilder);
+    }
+
+}
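
Each add* call above serializes the attribute name and value into the two reusable ArrayBackedValueStorage buffers and appends them as a field of the record under construction. A sketch of the intake-side usage, assuming the caller already holds an open-type IARecordBuilder that the surrounding pipeline has reset against the feed's record type:

    import org.apache.asterix.builders.IARecordBuilder;
    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.feed.dataflow.FeedFrameTupleDecorator;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class DecorateIntakeRecordSketch {
        // recordBuilder is assumed to be prepared by the surrounding intake pipeline.
        public static void decorate(IARecordBuilder recordBuilder, int partition)
                throws HyracksDataException, AsterixException {
            FeedFrameTupleDecorator decorator = new FeedFrameTupleDecorator(partition);
            decorator.addTupleId(recordBuilder);         // StatisticsConstants.INTAKE_TUPLEID, auto-incremented
            decorator.addIntakePartition(recordBuilder); // StatisticsConstants.INTAKE_PARTITION
            decorator.addIntakeTimestamp(recordBuilder); // StatisticsConstants.INTAKE_TIMESTAMP (current time)
        }
    }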

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
new file mode 100644
index 0000000..3a46b1a
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
@@ -0,0 +1,468 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IExceptionHandler;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedMemoryComponent;
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.dataflow.DataBucket.ContentType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedCongestionMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.feed.watch.MonitoredBuffer;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+/**
+ * Provides for error-handling and input-side buffering for a feed runtime.
+ * The input handler is used for input-side buffering in:
+ * 1. FeedMetaComputeNodePushable.initializeNewFeedRuntime();
+ * 2. FeedMetaStoreNodePushable.initializeNewFeedRuntime();
+ *              ______
+ *             |      |
+ * ============|core  |============
+ * ============| op   |============
+ * ^^^^^^^^^^^^|______|
+ *  Input Side
+ *  Handler
+ *
+ **/
+public class FeedRuntimeInputHandler implements IFrameWriter {
+
+    private static Logger LOGGER = Logger.getLogger(FeedRuntimeInputHandler.class.getName());
+
+    private final FeedConnectionId connectionId;
+    private final FeedRuntimeId runtimeId;
+    private final FeedPolicyAccessor feedPolicyAccessor;
+    private final IExceptionHandler exceptionHandler;
+    private final FeedFrameDiscarder discarder;
+    private final FeedFrameSpiller spiller;
+    private final FeedPolicyAccessor fpa;
+    private final IFeedManager feedManager;
+    private boolean bufferingEnabled;
+    private IFrameWriter coreOperator;
+    private MonitoredBuffer mBuffer;
+    private DataBucketPool pool;
+    private FrameCollection frameCollection;
+    private Mode mode;
+    private Mode lastMode;
+    private boolean finished;
+    private long nProcessed;
+    private boolean throttlingEnabled;
+
+    private FrameEventCallback frameEventCallback;
+
+    public FeedRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            IFrameWriter coreOperator, FeedPolicyAccessor fpa, FrameTupleAccessor fta, RecordDescriptor recordDesc,
+            IFeedManager feedManager, int nPartitions) throws HyracksDataException {
+        this(ctx, connectionId, runtimeId, coreOperator, fpa, fpa.bufferingEnabled(), fta, recordDesc, feedManager,
+                nPartitions);
+    }
+
+    public FeedRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
+            IFrameWriter coreOperator, FeedPolicyAccessor fpa, boolean bufferingEnabled, FrameTupleAccessor fta,
+            RecordDescriptor recordDesc, IFeedManager feedManager, int nPartitions) throws HyracksDataException {
+        this.connectionId = connectionId;
+        this.runtimeId = runtimeId;
+        this.coreOperator = coreOperator;
+        this.bufferingEnabled = bufferingEnabled;
+        this.feedPolicyAccessor = fpa;
+        this.spiller = new FeedFrameSpiller(ctx, connectionId, runtimeId, fpa);
+        this.discarder = new FeedFrameDiscarder(connectionId, runtimeId, fpa, this);
+        this.exceptionHandler = new FeedExceptionHandler(ctx, fta, recordDesc, feedManager, connectionId);
+        this.mode = Mode.PROCESS;
+        this.lastMode = Mode.PROCESS;
+        this.finished = false;
+        this.fpa = fpa;
+        this.feedManager = feedManager;
+        this.pool = (DataBucketPool) feedManager.getFeedMemoryManager()
+                .getMemoryComponent(IFeedMemoryComponent.Type.POOL);
+        this.frameCollection = (FrameCollection) feedManager.getFeedMemoryManager()
+                .getMemoryComponent(IFeedMemoryComponent.Type.COLLECTION);
+        this.frameEventCallback = new FrameEventCallback(fpa, this, coreOperator);
+        this.mBuffer = MonitoredBuffer.getMonitoredBuffer(ctx, this, coreOperator, fta, recordDesc,
+                feedManager.getFeedMetricCollector(), connectionId, runtimeId, exceptionHandler, frameEventCallback,
+                nPartitions, fpa);
+        this.mBuffer.start();
+        this.throttlingEnabled = false;
+    }
+
+    @Override
+    public synchronized void nextFrame(ByteBuffer frame) throws HyracksDataException {
+        try {
+            switch (mode) {
+                case PROCESS:
+                    switch (lastMode) {
+                        case SPILL:
+                        case POST_SPILL_DISCARD:
+                            setMode(Mode.PROCESS_SPILL);
+                            processSpilledBacklog();
+                            break;
+                        case STALL:
+                            setMode(Mode.PROCESS_BACKLOG);
+                            processBufferredBacklog();
+                            break;
+                        default:
+                            break;
+                    }
+                    process(frame);
+                    break;
+                case PROCESS_BACKLOG:
+                case PROCESS_SPILL:
+                    process(frame);
+                    break;
+                case SPILL:
+                    spill(frame);
+                    break;
+                case DISCARD:
+                case POST_SPILL_DISCARD:
+                    discard(frame);
+                    break;
+                case STALL:
+                    switch (runtimeId.getFeedRuntimeType()) {
+                        case COLLECT:
+                        case COMPUTE_COLLECT:
+                        case COMPUTE:
+                        case STORE:
+                            bufferDataUntilRecovery(frame);
+                            break;
+                        default:
+                            if (LOGGER.isLoggable(Level.WARNING)) {
+                                LOGGER.warning("Discarding frame during " + mode + " mode " + this.runtimeId);
+                            }
+                            break;
+                    }
+                    break;
+                case END:
+                case FAIL:
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Ignoring incoming tuples in " + mode + " mode");
+                    }
+                    break;
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void bufferDataUntilRecovery(ByteBuffer frame) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Buffering data until recovery is complete " + this.runtimeId);
+        }
+        if (frameCollection == null) {
+            this.frameCollection = (FrameCollection) feedManager.getFeedMemoryManager()
+                    .getMemoryComponent(IFeedMemoryComponent.Type.COLLECTION);
+        }
+        if (frameCollection == null) {
+            discarder.processMessage(frame);
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Running low on memory! DISCARDING FRAME ");
+            }
+        } else {
+            boolean success = frameCollection.addFrame(frame);
+            if (!success) {
+                if (fpa.spillToDiskOnCongestion()) {
+                    if (frame != null) {
+                        spiller.processMessage(frame);
+                    } // TODO handle the else case
+                } else {
+                    discarder.processMessage(frame);
+                }
+            }
+        }
+    }
+
+    public void reportUnresolvableCongestion() throws HyracksDataException {
+        if (this.runtimeId.getFeedRuntimeType().equals(FeedRuntimeType.COMPUTE)) {
+            FeedCongestionMessage congestionReport = new FeedCongestionMessage(connectionId, runtimeId,
+                    mBuffer.getInflowRate(), mBuffer.getOutflowRate(), mode);
+            feedManager.getFeedMessageService().sendMessage(congestionReport);
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Congestion reported " + this.connectionId + " " + this.runtimeId);
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unresolvable congestion at " + this.connectionId + " " + this.runtimeId);
+            }
+        }
+    }
+
+    private void processBufferredBacklog() throws HyracksDataException {
+        try {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Processing backlog " + this.runtimeId);
+            }
+
+            if (frameCollection != null) {
+                Iterator<ByteBuffer> backlog = frameCollection.getFrameCollectionIterator();
+                while (backlog.hasNext()) {
+                    process(backlog.next());
+                    nProcessed++;
+                }
+                DataBucket bucket = pool.getDataBucket();
+                bucket.setContentType(ContentType.EOSD);
+                bucket.setDesiredReadCount(1);
+                mBuffer.sendMessage(bucket);
+                feedManager.getFeedMemoryManager().releaseMemoryComponent(frameCollection);
+                frameCollection = null;
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void processSpilledBacklog() throws HyracksDataException {
+        try {
+            Iterator<ByteBuffer> backlog = spiller.replayData();
+            while (backlog.hasNext()) {
+                process(backlog.next());
+                nProcessed++;
+            }
+            DataBucket bucket = pool.getDataBucket();
+            bucket.setContentType(ContentType.EOSD);
+            bucket.setDesiredReadCount(1);
+            mBuffer.sendMessage(bucket);
+            spiller.reset();
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    protected void process(ByteBuffer frame) throws HyracksDataException {
+        boolean frameProcessed = false;
+        while (!frameProcessed) {
+            try {
+                if (!bufferingEnabled) {
+                    if (frame == null) {
+                        setFinished(true);
+                        synchronized (coreOperator) {
+                            coreOperator.notifyAll();
+                        }
+                    } else {
+                        coreOperator.nextFrame(frame); // synchronous
+                        mBuffer.sendReport(frame);
+                    }
+                } else {
+                    DataBucket bucket = pool.getDataBucket();
+                    if (bucket != null) {
+                        if (frame != null) {
+                            bucket.reset(frame); // created a copy here
+                            bucket.setContentType(ContentType.DATA);
+                        } else {
+                            bucket.setContentType(ContentType.EOD);
+                        }
+                        bucket.setDesiredReadCount(1);
+                        mBuffer.sendMessage(bucket);
+                        mBuffer.sendReport(frame);
+                        nProcessed++;
+                    } else {
+                        if (fpa.spillToDiskOnCongestion()) {
+                            if (frame != null) {
+                                boolean spilled = spiller.processMessage(frame);
+                                if (spilled) {
+                                    setMode(Mode.SPILL);
+                                } else {
+                                    reportUnresolvableCongestion();
+                                }
+                            }
+                        } else if (fpa.discardOnCongestion()) {
+                            boolean discarded = discarder.processMessage(frame);
+                            if (!discarded) {
+                                reportUnresolvableCongestion();
+                            }
+                        } else if (fpa.throttlingEnabled()) {
+                            setThrottlingEnabled(true);
+                        } else {
+                            reportUnresolvableCongestion();
+                        }
+
+                    }
+                }
+                frameProcessed = true;
+            } catch (Exception e) {
+                e.printStackTrace();
+                if (feedPolicyAccessor.continueOnSoftFailure()) {
+                    frame = exceptionHandler.handleException(e, frame);
+                    if (frame == null) {
+                        frameProcessed = true;
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Encountered exception! " + e.getMessage()
+                                    + "Insufficient information, Cannot extract failing tuple");
+                        }
+                    }
+                } else {
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Ingestion policy does not require recovering from tuple. Feed would terminate");
+                    }
+                    mBuffer.close(false);
+                    throw new HyracksDataException(e);
+                }
+            }
+        }
+    }
+
+    private void spill(ByteBuffer frame) throws Exception {
+        boolean success = spiller.processMessage(frame);
+        if (!success) {
+            // limit reached
+            setMode(Mode.POST_SPILL_DISCARD);
+            reportUnresolvableCongestion();
+        }
+    }
+
+    private void discard(ByteBuffer frame) throws Exception {
+        boolean success = discarder.processMessage(frame);
+        if (!success) { // limit reached
+            reportUnresolvableCongestion();
+        }
+    }
+
+    public Mode getMode() {
+        return mode;
+    }
+
+    public synchronized void setMode(Mode mode) {
+        if (mode.equals(this.mode)) {
+            return;
+        }
+        this.lastMode = this.mode;
+        this.mode = mode;
+        if (mode.equals(Mode.END)) {
+            this.close();
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Switched from " + lastMode + " to " + mode + " " + this.runtimeId);
+        }
+    }
+
+    @Override
+    public void close() {
+        if (mBuffer != null) {
+            boolean disableMonitoring = !this.mode.equals(Mode.STALL);
+            if (frameCollection != null) {
+                feedManager.getFeedMemoryManager().releaseMemoryComponent(frameCollection);
+            }
+            if (pool != null) {
+                feedManager.getFeedMemoryManager().releaseMemoryComponent(pool);
+            }
+            mBuffer.close(false, disableMonitoring);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Closed input side handler for " + this.runtimeId + " disabled monitoring "
+                        + disableMonitoring + " Mode for runtime " + this.mode);
+            }
+        }
+    }
+
+    public IFrameWriter getCoreOperator() {
+        return coreOperator;
+    }
+
+    public void setCoreOperator(IFrameWriter coreOperator) {
+        this.coreOperator = coreOperator;
+        mBuffer.setFrameWriter(coreOperator);
+        frameEventCallback.setCoreOperator(coreOperator);
+    }
+
+    public boolean isFinished() {
+        return finished;
+    }
+
+    public void setFinished(boolean finished) {
+        this.finished = finished;
+    }
+
+    public long getProcessed() {
+        return nProcessed;
+    }
+
+    public FeedRuntimeId getRuntimeId() {
+        return runtimeId;
+    }
+
+    @Override
+    public void open() throws HyracksDataException {
+        coreOperator.open();
+    }
+
+    @Override
+    public void fail() throws HyracksDataException {
+        coreOperator.fail();
+    }
+
+    public void reset(int nPartitions) {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Reset number of partitions to " + nPartitions + " for " + this.runtimeId);
+        }
+        if (mBuffer != null) {
+            mBuffer.setNumberOfPartitions(nPartitions);
+            mBuffer.reset();
+        }
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public IFeedManager getFeedManager() {
+        return feedManager;
+    }
+
+    public MonitoredBuffer getmBuffer() {
+        return mBuffer;
+    }
+
+    public boolean isThrottlingEnabled() {
+        return throttlingEnabled;
+    }
+
+    public void setThrottlingEnabled(boolean throttlingEnabled) {
+        if (this.throttlingEnabled != throttlingEnabled) {
+            this.throttlingEnabled = throttlingEnabled;
+            IFeedMessage throttlingEnabledMesg = new ThrottlingEnabledFeedMessage(connectionId, runtimeId);
+            feedManager.getFeedMessageService().sendMessage(throttlingEnabledMesg);
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Throttling " + throttlingEnabled + " for " + this.connectionId + "[" + runtimeId + "]");
+            }
+        }
+    }
+
+    public boolean isBufferingEnabled() {
+        return bufferingEnabled;
+    }
+
+    public void setBufferingEnabled(boolean bufferingEnabled) {
+        this.bufferingEnabled = bufferingEnabled;
+    }
+}

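The input-side handler's nextFrame() above is essentially a per-frame state machine: depending on the current Mode, a frame is processed, spilled to disk, discarded, or buffered until recovery, and any backlog is replayed when the handler returns to PROCESS. A minimal, standalone Java sketch of that routing is given below; all names are illustrative stand-ins, and the monitored buffer, data-bucket pool, and feed-policy plumbing are deliberately omitted.

    import java.nio.ByteBuffer;
    import java.util.ArrayDeque;
    import java.util.Deque;

    // Simplified mirror of the per-mode routing in the feed input-side handler's nextFrame().
    // Hypothetical names; backlog replay on returning to PROCESS is omitted for brevity.
    class ModeDispatchSketch {
        enum Mode { PROCESS, SPILL, DISCARD, STALL }

        private Mode mode = Mode.PROCESS;
        private final Deque<ByteBuffer> stallBacklog = new ArrayDeque<>();

        void nextFrame(ByteBuffer frame) {
            switch (mode) {
                case PROCESS:
                    forwardToCoreOperator(frame);       // normal, synchronous processing
                    break;
                case SPILL:
                    spillToDisk(frame);                 // persist until the backlog drains
                    break;
                case DISCARD:
                    break;                              // policy tolerates loss under congestion
                case STALL:
                    stallBacklog.add(copyOf(frame));    // hold in memory until recovery completes
                    break;
            }
        }

        private void forwardToCoreOperator(ByteBuffer frame) { /* push downstream */ }

        private void spillToDisk(ByteBuffer frame) { /* append to a spill file */ }

        private static ByteBuffer copyOf(ByteBuffer frame) {
            ByteBuffer copy = ByteBuffer.allocate(frame.capacity());
            copy.put(frame.duplicate());
            copy.flip();
            return copy;
        }
    }
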
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameCollection.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameCollection.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameCollection.java
new file mode 100644
index 0000000..7980712
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameCollection.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.asterix.external.feed.api.IFeedMemoryComponent;
+import org.apache.asterix.external.feed.api.IFeedMemoryManager;
+
+/**
+ * Represents an expandable collection of frames.
+ */
+public class FrameCollection implements IFeedMemoryComponent {
+
+    /** A unique identifier for the feed memory component **/
+    private final int componentId;
+
+    /** A collection of frames (each being a ByteBuffer) **/
+    private final List<ByteBuffer> frames = new LinkedList<ByteBuffer>();
+
+    /** The permitted maximum size that the collection may grow to **/
+    private int maxSize;
+
+    /** The {@link IFeedMemoryManager} for the NodeController **/
+    private final IFeedMemoryManager memoryManager;
+
+    public FrameCollection(int componentId, IFeedMemoryManager memoryManager, int maxSize) {
+        this.componentId = componentId;
+        this.maxSize = maxSize;
+        this.memoryManager = memoryManager;
+    }
+
+    public boolean addFrame(ByteBuffer frame) {
+        if (frames.size() == maxSize) {
+            boolean expansionGranted = memoryManager.expandMemoryComponent(this);
+            if (!expansionGranted) {
+                return false;
+            }
+        }
+        ByteBuffer storageBuffer = ByteBuffer.allocate(frame.capacity());
+        storageBuffer.put(frame);
+        frames.add(storageBuffer);
+        storageBuffer.flip();
+        return true;
+    }
+
+    public Iterator<ByteBuffer> getFrameCollectionIterator() {
+        return frames.iterator();
+    }
+
+    @Override
+    public int getTotalAllocation() {
+        return frames.size();
+    }
+
+    @Override
+    public Type getType() {
+        return Type.COLLECTION;
+    }
+
+    @Override
+    public int getComponentId() {
+        return componentId;
+    }
+
+    @Override
+    public void expand(int delta) {
+        maxSize = maxSize + delta;
+    }
+
+    @Override
+    public void reset() {
+        frames.clear();
+        maxSize = IFeedMemoryManager.START_COLLECTION_SIZE;
+    }
+
+    @Override
+    public String toString() {
+        return "FrameCollection" + "[" + componentId + "]" + "(" + frames.size() + "/" + maxSize + ")";
+    }
+
+}

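FrameCollection.addFrame() above copies each incoming frame into a freshly allocated buffer, so the caller can keep recycling its own frame, and asks the memory manager to expand the component once the cap is reached. The standalone sketch below reproduces just that copy-on-add behavior; tryExpand() is a hypothetical stand-in for IFeedMemoryManager.expandMemoryComponent(), which may refuse the request.

    import java.nio.ByteBuffer;
    import java.util.ArrayList;
    import java.util.List;

    // Copy-on-add semantics, mirroring FrameCollection.addFrame(); illustrative only.
    class BoundedFrameListSketch {
        private final List<ByteBuffer> frames = new ArrayList<>();
        private int maxSize;

        BoundedFrameListSketch(int maxSize) {
            this.maxSize = maxSize;
        }

        boolean addFrame(ByteBuffer frame) {
            if (frames.size() == maxSize && !tryExpand()) {
                return false;                      // caller must then spill or discard
            }
            ByteBuffer copy = ByteBuffer.allocate(frame.capacity());
            copy.put(frame.duplicate());           // copy without disturbing the caller's buffer
            copy.flip();                           // ready to be replayed later
            frames.add(copy);
            return true;
        }

        private boolean tryExpand() {              // stands in for the memory manager; always grants one slot here
            maxSize++;
            return true;
        }
    }
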
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameDistributor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameDistributor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameDistributor.java
new file mode 100644
index 0000000..543efb2
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameDistributor.java
@@ -0,0 +1,361 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedMemoryManager;
+import org.apache.asterix.external.feed.api.IFeedMemoryComponent.Type;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class FrameDistributor {
+
+    private static final Logger LOGGER = Logger.getLogger(FrameDistributor.class.getName());
+
+    private static final long MEMORY_AVAILABLE_POLL_PERIOD = 1000; // 1 second
+
+    private final FeedId feedId;
+    private final FeedRuntimeType feedRuntimeType;
+    private final int partition;
+    private final IFeedMemoryManager memoryManager;
+    private final boolean enableSynchronousTransfer;
+    /** A map storing the registered frame readers ({@code FeedFrameCollector}). **/
+    private final Map<IFrameWriter, FeedFrameCollector> registeredCollectors;
+    private final FrameTupleAccessor fta;
+
+    private DataBucketPool pool;
+    private DistributionMode distributionMode;
+    private boolean spillToDiskRequired = false;
+
+    public enum DistributionMode {
+        /**
+         * A single feed frame collector is registered for receiving tuples.
+         * Tuples are sent via a synchronous call, that is, no buffering is involved.
+         **/
+        SINGLE,
+
+        /**
+         * Multiple feed frame collectors are concurrently registered for
+         * receiving tuples.
+         **/
+        SHARED,
+
+        /**
+         * Feed tuples are not being processed, irrespective of # of registered
+         * feed frame collectors.
+         **/
+        INACTIVE
+    }
+
+    public FrameDistributor(FeedId feedId, FeedRuntimeType feedRuntimeType, int partition,
+            boolean enableSynchronousTransfer, IFeedMemoryManager memoryManager, FrameTupleAccessor fta)
+                    throws HyracksDataException {
+        this.feedId = feedId;
+        this.feedRuntimeType = feedRuntimeType;
+        this.partition = partition;
+        this.memoryManager = memoryManager;
+        this.enableSynchronousTransfer = enableSynchronousTransfer;
+        this.registeredCollectors = new HashMap<IFrameWriter, FeedFrameCollector>();
+        this.distributionMode = DistributionMode.INACTIVE;
+        this.fta = fta;
+    }
+
+    public void notifyEndOfFeed() {
+        DataBucket bucket = getDataBucket();
+        if (bucket != null) {
+            sendEndOfFeedDataBucket(bucket);
+        } else {
+            while (bucket == null) {
+                try {
+                    Thread.sleep(MEMORY_AVAILABLE_POLL_PERIOD);
+                    bucket = getDataBucket();
+                } catch (InterruptedException e) {
+                    break;
+                }
+            }
+            if (bucket != null) {
+                sendEndOfFeedDataBucket(bucket);
+            }
+        }
+    }
+
+    private void sendEndOfFeedDataBucket(DataBucket bucket) {
+        bucket.setContentType(DataBucket.ContentType.EOD);
+        nextBucket(bucket);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("End of feed data packet sent " + this.feedId);
+        }
+    }
+
+    public synchronized void registerFrameCollector(FeedFrameCollector frameCollector) {
+        DistributionMode currentMode = distributionMode;
+        switch (distributionMode) {
+            case INACTIVE:
+                if (!enableSynchronousTransfer) {
+                    pool = (DataBucketPool) memoryManager.getMemoryComponent(Type.POOL);
+                    frameCollector.start();
+                }
+                registeredCollectors.put(frameCollector.getFrameWriter(), frameCollector);
+                setMode(DistributionMode.SINGLE);
+                break;
+            case SINGLE:
+                pool = (DataBucketPool) memoryManager.getMemoryComponent(Type.POOL);
+                registeredCollectors.put(frameCollector.getFrameWriter(), frameCollector);
+                for (FeedFrameCollector reader : registeredCollectors.values()) {
+                    reader.start();
+                }
+                setMode(DistributionMode.SHARED);
+                break;
+            case SHARED:
+                frameCollector.start();
+                registeredCollectors.put(frameCollector.getFrameWriter(), frameCollector);
+                break;
+        }
+        evaluateIfSpillIsEnabled();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(
+                    "Switching to " + distributionMode + " mode from " + currentMode + " mode " + " Feed id " + feedId);
+        }
+    }
+
+    public synchronized void deregisterFrameCollector(FeedFrameCollector frameCollector) {
+        switch (distributionMode) {
+            case INACTIVE:
+                throw new IllegalStateException(
+                        "Invalid attempt to deregister frame collector in " + distributionMode + " mode.");
+            case SHARED:
+                frameCollector.closeCollector();
+                registeredCollectors.remove(frameCollector.getFrameWriter());
+                int nCollectors = registeredCollectors.size();
+                if (nCollectors == 1) {
+                    FeedFrameCollector loneCollector = registeredCollectors.values().iterator().next();
+                    setMode(DistributionMode.SINGLE);
+                    loneCollector.setState(FeedFrameCollector.State.TRANSITION);
+                    loneCollector.closeCollector();
+                    memoryManager.releaseMemoryComponent(pool);
+                    evaluateIfSpillIsEnabled();
+                } else {
+                    if (!spillToDiskRequired) {
+                        evaluateIfSpillIsEnabled();
+                    }
+                }
+                break;
+            case SINGLE:
+                frameCollector.closeCollector();
+                setMode(DistributionMode.INACTIVE);
+                spillToDiskRequired = false;
+                break;
+
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Deregistered frame reader" + frameCollector + " from feed distributor for " + feedId);
+        }
+    }
+
+    public void evaluateIfSpillIsEnabled() {
+        if (!spillToDiskRequired) {
+            for (FeedFrameCollector collector : registeredCollectors.values()) {
+                spillToDiskRequired = spillToDiskRequired
+                        || collector.getFeedPolicyAccessor().spillToDiskOnCongestion();
+                if (spillToDiskRequired) {
+                    break;
+                }
+            }
+        }
+    }
+
+    public boolean deregisterFrameCollector(IFrameWriter frameWriter) {
+        FeedFrameCollector collector = registeredCollectors.get(frameWriter);
+        if (collector != null) {
+            deregisterFrameCollector(collector);
+            return true;
+        }
+        return false;
+    }
+
+    public synchronized void setMode(DistributionMode mode) {
+        this.distributionMode = mode;
+    }
+
+    public boolean isRegistered(IFrameWriter writer) {
+        return registeredCollectors.get(writer) != null;
+    }
+
+    public synchronized void nextFrame(ByteBuffer frame) throws HyracksDataException {
+        switch (distributionMode) {
+            case INACTIVE:
+                break;
+            case SINGLE:
+                FeedFrameCollector collector = registeredCollectors.values().iterator().next();
+                switch (collector.getState()) {
+                    case HANDOVER:
+                    case ACTIVE:
+                        if (enableSynchronousTransfer) {
+                            collector.nextFrame(frame); // processing is synchronous
+                        } else {
+                            handleDataBucket(frame);
+                        }
+                        break;
+                    case TRANSITION:
+                        handleDataBucket(frame);
+                        break;
+                    case FINISHED:
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Discarding fetched tuples, feed has ended [" + registeredCollectors.get(0)
+                                    + "]" + " Feed Id " + feedId + " frame distributor " + this.getFeedRuntimeType());
+                        }
+                        registeredCollectors.remove(0);
+                        break;
+                }
+                break;
+            case SHARED:
+                handleDataBucket(frame);
+                break;
+        }
+    }
+
+    private void nextBucket(DataBucket bucket) {
+        for (FeedFrameCollector collector : registeredCollectors.values()) {
+            collector.sendMessage(bucket); // asynchronous call
+        }
+    }
+
+    private void handleDataBucket(ByteBuffer frame) throws HyracksDataException {
+        DataBucket bucket = getDataBucket();
+        if (bucket == null) {
+            handleFrameDuringMemoryCongestion(frame);
+        } else {
+            bucket.reset(frame);
+            bucket.setDesiredReadCount(registeredCollectors.size());
+            nextBucket(bucket);
+        }
+    }
+
+    private void handleFrameDuringMemoryCongestion(ByteBuffer frame) throws HyracksDataException {
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Unable to allocate memory, will evaluate the need to spill");
+        }
+        // wait till memory is available
+    }
+
+    private DataBucket getDataBucket() {
+        DataBucket bucket = null;
+        if (pool != null) {
+            bucket = pool.getDataBucket();
+            if (bucket != null) {
+                bucket.setDesiredReadCount(registeredCollectors.size());
+                return bucket;
+            } else {
+                return null;
+            }
+        }
+        return null;
+    }
+
+    public DistributionMode getMode() {
+        return distributionMode;
+    }
+
+    public void close() {
+        switch (distributionMode) {
+            case INACTIVE:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("FrameDistributor is " + distributionMode);
+                }
+                break;
+            case SINGLE:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Disconnecting single frame reader in " + distributionMode + " mode " + " for  feedId "
+                            + feedId + " " + this.feedRuntimeType);
+                }
+                setMode(DistributionMode.INACTIVE);
+                if (!enableSynchronousTransfer) {
+                    notifyEndOfFeed(); // send EOD Data Bucket
+                    waitForCollectorsToFinish();
+                }
+                registeredCollectors.values().iterator().next().disconnect();
+                break;
+            case SHARED:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Signalling End Of Feed; currently operating in " + distributionMode + " mode");
+                }
+                notifyEndOfFeed(); // send EOD Data Bucket
+                waitForCollectorsToFinish();
+                break;
+        }
+    }
+
+    private void waitForCollectorsToFinish() {
+        synchronized (registeredCollectors.values()) {
+            while (!allCollectorsFinished()) {
+                try {
+                    registeredCollectors.values().wait();
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    private boolean allCollectorsFinished() {
+        boolean allFinished = true;
+        for (FeedFrameCollector collector : registeredCollectors.values()) {
+            allFinished = allFinished && collector.getState().equals(FeedFrameCollector.State.FINISHED);
+        }
+        return allFinished;
+    }
+
+    public Collection<FeedFrameCollector> getRegisteredCollectors() {
+        return registeredCollectors.values();
+    }
+
+    public Map<IFrameWriter, FeedFrameCollector> getRegisteredReaders() {
+        return registeredCollectors;
+    }
+
+    public FeedId getFeedId() {
+        return feedId;
+    }
+
+    public DistributionMode getDistributionMode() {
+        return distributionMode;
+    }
+
+    public FeedRuntimeType getFeedRuntimeType() {
+        return feedRuntimeType;
+    }
+
+    public int getPartition() {
+        return partition;
+    }
+
+    public FrameTupleAccessor getFta() {
+        return fta;
+    }
+
+}
\ No newline at end of file

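FrameDistributor.nextFrame() above routes frames according to the DistributionMode: INACTIVE drops them, SINGLE with synchronous transfer pushes directly into the lone collector, and SHARED (or asynchronous SINGLE) wraps the frame in a pooled DataBucket whose desired read count equals the number of collectors. The sketch below captures only that branching; it substitutes read-only duplicates for the reference-counted DataBucket pool, so it is not a drop-in implementation.

    import java.nio.ByteBuffer;
    import java.util.ArrayList;
    import java.util.List;

    // Illustrative routing only; the real distributor hands out pooled DataBuckets
    // with a desired read count rather than plain read-only duplicates.
    class DistributionSketch {
        interface Collector {
            void accept(ByteBuffer frame);
        }

        private final List<Collector> collectors = new ArrayList<>();

        void register(Collector collector) {
            collectors.add(collector);
        }

        void nextFrame(ByteBuffer frame) {
            if (collectors.isEmpty()) {
                return;                                        // INACTIVE: nothing is listening
            }
            if (collectors.size() == 1) {
                collectors.get(0).accept(frame);               // SINGLE: synchronous hand-off
            } else {
                ByteBuffer shared = frame.asReadOnlyBuffer();  // SHARED: one payload, many readers
                for (Collector collector : collectors) {
                    collector.accept(shared.duplicate());
                }
            }
        }
    }
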
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameEventCallback.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameEventCallback.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameEventCallback.java
new file mode 100644
index 0000000..ba67862
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FrameEventCallback.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFrameEventCallback;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FrameEventCallback implements IFrameEventCallback {
+
+    private static final Logger LOGGER = Logger.getLogger(FrameEventCallback.class.getName());
+
+    private final FeedPolicyAccessor fpa;
+    private final FeedRuntimeInputHandler inputSideHandler;
+    private IFrameWriter coreOperator;
+
+    public FrameEventCallback(FeedPolicyAccessor fpa, FeedRuntimeInputHandler inputSideHandler,
+            IFrameWriter coreOperator) {
+        this.fpa = fpa;
+        this.inputSideHandler = inputSideHandler;
+        this.coreOperator = coreOperator;
+    }
+
+    @Override
+    public void frameEvent(FrameEvent event) {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Frame Event for " + inputSideHandler.getRuntimeId() + " " + event);
+        }
+        if (!event.equals(FrameEvent.FINISHED_PROCESSING_SPILLAGE)
+                && inputSideHandler.getMode().equals(Mode.PROCESS_SPILL)) {
+            return;
+        }
+        switch (event) {
+            case PENDING_WORK_THRESHOLD_REACHED:
+                if (fpa.spillToDiskOnCongestion()) {
+                    inputSideHandler.setMode(Mode.SPILL);
+                } else if (fpa.discardOnCongestion()) {
+                    inputSideHandler.setMode(Mode.DISCARD);
+                } else if (fpa.throttlingEnabled()) {
+                    inputSideHandler.setThrottlingEnabled(true);
+                } else {
+                    try {
+                        inputSideHandler.reportUnresolvableCongestion();
+                    } catch (HyracksDataException e) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to report congestion!!!");
+                        }
+                    }
+                }
+                break;
+            case FINISHED_PROCESSING:
+                inputSideHandler.setFinished(true);
+                synchronized (coreOperator) {
+                    coreOperator.notifyAll();
+                }
+                break;
+            case PENDING_WORK_DONE:
+                switch (inputSideHandler.getMode()) {
+                    case SPILL:
+                    case DISCARD:
+                    case POST_SPILL_DISCARD:
+                        inputSideHandler.setMode(Mode.PROCESS);
+                        break;
+                    default:
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Received " + event + " ignoring as operating in " + inputSideHandler.getMode());
+                        }
+                }
+                break;
+            case FINISHED_PROCESSING_SPILLAGE:
+                inputSideHandler.setMode(Mode.PROCESS);
+                break;
+            default:
+                break;
+        }
+    }
+
+    public void setCoreOperator(IFrameWriter coreOperator) {
+        this.coreOperator = coreOperator;
+    }
+
+}
\ No newline at end of file

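The key decision in FrameEventCallback is what to do when the monitored buffer signals PENDING_WORK_THRESHOLD_REACHED: the feed policy is consulted for spilling first, then discarding, then throttling, and only if none applies is unresolvable congestion reported. The snippet below restates that precedence in isolation; the boolean parameters mirror the FeedPolicyAccessor checks, while the enum and method names are hypothetical.

    // Standalone restatement of the congestion precedence applied in frameEvent().
    class CongestionPolicySketch {
        enum Reaction { SPILL, DISCARD, THROTTLE, REPORT }

        static Reaction onThresholdReached(boolean spillToDisk, boolean discard, boolean throttle) {
            if (spillToDisk) {
                return Reaction.SPILL;      // input handler switches to Mode.SPILL
            } else if (discard) {
                return Reaction.DISCARD;    // input handler switches to Mode.DISCARD
            } else if (throttle) {
                return Reaction.THROTTLE;   // upstream throttling is enabled
            }
            return Reaction.REPORT;         // no local remedy: report unresolvable congestion
        }
    }
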
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/StorageFrameHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/StorageFrameHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/StorageFrameHandler.java
new file mode 100644
index 0000000..22dcfac
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/StorageFrameHandler.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.dataflow;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.asterix.external.feed.watch.IntakePartitionStatistics;
+import org.apache.asterix.external.util.FeedConstants.StatisticsConstants;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+
+public class StorageFrameHandler {
+
+    private final Map<Integer, Map<Integer, IntakePartitionStatistics>> intakeStatistics;
+    private long avgDelayPersistence;
+
+    public StorageFrameHandler() {
+        intakeStatistics = new HashMap<Integer, Map<Integer, IntakePartitionStatistics>>();
+        avgDelayPersistence = 0L;
+    }
+
+    public synchronized void updateTrackingInformation(ByteBuffer frame, FrameTupleAccessor frameAccessor) {
+        int nTuples = frameAccessor.getTupleCount();
+        long delay = 0;
+        long intakeTimestamp;
+        long currentTime = System.currentTimeMillis();
+        int partition = 0;
+        int recordId = 0;
+        for (int i = 0; i < nTuples; i++) {
+            int recordStart = frameAccessor.getTupleStartOffset(i) + frameAccessor.getFieldSlotsLength();
+            int openPartOffsetOrig = frame.getInt(recordStart + 6);
+            int numOpenFields = frame.getInt(recordStart + openPartOffsetOrig);
+
+            int recordIdOffset = openPartOffsetOrig + 4 + 8 * numOpenFields
+                    + (StatisticsConstants.INTAKE_TUPLEID.length() + 2) + 1;
+            recordId = frame.getInt(recordStart + recordIdOffset);
+
+            int partitionOffset = recordIdOffset + 4 + (StatisticsConstants.INTAKE_PARTITION.length() + 2) + 1;
+            partition = frame.getInt(recordStart + partitionOffset);
+
+            ackRecordId(partition, recordId);
+            int intakeTimestampValueOffset = partitionOffset + 4 + (StatisticsConstants.INTAKE_TIMESTAMP.length() + 2)
+                    + 1;
+            intakeTimestamp = frame.getLong(recordStart + intakeTimestampValueOffset);
+
+            int storeTimestampValueOffset = intakeTimestampValueOffset + 8
+                    + (StatisticsConstants.STORE_TIMESTAMP.length() + 2) + 1;
+            frame.putLong(recordStart + storeTimestampValueOffset, System.currentTimeMillis());
+            delay += currentTime - intakeTimestamp;
+        }
+        avgDelayPersistence = nTuples > 0 ? delay / nTuples : 0;
+    }
+
+    private void ackRecordId(int partition, int recordId) {
+        Map<Integer, IntakePartitionStatistics> map = intakeStatistics.get(partition);
+        if (map == null) {
+            map = new HashMap<Integer, IntakePartitionStatistics>();
+            intakeStatistics.put(partition, map);
+        }
+        int base = (int) Math.ceil(recordId * 1.0 / IntakePartitionStatistics.ACK_WINDOW_SIZE);
+        IntakePartitionStatistics intakeStatsForBaseOfPartition = map.get(base);
+        if (intakeStatsForBaseOfPartition == null) {
+            intakeStatsForBaseOfPartition = new IntakePartitionStatistics(partition, base);
+            map.put(base, intakeStatsForBaseOfPartition);
+        }
+        intakeStatsForBaseOfPartition.ackRecordId(recordId);
+    }
+
+    public byte[] getAckData(int partition, int base) {
+        Map<Integer, IntakePartitionStatistics> intakeStats = intakeStatistics.get(partition);
+        if (intakeStats != null) {
+            IntakePartitionStatistics intakePartitionStats = intakeStats.get(base);
+            if (intakePartitionStats != null) {
+                return intakePartitionStats.getAckInfo();
+            }
+        }
+        return null;
+    }
+
+    public synchronized Map<Integer, IntakePartitionStatistics> getBaseAcksForPartition(int partition) {
+        Map<Integer, IntakePartitionStatistics> intakeStatsForPartition = intakeStatistics.get(partition);
+        Map<Integer, IntakePartitionStatistics> clone = new HashMap<Integer, IntakePartitionStatistics>();
+        for (Entry<Integer, IntakePartitionStatistics> entry : intakeStatsForPartition.entrySet()) {
+            clone.put(entry.getKey(), entry.getValue());
+        }
+        return clone;
+    }
+
+    public long getAvgDelayPersistence() {
+        return avgDelayPersistence;
+    }
+
+    public void setAvgDelayPersistence(long avgDelayPersistence) {
+        this.avgDelayPersistence = avgDelayPersistence;
+    }
+
+    public Set<Integer> getPartitionsWithStats() {
+        return intakeStatistics.keySet();
+    }
+}

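ackRecordId() above groups record ids into fixed-size acknowledgement windows and keeps one IntakePartitionStatistics per (partition, window) pair, so acknowledgements can be returned in bulk. The window index is a simple ceiling division, shown below with an assumed window size; the real constant is defined on IntakePartitionStatistics and may differ.

    // Window computation used by ackRecordId(); ACK_WINDOW_SIZE is an assumed example value.
    class AckWindowSketch {
        static final int ACK_WINDOW_SIZE = 1024;

        static int windowOf(int recordId) {
            return (int) Math.ceil(recordId * 1.0 / ACK_WINDOW_SIZE);
        }

        public static void main(String[] args) {
            System.out.println(windowOf(1));     // 1
            System.out.println(windowOf(1024));  // 1
            System.out.println(windowOf(1025));  // 2
        }
    }
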
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedCollectInfo.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedCollectInfo.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedCollectInfo.java
new file mode 100644
index 0000000..9f861d4
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedCollectInfo.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedCollectInfo extends FeedInfo {
+    public FeedId sourceFeedId;
+    public FeedConnectionId feedConnectionId;
+    public List<String> collectLocations = new ArrayList<String>();
+    public List<String> computeLocations = new ArrayList<String>();
+    public List<String> storageLocations = new ArrayList<String>();
+    public Map<String, String> feedPolicy;
+    public String superFeedManagerHost;
+    public int superFeedManagerPort;
+    public boolean fullyConnected;
+
+    public FeedCollectInfo(FeedId sourceFeedId, FeedConnectionId feedConnectionId, JobSpecification jobSpec,
+            JobId jobId, Map<String, String> feedPolicy) {
+        super(jobSpec, jobId, FeedInfoType.COLLECT);
+        this.sourceFeedId = sourceFeedId;
+        this.feedConnectionId = feedConnectionId;
+        this.feedPolicy = feedPolicy;
+        this.fullyConnected = true;
+    }
+
+    @Override
+    public String toString() {
+        return FeedInfoType.COLLECT + "[" + feedConnectionId + "]";
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java
new file mode 100644
index 0000000..1af7153
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionId.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.io.Serializable;
+
+/**
+ * A unique identifier for a feed connection. A feed connection is an instance of a data feed that is flowing into a
+ * dataset.
+ */
+public class FeedConnectionId implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedId feedId;            // Dataverse - Feed
+    private final String datasetName;       // Dataset
+
+    public FeedConnectionId(FeedId feedId, String datasetName) {
+        this.feedId = feedId;
+        this.datasetName = datasetName;
+    }
+
+    public FeedConnectionId(String dataverse, String feedName, String datasetName) {
+        this.feedId = new FeedId(dataverse, feedName);
+        this.datasetName = datasetName;
+    }
+
+    public FeedId getFeedId() {
+        return feedId;
+    }
+
+    public String getDatasetName() {
+        return datasetName;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null || !(o instanceof FeedConnectionId)) {
+            return false;
+        }
+
+        if (this == o || (((FeedConnectionId) o).getFeedId().equals(feedId)
+                && ((FeedConnectionId) o).getDatasetName().equals(datasetName))) {
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        return toString().hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return feedId.toString() + "-->" + datasetName;
+    }
+}

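Since both constructors above build the same (dataverse, feed, dataset) identity, two FeedConnectionId instances naming the same triple should compare equal and hash alike, assuming FeedId itself uses value equality. A small usage sketch with made-up names:

    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedId;

    public class FeedConnectionIdExample {
        public static void main(String[] args) {
            // "Marketing", "TwitterFeed" and "Tweets" are made-up example names.
            FeedConnectionId a = new FeedConnectionId("Marketing", "TwitterFeed", "Tweets");
            FeedConnectionId b = new FeedConnectionId(new FeedId("Marketing", "TwitterFeed"), "Tweets");
            System.out.println(a.equals(b));                  // expected: true, given FeedId value equality
            System.out.println(a.hashCode() == b.hashCode()); // expected: true, hashCode derives from toString()
        }
    }
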
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionManager.java
new file mode 100644
index 0000000..dd2fc60
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionManager.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedConnectionManager;
+import org.apache.asterix.external.feed.runtime.FeedRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+
+/**
+ * An implementation of the IFeedConnectionManager interface.
+ * Provides the central repository for registering and retrieving
+ * artifacts/services associated with a feed.
+ */
+public class FeedConnectionManager implements IFeedConnectionManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedConnectionManager.class.getName());
+
+    private Map<FeedConnectionId, FeedRuntimeManager> feedRuntimeManagers = new HashMap<FeedConnectionId, FeedRuntimeManager>();
+    private final String nodeId;
+
+    public FeedConnectionManager(String nodeId) {
+        this.nodeId = nodeId;
+    }
+
+    public FeedRuntimeManager getFeedRuntimeManager(FeedConnectionId feedId) {
+        return feedRuntimeManagers.get(feedId);
+    }
+
+    @Override
+    public void deregisterFeed(FeedConnectionId feedId) {
+        try {
+            FeedRuntimeManager mgr = feedRuntimeManagers.get(feedId);
+            if (mgr != null) {
+                mgr.close();
+                feedRuntimeManagers.remove(feedId);
+            }
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Exception in closing feed runtime" + e.getMessage());
+            }
+        }
+
+    }
+
+    @Override
+    public synchronized void registerFeedRuntime(FeedConnectionId connectionId, FeedRuntime feedRuntime)
+            throws Exception {
+        FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(connectionId);
+        if (runtimeMgr == null) {
+            runtimeMgr = new FeedRuntimeManager(connectionId, this);
+            feedRuntimeManagers.put(connectionId, runtimeMgr);
+        }
+        runtimeMgr.registerFeedRuntime(feedRuntime.getRuntimeId(), feedRuntime);
+    }
+
+    @Override
+    public void deRegisterFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId) {
+        FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(connectionId);
+        if (runtimeMgr != null) {
+            runtimeMgr.deregisterFeedRuntime(feedRuntimeId);
+        }
+    }
+
+    @Override
+    public FeedRuntime getFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId) {
+        FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(connectionId);
+        return runtimeMgr != null ? runtimeMgr.getFeedRuntime(feedRuntimeId) : null;
+    }
+
+    @Override
+    public String toString() {
+        return "FeedManager " + "[" + nodeId + "]";
+    }
+
+    @Override
+    public List<FeedRuntimeId> getRegisteredRuntimes() {
+        List<FeedRuntimeId> runtimes = new ArrayList<FeedRuntimeId>();
+        for (Entry<FeedConnectionId, FeedRuntimeManager> entry : feedRuntimeManagers.entrySet()) {
+            runtimes.addAll(entry.getValue().getFeedRuntimes());
+        }
+        return runtimes;
+    }
+}

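Pulling the manager's API together: a runtime is registered under its connection id (which lazily creates the per-connection FeedRuntimeManager), can be looked up by the (connection id, runtime id) pair, and is later deregistered; dropping the connection closes the whole runtime manager. The sketch below strings those calls together using the signatures shown above; the ids and the FeedRuntime instance are assumed to be constructed elsewhere in the feed pipeline, and "nc1" is just an example node id.

    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedConnectionManager;
    import org.apache.asterix.external.feed.runtime.FeedRuntime;
    import org.apache.asterix.external.feed.runtime.FeedRuntimeId;

    class FeedRuntimeLifecycleSketch {
        static void lifecycle(FeedConnectionId connectionId, FeedRuntime runtime, FeedRuntimeId runtimeId)
                throws Exception {
            FeedConnectionManager mgr = new FeedConnectionManager("nc1"); // example node id
            mgr.registerFeedRuntime(connectionId, runtime);      // creates the per-connection manager on first use
            FeedRuntime found = mgr.getFeedRuntime(connectionId, runtimeId);
            System.out.println("registered: " + (found != null));
            mgr.deRegisterFeedRuntime(connectionId, runtimeId);  // detach a single runtime
            mgr.deregisterFeed(connectionId);                    // close and drop the whole connection
        }
    }
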


[05/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
index d748ef8..8f4e763 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
@@ -39,7 +39,7 @@ import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.Library;
@@ -651,7 +651,8 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public void addFeedPolicy(MetadataTransactionContext mdTxnCtx, FeedPolicy feedPolicy) throws MetadataException {
+    public void addFeedPolicy(MetadataTransactionContext mdTxnCtx, FeedPolicyEntity feedPolicy)
+            throws MetadataException {
         try {
             metadataNode.addFeedPolicy(mdTxnCtx.getJobId(), feedPolicy);
         } catch (RemoteException e) {
@@ -786,10 +787,10 @@ public class MetadataManager implements IMetadataManager {
     }
 
     @Override
-    public FeedPolicy getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
+    public FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
             throws MetadataException {
 
-        FeedPolicy FeedPolicy = null;
+        FeedPolicyEntity FeedPolicy = null;
         try {
             FeedPolicy = metadataNode.getFeedPolicy(ctx.getJobId(), dataverse, policyName);
         } catch (RemoteException e) {
@@ -844,7 +845,7 @@ public class MetadataManager implements IMetadataManager {
 
     public void dropFeedPolicy(MetadataTransactionContext mdTxnCtx, String dataverseName, String policyName)
             throws MetadataException {
-        FeedPolicy feedPolicy = null;
+        FeedPolicyEntity feedPolicy = null;
         try {
             feedPolicy = metadataNode.getFeedPolicy(mdTxnCtx.getJobId(), dataverseName, policyName);
             metadataNode.dropFeedPolicy(mdTxnCtx.getJobId(), dataverseName, policyName);
@@ -854,9 +855,9 @@ public class MetadataManager implements IMetadataManager {
         mdTxnCtx.dropFeedPolicy(feedPolicy);
     }
 
-    public List<FeedPolicy> getDataversePolicies(MetadataTransactionContext mdTxnCtx, String dataverse)
+    public List<FeedPolicyEntity> getDataversePolicies(MetadataTransactionContext mdTxnCtx, String dataverse)
             throws MetadataException {
-        List<FeedPolicy> dataverseFeedPolicies;
+        List<FeedPolicyEntity> dataverseFeedPolicies;
         try {
             dataverseFeedPolicies = metadataNode.getDataversePolicies(mdTxnCtx.getJobId(), dataverse);
         } catch (RemoteException e) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index ce272fa..ed586aa 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -50,7 +50,7 @@ import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
@@ -364,10 +364,12 @@ public class MetadataNode implements IMetadataNode {
                 dropFeed(jobId, dataverseName, feed.getFeedName());
             }
 
-            List<FeedPolicy> feedPolicies = getDataversePolicies(jobId, dataverseName);
-            // Drop all feed ingestion policies in this dataverse.
-            for (FeedPolicy feedPolicy : feedPolicies) {
-                dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
+            List<FeedPolicyEntity> feedPolicies = getDataversePolicies(jobId, dataverseName);
+            if (feedPolicies != null && feedPolicies.size() > 0) {
+                // Drop all feed ingestion policies in this dataverse.
+                for (FeedPolicyEntity feedPolicy : feedPolicies) {
+                    dropFeedPolicy(jobId, dataverseName, feedPolicy.getPolicyName());
+                }
             }
 
             // Delete the dataverse entry from the 'dataverse' dataset.
@@ -1315,7 +1317,7 @@ public class MetadataNode implements IMetadataNode {
     }
 
     @Override
-    public void addFeedPolicy(JobId jobId, FeedPolicy feedPolicy) throws MetadataException, RemoteException {
+    public void addFeedPolicy(JobId jobId, FeedPolicyEntity feedPolicy) throws MetadataException, RemoteException {
         try {
             // Insert into the 'FeedPolicy' dataset.
             FeedPolicyTupleTranslator tupleReaderWriter = new FeedPolicyTupleTranslator(true);
@@ -1332,14 +1334,14 @@ public class MetadataNode implements IMetadataNode {
     }
 
     @Override
-    public FeedPolicy getFeedPolicy(JobId jobId, String dataverse, String policyName)
+    public FeedPolicyEntity getFeedPolicy(JobId jobId, String dataverse, String policyName)
             throws MetadataException, RemoteException {
 
         try {
             ITupleReference searchKey = createTuple(dataverse, policyName);
             FeedPolicyTupleTranslator tupleReaderWriter = new FeedPolicyTupleTranslator(false);
-            List<FeedPolicy> results = new ArrayList<FeedPolicy>();
-            IValueExtractor<FeedPolicy> valueExtractor = new MetadataEntityValueExtractor<FeedPolicy>(
+            List<FeedPolicyEntity> results = new ArrayList<FeedPolicyEntity>();
+            IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<FeedPolicyEntity>(
                     tupleReaderWriter);
             searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
             if (!results.isEmpty()) {
@@ -1418,14 +1420,14 @@ public class MetadataNode implements IMetadataNode {
     }
 
     @Override
-    public List<FeedPolicy> getDataversePolicies(JobId jobId, String dataverse)
+    public List<FeedPolicyEntity> getDataversePolicies(JobId jobId, String dataverse)
             throws MetadataException, RemoteException {
         try {
             ITupleReference searchKey = createTuple(dataverse);
             FeedPolicyTupleTranslator tupleReaderWriter = new FeedPolicyTupleTranslator(false);
-            IValueExtractor<FeedPolicy> valueExtractor = new MetadataEntityValueExtractor<FeedPolicy>(
+            IValueExtractor<FeedPolicyEntity> valueExtractor = new MetadataEntityValueExtractor<FeedPolicyEntity>(
                     tupleReaderWriter);
-            List<FeedPolicy> results = new ArrayList<FeedPolicy>();
+            List<FeedPolicyEntity> results = new ArrayList<FeedPolicyEntity>();
             searchIndex(jobId, MetadataPrimaryIndexes.FEED_POLICY_DATASET, searchKey, valueExtractor, results);
             return results;
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
index b5a689d..3d07a00 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataTransactionContext.java
@@ -23,6 +23,9 @@ import java.util.ArrayList;
 
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.common.transactions.JobId;
+import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
+import org.apache.asterix.external.feed.api.IFeed;
+import org.apache.asterix.external.feed.api.IFeed.FeedType;
 import org.apache.asterix.metadata.api.IMetadataEntity;
 import org.apache.asterix.metadata.entities.CompactionPolicy;
 import org.apache.asterix.metadata.entities.Dataset;
@@ -30,15 +33,11 @@ import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.Feed.FeedType;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.Library;
 import org.apache.asterix.metadata.entities.NodeGroup;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
-import org.apache.asterix.metadata.entities.SecondaryFeed;
-import org.apache.asterix.metadata.feeds.AdapterIdentifier;
 
 /**
  * Used to implement serializable transactions against the MetadataCache.
@@ -223,7 +222,7 @@ public class MetadataTransactionContext extends MetadataCache {
         return opLog;
     }
 
-    public void addFeedPolicy(FeedPolicy feedPolicy) {
+    public void addFeedPolicy(FeedPolicyEntity feedPolicy) {
         droppedCache.dropFeedPolicy(feedPolicy);
         logAndApply(new MetadataLogicalOperation(feedPolicy, true));
 
@@ -235,16 +234,10 @@ public class MetadataTransactionContext extends MetadataCache {
 
     }
 
-    public void dropFeed(String dataverseName, String feedName, FeedType feedType) {
+    public void dropFeed(String dataverseName, String feedName, IFeed.FeedType feedType) {
         Feed feed = null;
-        switch (feedType) {
-            case PRIMARY:
-                feed = new PrimaryFeed(dataverseName, feedName, null, null, null);
-                break;
-            case SECONDARY:
-                feed = new SecondaryFeed(dataverseName, feedName, null, null);
-                break;
-        }
+        feed = new Feed(dataverseName, feedName, null, feedType, (feedType == FeedType.PRIMARY) ? feedName : null, null,
+                null);
         droppedCache.addFeedIfNotExists(feed);
         logAndApply(new MetadataLogicalOperation(feed, false));
     }
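
The dropFeed hunk above replaces the removed PrimaryFeed/SecondaryFeed subclasses with the single unified Feed constructor (its full signature appears in the Feed.java diff further down). A minimal caller-side sketch, assuming only that constructor; the dataverse, feed, and adapter names are illustrative, not taken from the patch:

    // Hedged sketch: a primary feed carries its own name as the source feed,
    // while a secondary feed names the feed it derives from. Null arguments
    // stand for fields that are simply unknown in this example.
    Feed primary = new Feed("SocialData", "TwitterFeed", null, IFeed.FeedType.PRIMARY,
            "TwitterFeed", "push_twitter", null);
    Feed secondary = new Feed("SocialData", "FilteredTweets", null, IFeed.FeedType.SECONDARY,
            "TwitterFeed", null, null);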

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntity.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntity.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntity.java
index bc19f72..2fc846b 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntity.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataEntity.java
@@ -23,13 +23,13 @@ import java.io.Serializable;
 
 import org.apache.asterix.metadata.MetadataCache;
 
-public interface IMetadataEntity extends Serializable {
+public interface IMetadataEntity<T> extends Serializable {
 
     public static final int PENDING_NO_OP = 0;
     public static final int PENDING_ADD_OP = 1;
     public static final int PENDING_DROP_OP = 2;
 
-    Object addToCache(MetadataCache cache);
+    public T addToCache(MetadataCache cache);
 
-    Object dropFromCache(MetadataCache cache);
+    public T dropFromCache(MetadataCache cache);
 }
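
Generifying IMetadataEntity means addToCache/dropFromCache now return the entity's own type instead of Object, so call sites and the MetadataCache no longer need casts; the entity diffs below all follow this pattern. A hypothetical helper, not part of this patch, just to show how the type parameter flows through:

    // Illustrative only: T is bound to the concrete entity (Dataset, Dataverse, Feed, ...),
    // so the cache round-trip stays typed end to end.
    static <T extends IMetadataEntity<T>> T applyToCache(MetadataCache cache, T entity, boolean add) {
        return add ? entity.addToCache(cache) : entity.dropFromCache(cache);
    }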

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
index 9d85fb5..242bbe3 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
@@ -33,7 +33,7 @@ import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.Library;
@@ -56,7 +56,6 @@ public interface IMetadataManager {
 
     /**
      * Initializes the metadata manager, e.g., finds the remote metadata node.
-     * 
      * @throws RemoteException
      *             If an error occurred while contacting the proxy for finding
      *             the metadata node.
@@ -65,7 +64,6 @@ public interface IMetadataManager {
 
     /**
      * Begins a transaction on the metadata node.
-     * 
      * @return A globally unique transaction id.
      * @throws ACIDException
      * @throws RemoteException
@@ -74,7 +72,6 @@ public interface IMetadataManager {
 
     /**
      * Commits a remote transaction on the metadata node.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @throws ACIDException
@@ -84,7 +81,6 @@ public interface IMetadataManager {
 
     /**
      * Aborts a remote transaction running on the metadata node.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @throws ACIDException
@@ -95,7 +91,6 @@ public interface IMetadataManager {
     /**
      * Locks the metadata in given mode. The lock acquisition is delegated to
      * the metadata node. This method blocks until the lock can be acquired.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param lockMode
@@ -107,7 +102,6 @@ public interface IMetadataManager {
 
     /**
      * Releases all locks on the metadata held by the given transaction id.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @throws ACIDException
@@ -117,7 +111,6 @@ public interface IMetadataManager {
 
     /**
      * Inserts a new dataverse into the metadata.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverse
@@ -129,7 +122,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves all dataverses
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @return A list of dataverse instances.
@@ -139,7 +131,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves a dataverse with given name.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -152,7 +143,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves all datasets belonging to the given dataverse.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -167,7 +157,6 @@ public interface IMetadataManager {
     /**
      * Deletes the dataverse with given name, and all its associated datasets,
      * indexes, and types.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @return A list of dataset instances.
@@ -178,7 +167,6 @@ public interface IMetadataManager {
 
     /**
      * Inserts a new dataset into the metadata.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataset
@@ -190,7 +178,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves a dataset within a given dataverse.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -206,7 +193,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves all indexes of a dataset.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -222,7 +208,6 @@ public interface IMetadataManager {
 
     /**
      * Deletes the dataset with given name, and all its associated indexes.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -238,7 +223,6 @@ public interface IMetadataManager {
     /**
      * Inserts an index into the metadata. The index itself knows its name, and
      * which dataset it belongs to.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param index
@@ -250,7 +234,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves the index with given name, in given dataverse and dataset.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -267,7 +250,6 @@ public interface IMetadataManager {
 
     /**
      * Deletes the index with given name, in given dataverse and dataset.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -283,7 +265,6 @@ public interface IMetadataManager {
 
     /**
      * Inserts a datatype.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param datatype
@@ -295,7 +276,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves the datatype with given name in given dataverse.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -311,7 +291,6 @@ public interface IMetadataManager {
 
     /**
      * Deletes the given datatype in given dataverse.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -327,7 +306,6 @@ public interface IMetadataManager {
 
     /**
      * Inserts a node group.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param nodeGroup
@@ -339,7 +317,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves a node group.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param nodeGroupName
@@ -351,7 +328,6 @@ public interface IMetadataManager {
 
     /**
      * Deletes a node group.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param nodeGroupName
@@ -364,7 +340,6 @@ public interface IMetadataManager {
 
     /**
      * Inserts a node (machine).
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param node
@@ -490,13 +465,12 @@ public interface IMetadataManager {
      */
     public void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException;
 
-   
-   /**
+    /**
      * @param ctx
      * @param policy
      * @throws MetadataException
      */
-    public void addFeedPolicy(MetadataTransactionContext ctx, FeedPolicy policy) throws MetadataException;
+    public void addFeedPolicy(MetadataTransactionContext ctx, FeedPolicyEntity policy) throws MetadataException;
 
     /**
      * @param ctx
@@ -505,10 +479,9 @@ public interface IMetadataManager {
      * @return
      * @throws MetadataException
      */
-    public FeedPolicy getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
+    public FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
             throws MetadataException;
 
-   
     public void initializeDatasetIdFactory(MetadataTransactionContext ctx) throws MetadataException;
 
     public int getMostRecentDatasetId() throws MetadataException;
@@ -524,7 +497,6 @@ public interface IMetadataManager {
     /**
      * Removes a library , acquiring local locks on behalf of the given
      * transaction id.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -540,7 +512,6 @@ public interface IMetadataManager {
     /**
      * Adds a library, acquiring local locks on behalf of the given
      * transaction id.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param library
@@ -567,7 +538,6 @@ public interface IMetadataManager {
 
     /**
      * Retrieves libraries installed in a given dataverse.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
@@ -624,7 +594,6 @@ public interface IMetadataManager {
 
     /**
      * Get an external file
-     * 
      * @param mdTxnCtx
      * @param dataverseName
      * @param datasetName
@@ -637,7 +606,6 @@ public interface IMetadataManager {
 
     /**
      * Updates an existing dataset in metadata.
-     * 
      * @param ctx
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataset
@@ -649,7 +617,6 @@ public interface IMetadataManager {
 
     /**
      * Clean up temporary datasets that have not been active for a long time.
-     * 
      * @throws MetadataException
      */
     public void cleanupTempDatasets() throws MetadataException;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
index 59a8f76..0d8818d 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataNode.java
@@ -35,7 +35,7 @@ import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.Library;
@@ -557,7 +557,7 @@ public interface IMetadataNode extends Remote, Serializable {
      * @throws MetadataException
      * @throws RemoteException
      */
-    public void addFeedPolicy(JobId jobId, FeedPolicy feedPolicy) throws MetadataException, RemoteException;
+    public void addFeedPolicy(JobId jobId, FeedPolicyEntity feedPolicy) throws MetadataException, RemoteException;
 
     /**
      * @param jobId
@@ -567,7 +567,7 @@ public interface IMetadataNode extends Remote, Serializable {
      * @throws MetadataException
      * @throws RemoteException
      */
-    public FeedPolicy getFeedPolicy(JobId jobId, String dataverse, String policy) throws MetadataException,
+    public FeedPolicyEntity getFeedPolicy(JobId jobId, String dataverse, String policy) throws MetadataException,
             RemoteException;
 
    
@@ -658,7 +658,7 @@ public interface IMetadataNode extends Remote, Serializable {
      * @throws MetadataException
      * @throws RemoteException
      */
-    public List<FeedPolicy> getDataversePolicies(JobId jobId, String dataverse) throws MetadataException,
+    public List<FeedPolicyEntity> getDataversePolicies(JobId jobId, String dataverse) throws MetadataException,
             RemoteException;
 
     /**

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index ab0b79d..aa7f7d5 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -16,7 +16,6 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 package org.apache.asterix.metadata.bootstrap;
 
 import java.io.File;
@@ -37,14 +36,16 @@ import org.apache.asterix.common.config.AsterixMetadataProperties;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.GlobalConfig;
 import org.apache.asterix.common.config.IAsterixPropertiesProvider;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.context.BaseOperationTracker;
 import org.apache.asterix.common.context.CorrelatedPrefixMergePolicyFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
 import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.external.runtime.GenericSocketFeedAdapterFactory;
-import org.apache.asterix.external.runtime.SocketClientAdapterFactory;
 import org.apache.asterix.metadata.IDatasetDetails;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
@@ -57,16 +58,14 @@ import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
 import org.apache.asterix.metadata.entities.Node;
 import org.apache.asterix.metadata.entities.NodeGroup;
-import org.apache.asterix.metadata.feeds.AdapterIdentifier;
 import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
-import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.util.AsterixClusterProperties;
@@ -105,7 +104,7 @@ import org.apache.hyracks.storage.common.file.LocalResource;
  */
 public class MetadataBootstrap {
     private static final Logger LOGGER = Logger.getLogger(MetadataBootstrap.class.getName());
-    public static final boolean IS_DEBUG_MODE = false;//true
+    public static final boolean IS_DEBUG_MODE = false;// true
 
     private static IAsterixAppRuntimeContext runtimeContext;
 
@@ -193,7 +192,7 @@ public class MetadataBootstrap {
                 }
             }
 
-            //#. initialize datasetIdFactory
+            // #. initialize datasetIdFactory
             MetadataManager.INSTANCE.initializeDatasetIdFactory(mdTxnCtx);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
@@ -204,8 +203,8 @@ public class MetadataBootstrap {
                 MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
             } catch (Exception e2) {
                 e.addSuppressed(e2);
-                //TODO
-                //change the exception type to AbortFailureException
+                // TODO
+                // change the exception type to AbortFailureException
                 throw new MetadataException(e);
             }
             throw e;
@@ -278,8 +277,8 @@ public class MetadataBootstrap {
             // Map.Entry<String, String[]> me = (Map.Entry<String,
             // String[]>)im.next();
             MetadataManager.INSTANCE.addNode(mdTxnCtx, new Node(iter.next(), 0, 0/*
-                                                                                 * , me . getValue ( )
-                                                                                 */));
+                                                                                  * , me . getValue ( )
+                                                                                  */));
         }
     }
 
@@ -298,8 +297,7 @@ public class MetadataBootstrap {
     }
 
     private static void insertInitialAdapters(MetadataTransactionContext mdTxnCtx) throws Exception {
-        String[] builtInAdapterClassNames = new String[] { GenericAdapterFactory.class.getName(),
-                GenericSocketFeedAdapterFactory.class.getName(), SocketClientAdapterFactory.class.getName() };
+        String[] builtInAdapterClassNames = new String[] { GenericAdapterFactory.class.getName() };
         DatasourceAdapter adapter;
         for (String adapterClassName : builtInAdapterClassNames) {
             adapter = getAdapter(adapterClassName);
@@ -311,7 +309,7 @@ public class MetadataBootstrap {
     }
 
     private static void insertInitialFeedPolicies(MetadataTransactionContext mdTxnCtx) throws Exception {
-        for (FeedPolicy feedPolicy : BuiltinFeedPolicies.policies) {
+        for (FeedPolicyEntity feedPolicy : BuiltinFeedPolicies.policies) {
             MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, feedPolicy);
         }
         if (LOGGER.isLoggable(Level.INFO)) {
@@ -333,7 +331,7 @@ public class MetadataBootstrap {
     private static DatasourceAdapter getAdapter(String adapterFactoryClassName) throws Exception {
         String adapterName = ((IAdapterFactory) (Class.forName(adapterFactoryClassName).newInstance())).getAlias();
         return new DatasourceAdapter(new AdapterIdentifier(MetadataConstants.METADATA_DATAVERSE_NAME, adapterName),
-                adapterFactoryClassName, DatasourceAdapter.AdapterType.INTERNAL);
+                adapterFactoryClassName, IDataSourceAdapter.AdapterType.INTERNAL);
     }
 
     private static CompactionPolicy getCompactionPolicyEntity(String compactionPolicyClassName) throws Exception {
@@ -346,7 +344,7 @@ public class MetadataBootstrap {
             throws Exception {
         ClusterPartition metadataPartition = propertiesProvider.getMetadataProperties().getMetadataPartition();
         int metadataDeviceId = metadataPartition.getIODeviceNum();
-        String metadataPartitionPath = SplitsAndConstraintsUtil.prepareStoragePartitionPath(
+        String metadataPartitionPath = StoragePathUtil.prepareStoragePartitionPath(
                 AsterixClusterProperties.INSTANCE.getStorageDirectoryName(), metadataPartition.getPartitionId());
         String resourceName = metadataPartitionPath + File.separator + index.getFileNameRelativePath();
         FileReference file = ioManager.getAbsoluteFileRef(metadataDeviceId, resourceName);
@@ -413,8 +411,8 @@ public class MetadataBootstrap {
     }
 
     public static void startDDLRecovery() throws MetadataException {
-        //#. clean up any record which has pendingAdd/DelOp flag 
-        //   as traversing all records from DATAVERSE_DATASET to DATASET_DATASET, and then to INDEX_DATASET.
+        // #. clean up any record which has pendingAdd/DelOp flag
+        // as traversing all records from DATAVERSE_DATASET to DATASET_DATASET, and then to INDEX_DATASET.
         String dataverseName = null;
         String datasetName = null;
         String indexName = null;
@@ -433,7 +431,7 @@ public class MetadataBootstrap {
             for (Dataverse dataverse : dataverses) {
                 dataverseName = dataverse.getDataverseName();
                 if (dataverse.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
-                    //drop pending dataverse
+                    // drop pending dataverse
                     MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
                     if (LOGGER.isLoggable(Level.INFO)) {
                         LOGGER.info("Dropped a pending dataverse: " + dataverseName);
@@ -443,7 +441,7 @@ public class MetadataBootstrap {
                     for (Dataset dataset : datasets) {
                         datasetName = dataset.getDatasetName();
                         if (dataset.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
-                            //drop pending dataset
+                            // drop pending dataset
                             MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
                             if (LOGGER.isLoggable(Level.INFO)) {
                                 LOGGER.info("Dropped a pending dataset: " + dataverseName + "." + datasetName);
@@ -454,7 +452,7 @@ public class MetadataBootstrap {
                             for (Index index : indexes) {
                                 indexName = index.getIndexName();
                                 if (index.getPendingOp() != IMetadataEntity.PENDING_NO_OP) {
-                                    //drop pending index
+                                    // drop pending index
                                     MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
                                     if (LOGGER.isLoggable(Level.INFO)) {
                                         LOGGER.info("Dropped a pending index: " + dataverseName + "." + datasetName

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataConstants.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataConstants.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataConstants.java
deleted file mode 100644
index a68ea81..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataConstants.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.metadata.bootstrap;
-
-public class MetadataConstants {
-
-    // Name of the dataverse the metadata lives in.
-    public final static String METADATA_DATAVERSE_NAME = "Metadata";
-
-    // Name of the node group where metadata is stored on.
-    public final static String METADATA_NODEGROUP_NAME = "MetadataGroup";
-
-    // Name of the default nodegroup where internal/feed datasets will be partitioned
-    // if an explicit nodegroup is not specified at the time of creation of a dataset
-    public static final String METADATA_DEFAULT_NODEGROUP_NAME = "DEFAULT_NG_ALL_NODES";
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java
index 9ef9f84..2d5e663 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndex.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.exceptions.AsterixRuntimeException;
 import org.apache.asterix.common.transactions.DatasetId;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexImmutableProperties.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexImmutableProperties.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexImmutableProperties.java
index 129bc0c..91d25b3 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexImmutableProperties.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataIndexImmutableProperties.java
@@ -18,6 +18,8 @@
  */
 package org.apache.asterix.metadata.bootstrap;
 
+import org.apache.asterix.common.config.MetadataConstants;
+
 public enum MetadataIndexImmutableProperties {
     METADATA(MetadataConstants.METADATA_DATAVERSE_NAME, 0, 0),
     DATAVERSE("Dataverse", 1, 1),

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
index ce22177..cff5c6b 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
@@ -16,7 +16,6 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-
 package org.apache.asterix.metadata.declared;
 
 import java.io.File;
@@ -34,6 +33,7 @@ import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
 import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
 import org.apache.asterix.common.context.ITransactionSubsystemProvider;
 import org.apache.asterix.common.context.TransactionSubsystemProvider;
@@ -41,27 +41,32 @@ import org.apache.asterix.common.dataflow.AsterixLSMInvertedIndexInsertDeleteOpe
 import org.apache.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.dataflow.IAsterixApplicationContextInfo;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedActivity;
-import org.apache.asterix.common.feeds.FeedActivity.FeedActivityDetails;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.api.ICentralFeedManager;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
 import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
 import org.apache.asterix.common.transactions.JobId;
+import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
 import org.apache.asterix.external.adapter.factory.LookupAdapterFactory;
 import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.feed.api.ICentralFeedManager;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.indexing.IndexingConstants;
 import org.apache.asterix.external.operators.ExternalBTreeSearchOperatorDescriptor;
+import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.external.operators.ExternalLookupOperatorDescriptor;
 import org.apache.asterix.external.operators.ExternalRTreeSearchOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
 import org.apache.asterix.external.provider.AdapterFactoryProvider;
+import org.apache.asterix.external.util.FeedConstants;
 import org.apache.asterix.formats.base.IDataFormat;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 import org.apache.asterix.formats.nontagged.AqlLinearizeComparatorFactoryProvider;
@@ -69,25 +74,19 @@ import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint;
 import org.apache.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
-import org.apache.asterix.metadata.entities.DatasourceAdapter.AdapterType;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
 import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
-import org.apache.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.FeedUtil;
+import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
 import org.apache.asterix.metadata.utils.DatasetUtils;
 import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
 import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
@@ -388,7 +387,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                     .getSerializerDeserializer(feedOutputType);
             RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
 
-            FeedPolicy feedPolicy = (FeedPolicy) ((AqlDataSource) dataSource).getProperties()
+            FeedPolicyEntity feedPolicy = (FeedPolicyEntity) ((AqlDataSource) dataSource).getProperties()
                     .get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
             if (feedPolicy == null) {
                 throw new AlgebricksException("Feed not configured with a policy");
@@ -560,7 +559,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                     }
                 }
                 // TODO Check this call, result of merge from master!
-                //  ((IGenericAdapterFactory) adapterFactory).setFiles(files);
+                // ((IGenericAdapterFactory) adapterFactory).setFiles(files);
             }
 
             return adapterFactory;
@@ -594,9 +593,9 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
     }
 
     public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
-            JobSpecification jobSpec, PrimaryFeed primaryFeed, FeedPolicyAccessor policyAccessor) throws Exception {
-        Triple<IAdapterFactory, ARecordType, AdapterType> factoryOutput = null;
-        factoryOutput = FeedUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx);
+            JobSpecification jobSpec, Feed primaryFeed, FeedPolicyAccessor policyAccessor) throws Exception {
+        Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> factoryOutput = null;
+        factoryOutput = FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx);
         IAdapterFactory adapterFactory = factoryOutput.first;
         FeedIntakeOperatorDescriptor feedIngestor = null;
         switch (factoryOutput.third) {
@@ -605,7 +604,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                         factoryOutput.second, policyAccessor);
                 break;
             case EXTERNAL:
-                String libraryName = primaryFeed.getAdaptorName().trim()
+                String libraryName = primaryFeed.getAdapterName().trim()
                         .split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
                 feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, primaryFeed, libraryName,
                         adapterFactory.getClass().getName(), factoryOutput.second, policyAccessor);
@@ -2084,7 +2083,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
     public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForDataset(
             String dataverseName, String datasetName, String targetIdxName, boolean temp) throws AlgebricksException {
         FileSplit[] splits = splitsForDataset(mdTxnCtx, dataverseName, datasetName, targetIdxName, temp);
-        return SplitsAndConstraintsUtil.splitProviderAndPartitionConstraints(splits);
+        return StoragePathUtil.splitProviderAndPartitionConstraints(splits);
     }
 
     public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForDataverse(
@@ -2140,7 +2139,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
         }
     }
 
-    public FeedPolicy findFeedPolicy(String dataverse, String policyName) throws AlgebricksException {
+    public FeedPolicyEntity findFeedPolicy(String dataverse, String policyName) throws AlgebricksException {
         try {
             return MetadataManager.INSTANCE.getFeedPolicy(mdTxnCtx, dataverse, policyName);
         } catch (MetadataException e) {
@@ -2193,7 +2192,8 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
                     dataset.getDataverseName(), dataset.getItemTypeName()).getDatatype();
 
-            // Create the adapter factory <- right now there is only one. if there are more in the future, we can create a map->
+            // Create the adapter factory <- right now there is only one. if there are more in the future, we can create
+            // a map->
             ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
             LookupAdapterFactory<?> adapterFactory = AdapterFactoryProvider.getAdapterFactory(
                     datasetDetails.getProperties(), (ARecordType) itemType, ridIndexes, retainInput, retainNull,

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
index 2e603f1..6f540df 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
@@ -18,12 +18,12 @@
  */
 package org.apache.asterix.metadata.declared;
 
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.api.IFeed;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.management.FeedId;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.Feed.FeedType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -33,14 +33,14 @@ public class FeedDataSource extends AqlDataSource {
 
     private Feed feed;
     private final FeedId sourceFeedId;
-    private final FeedType sourceFeedType;
+    private final IFeed.FeedType sourceFeedType;
     private final ConnectionLocation location;
     private final String targetDataset;
     private final String[] locations;
     private final int computeCardinality;
 
     public FeedDataSource(AqlSourceId id, String targetDataset, IAType itemType, AqlDataSourceType dataSourceType,
-            FeedId sourceFeedId, FeedType sourceFeedType, ConnectionLocation location, String[] locations)
+            FeedId sourceFeedId, IFeed.FeedType sourceFeedType, ConnectionLocation location, String[] locations)
                     throws AlgebricksException {
         super(id, itemType, dataSourceType);
         this.targetDataset = targetDataset;
@@ -75,6 +75,11 @@ public class FeedDataSource extends AqlDataSource {
         return feed;
     }
 
+    @Override
+    public IAType[] getSchemaTypes() {
+        return schemaTypes;
+    }
+
     public String getTargetDataset() {
         return targetDataset;
     }
@@ -108,7 +113,7 @@ public class FeedDataSource extends AqlDataSource {
         domain = domainForExternalData;
     }
 
-    public FeedType getSourceFeedType() {
+    public IFeed.FeedType getSourceFeedType() {
         return sourceFeedType;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java
index 39654e9..d71663a 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/CompactionPolicy.java
@@ -25,7 +25,7 @@ import org.apache.asterix.metadata.api.IMetadataEntity;
 /**
  * Metadata describing a compaction policy record.
  */
-public class CompactionPolicy implements IMetadataEntity {
+public class CompactionPolicy implements IMetadataEntity<CompactionPolicy> {
 
     private static final long serialVersionUID = 1L;
 
@@ -71,12 +71,12 @@ public class CompactionPolicy implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public CompactionPolicy addToCache(MetadataCache cache) {
         return cache.addCompactionPolicyIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public CompactionPolicy dropFromCache(MetadataCache cache) {
         return cache.dropCompactionPolicy(this);
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index ae4c742..32456ae 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -29,7 +29,7 @@ import org.apache.asterix.metadata.api.IMetadataEntity;
 /**
  * Metadata describing a dataset.
  */
-public class Dataset implements IMetadataEntity {
+public class Dataset implements IMetadataEntity<Dataset> {
 
     private static final long serialVersionUID = 1L;
 
@@ -122,12 +122,12 @@ public class Dataset implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public Dataset addToCache(MetadataCache cache) {
         return cache.addDatasetIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public Dataset dropFromCache(MetadataCache cache) {
         return cache.dropDataset(this);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java
index ac98fb0..5a85327 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/DatasourceAdapter.java
@@ -18,19 +18,15 @@
  */
 package org.apache.asterix.metadata.entities;
 
+import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
+import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
-import org.apache.asterix.metadata.feeds.AdapterIdentifier;
 
-public class DatasourceAdapter implements IMetadataEntity {
+public class DatasourceAdapter implements IMetadataEntity<DatasourceAdapter> {
 
     private static final long serialVersionUID = 1L;
 
-    public enum AdapterType {
-        INTERNAL,
-        EXTERNAL
-    }
-
     private final AdapterIdentifier adapterIdentifier;
     private final String classname;
     private final AdapterType type;
@@ -42,12 +38,12 @@ public class DatasourceAdapter implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public DatasourceAdapter addToCache(MetadataCache cache) {
         return cache.addAdapterIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public DatasourceAdapter dropFromCache(MetadataCache cache) {
         return cache.dropAdapter(this);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
index 73015df..0580756 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Datatype.java
@@ -26,7 +26,7 @@ import org.apache.asterix.om.types.IAType;
 /**
  * Metadata describing a datatype.
  */
-public class Datatype implements IMetadataEntity {
+public class Datatype implements IMetadataEntity<Datatype> {
 
     private static final long serialVersionUID = 1L;
 
@@ -60,12 +60,12 @@ public class Datatype implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public Datatype addToCache(MetadataCache cache) {
         return cache.addDatatypeIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public Datatype dropFromCache(MetadataCache cache) {
         return cache.dropDatatype(this);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
index ef315f4..467e6f5 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataverse.java
@@ -25,7 +25,7 @@ import org.apache.asterix.metadata.api.IMetadataEntity;
 /**
  * Metadata describing a dataverse.
  */
-public class Dataverse implements IMetadataEntity {
+public class Dataverse implements IMetadataEntity<Dataverse> {
 
     private static final long serialVersionUID = 1L;
     // Enforced to be unique within an Asterix cluster..
@@ -52,12 +52,12 @@ public class Dataverse implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public Dataverse addToCache(MetadataCache cache) {
         return cache.addDataverseIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public Dataverse dropFromCache(MetadataCache cache) {
         return cache.dropDataverse(this);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java
index 5419dfa..58aa622 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Feed.java
@@ -19,66 +19,68 @@
 
 package org.apache.asterix.metadata.entities;
 
-import org.apache.asterix.common.feeds.FeedId;
+import java.util.Map;
+
 import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.external.feed.api.IFeed;
+import org.apache.asterix.external.feed.management.FeedId;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
 
 /**
  * Feed POJO
  */
-public class Feed implements IMetadataEntity {
-
+public class Feed implements IMetadataEntity<Feed>, IFeed {
     private static final long serialVersionUID = 1L;
 
     /** A unique identifier for the feed */
-    protected final FeedId feedId;
-
+    private FeedId feedId;
     /** The function that is to be applied on each incoming feed tuple **/
-    protected final FunctionSignature appliedFunction;
-
+    private FunctionSignature appliedFunction;
     /** The type {@code FeedType} associated with the feed. **/
-    protected final FeedType feedType;
-
+    private IFeed.FeedType feedType;
     /** A string representation of the instance **/
-    protected final String displayName;
-
-    public enum FeedType {
-        /**
-         * A feed that derives its data from an external source.
-         */
-        PRIMARY,
-
-        /**
-         * A feed that derives its data from another primary or secondary feed.
-         */
-        SECONDARY
-    }
-
-    public Feed(String dataverseName, String datasetName, FunctionSignature appliedFunction, FeedType feedType) {
-        this.feedId = new FeedId(dataverseName, datasetName);
+    private String displayName;
+    /** A string representation of the adapter name **/
+    private String adapterName;
+    /** Adapter configuration */
+    private Map<String, String> adapterConfiguration;
+    /** Source primary feed */
+    private String sourceFeedName;
+
+    public Feed(String dataverseName, String feedName, FunctionSignature appliedFunction, IFeed.FeedType feedType,
+            String sourceFeedName, String adapterName, Map<String, String> configuration) {
+        this.feedId = new FeedId(dataverseName, feedName);
         this.appliedFunction = appliedFunction;
         this.feedType = feedType;
         this.displayName = feedType + "(" + feedId + ")";
+        this.adapterName = adapterName;
+        this.adapterConfiguration = configuration;
+        this.sourceFeedName = sourceFeedName;
     }
 
+    @Override
     public FeedId getFeedId() {
         return feedId;
     }
 
+    @Override
     public String getDataverseName() {
         return feedId.getDataverse();
     }
 
+    @Override
     public String getFeedName() {
         return feedId.getFeedName();
     }
 
+    @Override
     public FunctionSignature getAppliedFunction() {
         return appliedFunction;
     }
 
-    public FeedType getFeedType() {
+    @Override
+    public IFeed.FeedType getFeedType() {
         return feedType;
     }
 
@@ -105,12 +107,26 @@ public class Feed implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public Feed addToCache(MetadataCache cache) {
         return cache.addFeedIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public Feed dropFromCache(MetadataCache cache) {
         return cache.dropFeed(this);
     }
+
+    @Override
+    public String getAdapterName() {
+        return adapterName;
+    }
+
+    @Override
+    public Map<String, String> getAdapterConfiguration() {
+        return adapterConfiguration;
+    }
+
+    public String getSourceFeedName() {
+        return sourceFeedName;
+    }
 }
\ No newline at end of file
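
With the PrimaryFeed/SecondaryFeed subclasses removed below, a single Feed instance now carries either an adapter (primary) or a source feed name (secondary). A hedged sketch of how the two cases might be constructed, assuming IFeed.FeedType still exposes the PRIMARY and SECONDARY values of the removed enum and that null is acceptable for the fields that do not apply; the dataverse, feed, adapter, and property names are made up.

import java.util.HashMap;
import java.util.Map;

import org.apache.asterix.external.feed.api.IFeed;
import org.apache.asterix.metadata.entities.Feed;

public class FeedConstructionSketch {
    public static void main(String[] args) {
        // Primary feed: data comes from an external adapter, so there is no source feed.
        Map<String, String> adapterConfig = new HashMap<>();
        adapterConfig.put("type-name", "TweetType"); // hypothetical adapter property
        Feed primary = new Feed("SocialData", "RawTweets", null, IFeed.FeedType.PRIMARY,
                null, "push_twitter", adapterConfig);

        // Secondary feed: data comes from another feed, so no adapter is configured.
        Feed secondary = new Feed("SocialData", "CleanTweets", null, IFeed.FeedType.SECONDARY,
                primary.getFeedName(), null, null);

        System.out.println(primary.getAdapterName() + " -> " + secondary.getSourceFeedName());
    }
}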

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicy.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicy.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicy.java
deleted file mode 100644
index 4c2fe23..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicy.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.metadata.entities;
-
-import java.util.Map;
-
-import org.apache.asterix.metadata.MetadataCache;
-import org.apache.asterix.metadata.api.IMetadataEntity;
-
-/**
- * Metadata describing a feed activity record.
- */
-public class FeedPolicy implements IMetadataEntity {
-
-    private static final long serialVersionUID = 1L;
-
-    private final String dataverseName;
-    // Enforced to be unique within a dataverse.
-    private final String policyName;
-    // A description of the policy
-    private final String description;
-    // The policy properties associated with the feed dataset
-    private Map<String, String> properties;
-
-    public FeedPolicy(String dataverseName, String policyName, String description, Map<String, String> properties) {
-        this.dataverseName = dataverseName;
-        this.policyName = policyName;
-        this.description = description;
-        this.properties = properties;
-    }
-
-    public String getDataverseName() {
-        return dataverseName;
-    }
-
-    public String getPolicyName() {
-        return policyName;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-        if (!(other instanceof FeedPolicy)) {
-            return false;
-        }
-        FeedPolicy otherPolicy = (FeedPolicy) other;
-        if (!otherPolicy.dataverseName.equals(dataverseName)) {
-            return false;
-        }
-        if (!otherPolicy.policyName.equals(policyName)) {
-            return false;
-        }
-        return true;
-    }
-
-    @Override
-    public Object addToCache(MetadataCache cache) {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    @Override
-    public Object dropFromCache(MetadataCache cache) {
-        // TODO Auto-generated method stub
-        return null;
-    }
-
-    public String getDescription() {
-        return description;
-    }
-
-    public Map<String, String> getProperties() {
-        return properties;
-    }
-
-    public void setProperties(Map<String, String> properties) {
-        this.properties = properties;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java
new file mode 100644
index 0000000..76f704a
--- /dev/null
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/FeedPolicyEntity.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.metadata.entities;
+
+import java.util.Map;
+
+import org.apache.asterix.external.feed.policy.FeedPolicy;
+import org.apache.asterix.metadata.MetadataCache;
+import org.apache.asterix.metadata.api.IMetadataEntity;
+
+/**
+ * Metadata describing a feed activity record.
+ * (This entity wraps the external feed policy so it can live in the metadata store.)
+ */
+public class FeedPolicyEntity extends FeedPolicy implements IMetadataEntity<FeedPolicyEntity> {
+
+    public FeedPolicyEntity(String dataverseName, String policyName, String description,
+            Map<String, String> properties) {
+        super(dataverseName, policyName, description, properties);
+    }
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public FeedPolicyEntity addToCache(MetadataCache cache) {
+        return null;
+    }
+
+    @Override
+    public FeedPolicyEntity dropFromCache(MetadataCache cache) {
+        return null;
+    }
+}
\ No newline at end of file
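
A small usage sketch of the new entity wrapper, assuming illustrative policy names and properties and assuming the external FeedPolicy parent keeps the getPolicyName()/getProperties() accessors of the class it replaces. Note that addToCache/dropFromCache intentionally return null, so feed policies are not cached like the other entities.

import java.util.HashMap;
import java.util.Map;

import org.apache.asterix.metadata.entities.FeedPolicyEntity;

public class FeedPolicySketch {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("excess.records.discard", "true"); // hypothetical policy property
        FeedPolicyEntity policy = new FeedPolicyEntity("SocialData", "Discard",
                "Discard records when the pipeline is congested", props);
        // Accessors below are assumed to be inherited from the external FeedPolicy parent.
        System.out.println(policy.getPolicyName() + ": " + policy.getProperties());
    }
}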

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
index cf7a95c..7ff423c 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Function.java
@@ -23,7 +23,7 @@ import java.util.List;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
 
-public class Function implements IMetadataEntity {
+public class Function implements IMetadataEntity<Function> {
     private static final long serialVersionUID = 1L;
     public static final String LANGUAGE_AQL = "AQL";
     public static final String LANGUAGE_JAVA = "JAVA";
@@ -85,12 +85,12 @@ public class Function implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public Function addToCache(MetadataCache cache) {
         return cache.addFunctionIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public Function dropFromCache(MetadataCache cache) {
         return cache.dropFunction(this);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
index 3c0feb9..6d047a2 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Index.java
@@ -35,7 +35,7 @@ import org.apache.hyracks.algebricks.common.utils.Pair;
 /**
  * Metadata describing an index.
  */
-public class Index implements IMetadataEntity, Comparable<Index> {
+public class Index implements IMetadataEntity<Index>, Comparable<Index> {
 
     private static final long serialVersionUID = 1L;
 
@@ -202,12 +202,12 @@ public class Index implements IMetadataEntity, Comparable<Index> {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public Index addToCache(MetadataCache cache) {
         return cache.addIndexIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public Index dropFromCache(MetadataCache cache) {
         return cache.dropIndex(this);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java
index 277fdc1..8ffc266 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Library.java
@@ -21,7 +21,7 @@ package org.apache.asterix.metadata.entities;
 import org.apache.asterix.metadata.MetadataCache;
 import org.apache.asterix.metadata.api.IMetadataEntity;
 
-public class Library implements IMetadataEntity {
+public class Library implements IMetadataEntity<Library> {
 
     private static final long serialVersionUID = 1L;
 
@@ -42,12 +42,12 @@ public class Library implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public Library addToCache(MetadataCache cache) {
         return cache.addLibraryIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public Library dropFromCache(MetadataCache cache) {
         return cache.dropLibrary(this);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/NodeGroup.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/NodeGroup.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/NodeGroup.java
index 8ef04e2..e5088aa 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/NodeGroup.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/NodeGroup.java
@@ -27,7 +27,7 @@ import org.apache.asterix.metadata.api.IMetadataEntity;
 /**
  * Metadata describing a named group of compute nodes.
  */
-public class NodeGroup implements IMetadataEntity {
+public class NodeGroup implements IMetadataEntity<NodeGroup> {
 
     private static final long serialVersionUID = 1L;
 
@@ -49,12 +49,12 @@ public class NodeGroup implements IMetadataEntity {
     }
 
     @Override
-    public Object addToCache(MetadataCache cache) {
+    public NodeGroup addToCache(MetadataCache cache) {
         return cache.addNodeGroupIfNotExists(this);
     }
 
     @Override
-    public Object dropFromCache(MetadataCache cache) {
+    public NodeGroup dropFromCache(MetadataCache cache) {
         return cache.dropNodeGroup(this);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/PrimaryFeed.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/PrimaryFeed.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/PrimaryFeed.java
deleted file mode 100644
index c70a210..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/PrimaryFeed.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.metadata.entities;
-
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.metadata.api.IMetadataEntity;
-
-/**
- * A primary feed is one that derives its data from an external source via an adaptor.
- * This class is a holder object for the metadata associated with a primary feed.
- */
-public class PrimaryFeed extends Feed implements IMetadataEntity {
-
-    private static final long serialVersionUID = 1L;
-
-    private final String adaptorName;
-    private final Map<String, String> adaptorConfiguration;
-
-    public PrimaryFeed(String dataverseName, String datasetName, String adaptorName,
-            Map<String, String> adaptorConfiguration, FunctionSignature appliedFunction) {
-        super(dataverseName, datasetName, appliedFunction, FeedType.PRIMARY);
-        this.adaptorName = adaptorName;
-        this.adaptorConfiguration = adaptorConfiguration;
-    }
-
-    public String getAdaptorName() {
-        return adaptorName;
-    }
-
-    public Map<String, String> getAdaptorConfiguration() {
-        return adaptorConfiguration;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-        if (!super.equals(other) || !(other instanceof PrimaryFeed)) {
-            return false;
-        }
-
-        PrimaryFeed otherFeed = (PrimaryFeed) other;
-        if (!otherFeed.getAdaptorName().equals(adaptorName)) {
-            return false;
-        }
-
-        for (Entry<String, String> entry : adaptorConfiguration.entrySet()) {
-            if (!(entry.getValue().equals(otherFeed.getAdaptorConfiguration().get(entry.getKey())))) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    @Override
-    public String toString() {
-        return "PrimaryFeed (" + adaptorName + ")";
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/SecondaryFeed.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/SecondaryFeed.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/SecondaryFeed.java
deleted file mode 100644
index caa633a..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/SecondaryFeed.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.metadata.entities;
-
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.metadata.api.IMetadataEntity;
-
-/**
- * A secondary feed is one that derives its data from another (primary/secondary) feed.
- * This class is a holder object for the metadata associated with a secondary feed.
- */
-public class SecondaryFeed extends Feed implements IMetadataEntity {
-
-    private static final long serialVersionUID = 1L;
-
-    private final String sourceFeedName;
-
-    public SecondaryFeed(String dataverseName, String feedName, String sourceFeedName, FunctionSignature appliedFunction) {
-        super(dataverseName, feedName, appliedFunction, FeedType.SECONDARY);
-        this.sourceFeedName = sourceFeedName;
-    }
-
-    public String getSourceFeedName() {
-        return sourceFeedName;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-        if (!super.equals(other) || !(other instanceof SecondaryFeed)) {
-            return false;
-        }
-
-        SecondaryFeed otherFeed = (SecondaryFeed) other;
-        if (!otherFeed.getSourceFeedName().equals(sourceFeedName)) {
-            return false;
-        }
-        return true;
-    }
-
-    @Override
-    public String toString() {
-        return "SecondaryFeed (" + feedId + ")" + "<--" + "(" + sourceFeedName + ")";
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
index c37230d..b0d5f76 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/DatasourceAdapterTupleTranslator.java
@@ -26,13 +26,14 @@ import java.io.IOException;
 import java.util.Calendar;
 
 import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
+import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
-import org.apache.asterix.metadata.entities.DatasourceAdapter.AdapterType;
-import org.apache.asterix.metadata.feeds.AdapterIdentifier;
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -76,7 +77,7 @@ public class DatasourceAdapterTupleTranslator extends AbstractTupleTranslator<Da
                 .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_NAME_FIELD_INDEX)).getStringValue();
         String classname = ((AString) adapterRecord
                 .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_CLASSNAME_FIELD_INDEX)).getStringValue();
-        AdapterType adapterType = AdapterType.valueOf(((AString) adapterRecord
+        IDataSourceAdapter.AdapterType adapterType = IDataSourceAdapter.AdapterType.valueOf(((AString) adapterRecord
                 .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_ARECORD_TYPE_FIELD_INDEX)).getStringValue());
 
         return new DatasourceAdapter(new AdapterIdentifier(dataverseName, adapterName), classname, adapterType);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
index e09928b..00e3e63 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedPolicyTupleTranslator.java
@@ -35,7 +35,7 @@ import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.om.base.AInt32;
 import org.apache.asterix.om.base.AMutableString;
 import org.apache.asterix.om.base.ARecord;
@@ -52,7 +52,7 @@ import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 /**
  * Translates a Dataset metadata entity to an ITupleReference and vice versa.
  */
-public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolicy> {
+public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolicyEntity> {
     // Field indexes of serialized FeedPolicy in a tuple.
     // Key field.
     public static final int FEED_POLICY_DATAVERSE_NAME_FIELD_INDEX = 0;
@@ -74,7 +74,7 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
     }
 
     @Override
-    public FeedPolicy getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
+    public FeedPolicyEntity getMetadataEntityFromTuple(ITupleReference frameTuple) throws IOException {
         byte[] serRecord = frameTuple.getFieldData(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordStartOffset = frameTuple.getFieldStart(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
         int recordLength = frameTuple.getFieldLength(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX);
@@ -84,8 +84,8 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
         return createFeedPolicyFromARecord(feedPolicyRecord);
     }
 
-    private FeedPolicy createFeedPolicyFromARecord(ARecord feedPolicyRecord) {
-        FeedPolicy feedPolicy = null;
+    private FeedPolicyEntity createFeedPolicyFromARecord(ARecord feedPolicyRecord) {
+        FeedPolicyEntity feedPolicy = null;
         String dataverseName = ((AString) feedPolicyRecord
                 .getValueByPos(MetadataRecordTypes.FEED_POLICY_ARECORD_DATAVERSE_NAME_FIELD_INDEX)).getStringValue();
         String policyName = ((AString) feedPolicyRecord
@@ -106,12 +106,12 @@ public class FeedPolicyTupleTranslator extends AbstractTupleTranslator<FeedPolic
             policyParamters.put(key, value);
         }
 
-        feedPolicy = new FeedPolicy(dataverseName, policyName, description, policyParamters);
+        feedPolicy = new FeedPolicyEntity(dataverseName, policyName, description, policyParamters);
         return feedPolicy;
     }
 
     @Override
-    public ITupleReference getTupleFromMetadataEntity(FeedPolicy feedPolicy) throws IOException, MetadataException {
+    public ITupleReference getTupleFromMetadataEntity(FeedPolicyEntity feedPolicy) throws IOException, MetadataException {
         // write the key in the first three fields of the tuple
         ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
 



[17/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/StorageReportFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/StorageReportFeedMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/StorageReportFeedMessage.java
deleted file mode 100644
index 6f2c102..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/StorageReportFeedMessage.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.message;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedConstants.MessageConstants;
-import org.apache.asterix.common.feeds.FeedId;
-
-/**
- * A feed control message sent from a storage runtime of a feed pipeline to report the intake timestamp corresponding
- * to the last persisted tuple.
- */
-public class StorageReportFeedMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-    private final int partition;
-    private long lastPersistedTupleIntakeTimestamp;
-    private boolean persistenceDelayWithinLimit;
-    private long averageDelay;
-    private int intakePartition;
-
-    public StorageReportFeedMessage(FeedConnectionId connectionId, int partition,
-            long lastPersistedTupleIntakeTimestamp, boolean persistenceDelayWithinLimit, long averageDelay,
-            int intakePartition) {
-        super(MessageType.STORAGE_REPORT);
-        this.connectionId = connectionId;
-        this.partition = partition;
-        this.lastPersistedTupleIntakeTimestamp = lastPersistedTupleIntakeTimestamp;
-        this.persistenceDelayWithinLimit = persistenceDelayWithinLimit;
-        this.averageDelay = averageDelay;
-        this.intakePartition = intakePartition;
-    }
-
-    @Override
-    public String toString() {
-        return messageType.name() + " " + connectionId + " [" + lastPersistedTupleIntakeTimestamp + "] ";
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public long getLastPersistedTupleIntakeTimestamp() {
-        return lastPersistedTupleIntakeTimestamp;
-    }
-
-    public int getPartition() {
-        return partition;
-    }
-
-    public boolean isPersistenceDelayWithinLimit() {
-        return persistenceDelayWithinLimit;
-    }
-
-    public void setPersistenceDelayWithinLimit(boolean persistenceDelayWithinLimit) {
-        this.persistenceDelayWithinLimit = persistenceDelayWithinLimit;
-    }
-
-    public long getAverageDelay() {
-        return averageDelay;
-    }
-
-    public void setAverageDelay(long averageDelay) {
-        this.averageDelay = averageDelay;
-    }
-
-    public int getIntakePartition() {
-        return intakePartition;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.LAST_PERSISTED_TUPLE_INTAKE_TIMESTAMP, lastPersistedTupleIntakeTimestamp);
-        obj.put(MessageConstants.PERSISTENCE_DELAY_WITHIN_LIMIT, persistenceDelayWithinLimit);
-        obj.put(MessageConstants.AVERAGE_PERSISTENCE_DELAY, averageDelay);
-        obj.put(FeedConstants.MessageConstants.PARTITION, partition);
-        obj.put(FeedConstants.MessageConstants.INTAKE_PARTITION, intakePartition);
-
-        return obj;
-    }
-
-    public static StorageReportFeedMessage read(JSONObject obj) throws JSONException {
-        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
-                obj.getString(FeedConstants.MessageConstants.FEED));
-        FeedConnectionId connectionId = new FeedConnectionId(feedId,
-                obj.getString(FeedConstants.MessageConstants.DATASET));
-        int partition = obj.getInt(FeedConstants.MessageConstants.PARTITION);
-        long timestamp = obj.getLong(FeedConstants.MessageConstants.LAST_PERSISTED_TUPLE_INTAKE_TIMESTAMP);
-        boolean persistenceDelayWithinLimit = obj.getBoolean(MessageConstants.PERSISTENCE_DELAY_WITHIN_LIMIT);
-        long averageDelay = obj.getLong(MessageConstants.AVERAGE_PERSISTENCE_DELAY);
-        int intakePartition = obj.getInt(MessageConstants.INTAKE_PARTITION);
-        return new StorageReportFeedMessage(connectionId, partition, timestamp, persistenceDelayWithinLimit,
-                averageDelay, intakePartition);
-    }
-
-    public void reset(long lastPersistedTupleIntakeTimestamp, boolean delayWithinLimit, long averageDelay) {
-        this.lastPersistedTupleIntakeTimestamp = lastPersistedTupleIntakeTimestamp;
-        this.persistenceDelayWithinLimit = delayWithinLimit;
-        this.averageDelay = averageDelay;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ThrottlingEnabledFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ThrottlingEnabledFeedMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ThrottlingEnabledFeedMessage.java
deleted file mode 100644
index 0ddd8ba..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/message/ThrottlingEnabledFeedMessage.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.message;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-
-/**
- * A feed control message indicating the need to end the feed. This message is dispatched
- * to all locations that host an operator involved in the feed pipeline.
- */
-public class ThrottlingEnabledFeedMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-
-    private final FeedRuntimeId runtimeId;
-
-    public ThrottlingEnabledFeedMessage(FeedConnectionId connectionId, FeedRuntimeId runtimeId) {
-        super(MessageType.THROTTLING_ENABLED);
-        this.connectionId = connectionId;
-        this.runtimeId = runtimeId;
-    }
-
-    @Override
-    public String toString() {
-        return MessageType.END.name() + "  " + connectionId + " [" + runtimeId + "] ";
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeId.getFeedRuntimeType());
-        obj.put(FeedConstants.MessageConstants.OPERAND_ID, runtimeId.getOperandId());
-        obj.put(FeedConstants.MessageConstants.PARTITION, runtimeId.getPartition());
-        return obj;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public FeedRuntimeId getFeedRuntimeId() {
-        return runtimeId;
-    }
-
-    public static ThrottlingEnabledFeedMessage read(JSONObject obj) throws JSONException {
-        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
-                obj.getString(FeedConstants.MessageConstants.FEED));
-        FeedConnectionId connectionId = new FeedConnectionId(feedId,
-                obj.getString(FeedConstants.MessageConstants.DATASET));
-        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.valueOf(obj
-                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE)),
-                obj.getInt(FeedConstants.MessageConstants.PARTITION),
-                obj.getString(FeedConstants.MessageConstants.OPERAND_ID));
-        return new ThrottlingEnabledFeedMessage(connectionId, runtimeId);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/parse/ITupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/parse/ITupleForwarder.java b/asterix-common/src/main/java/org/apache/asterix/common/parse/ITupleForwarder.java
index 5ee065a..27f4fcb 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/parse/ITupleForwarder.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/parse/ITupleForwarder.java
@@ -32,10 +32,11 @@ public interface ITupleForwarder {
     public enum TupleForwardPolicy {
         FRAME_FULL,
         COUNTER_TIMER_EXPIRED,
-        RATE_CONTROLLED
+        RATE_CONTROLLED,
+        FEED
     }
 
-    public void configure(Map<String, String> configuration);
+    public void configure(Map<String, String> configuration) throws HyracksDataException;
 
     public void initialize(IHyracksCommonContext ctx, IFrameWriter frameWriter) throws HyracksDataException;
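
A hedged sketch of how a data-flow component might map a configured policy string onto the extended enum; the property key used here is made up for illustration and is not part of this change.

import java.util.Map;

import org.apache.asterix.common.parse.ITupleForwarder.TupleForwardPolicy;

public final class TupleForwardPolicySketch {
    private TupleForwardPolicySketch() {
    }

    // Resolves the forwarding policy from the adapter configuration, defaulting to FEED.
    public static TupleForwardPolicy resolvePolicy(Map<String, String> configuration) {
        String value = configuration.get("tuple-forward-policy"); // hypothetical key
        return value == null ? TupleForwardPolicy.FEED : TupleForwardPolicy.valueOf(value.toUpperCase());
    }
}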
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java b/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
new file mode 100644
index 0000000..acfb9d5
--- /dev/null
+++ b/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.common.utils;
+
+import java.io.File;
+
+import org.apache.asterix.common.cluster.ClusterPartition;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import org.apache.hyracks.dataflow.std.file.FileSplit;
+import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
+
+public class StoragePathUtil {
+    public static final String PARTITION_DIR_PREFIX = "partition_";
+    public static final String TEMP_DATASETS_STORAGE_FOLDER = "temp";
+    public static final String DATASET_INDEX_NAME_SEPARATOR = "_idx_";
+    public static final String ADAPTER_INSTANCE_PREFIX = "adapter_";
+
+    public static Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraints(
+            FileSplit[] splits) {
+        IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
+        String[] loc = new String[splits.length];
+        for (int p = 0; p < splits.length; p++) {
+            loc[p] = splits[p].getNodeName();
+        }
+        AlgebricksPartitionConstraint pc = new AlgebricksAbsolutePartitionConstraint(loc);
+        return new Pair<IFileSplitProvider, AlgebricksPartitionConstraint>(splitProvider, pc);
+    }
+
+    public static FileSplit getFileSplitForClusterPartition(ClusterPartition partition, File relativeFile) {
+        return new FileSplit(partition.getActiveNodeId(), new FileReference(relativeFile), partition.getIODeviceNum(),
+                partition.getPartitionId());
+    }
+
+    public static String prepareStoragePartitionPath(String storageDirName, int partitionId) {
+        return storageDirName + File.separator + StoragePathUtil.PARTITION_DIR_PREFIX + partitionId;
+    }
+
+    public static String prepareDataverseIndexName(String dataverseName, String datasetName, String idxName) {
+        return dataverseName + File.separator + datasetName + StoragePathUtil.DATASET_INDEX_NAME_SEPARATOR + idxName;
+    }
+}
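
A quick usage sketch of the path helpers; the resulting strings assume a Unix-style File.separator and made-up dataverse/dataset names.

import org.apache.asterix.common.utils.StoragePathUtil;

public class StoragePathSketch {
    public static void main(String[] args) {
        // "storage/partition_3" on a Unix-style file system
        String partitionDir = StoragePathUtil.prepareStoragePartitionPath("storage", 3);

        // "SocialData/Tweets_idx_Tweets"
        String indexDir = StoragePathUtil.prepareDataverseIndexName("SocialData", "Tweets", "Tweets");

        System.out.println(partitionDir + " | " + indexDir);
    }
}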

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index 2e7158d..e957ac6 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@ -21,10 +21,10 @@ package org.apache.asterix.external.adapter.factory;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IDataParserFactory;
+import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
 import org.apache.asterix.external.api.IIndexibleExternalDataSource;
 import org.apache.asterix.external.api.IIndexingAdapterFactory;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
index 9539278..bf2db9a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
@@ -21,7 +21,6 @@ package org.apache.asterix.external.api;
 import java.io.Serializable;
 import java.util.Map;
 
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java
new file mode 100644
index 0000000..252b43b
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.management.FeedId;
+
+public interface IAdapterRuntimeManager {
+
+    public enum State {
+        /**
+         * Indicates that AsterixDB is maintaining the flow of data from an external source into its storage.
+         */
+        ACTIVE_INGESTION,
+
+        /**
+         * Indicates that data from the external source is being buffered and not
+         * pushed downstream.
+         */
+
+        INACTIVE_INGESTION,
+        /**
+         * Indicates that feed ingestion activity has finished.
+         */
+        FINISHED_INGESTION,
+
+        /** Indicates the occurrence of a failure during the intake stage of a data ingestion pipeline **/
+        FAILED_INGESTION
+    }
+
+    /**
+     * Start feed ingestion
+     * @throws Exception
+     */
+    public void start() throws Exception;
+
+    /**
+     * Stop feed ingestion.
+     * @throws Exception
+     */
+    public void stop() throws Exception;
+
+    /**
+     * @return feedId associated with the feed that is being ingested
+     */
+    public FeedId getFeedId();
+
+    /**
+     * @return the instance of the feed adapter (an implementation of {@code IFeedAdapter}) in use.
+     */
+    public IFeedAdapter getFeedAdapter();
+
+    /**
+     * @return state associated with the AdapterRuntimeManager. See {@code State}.
+     */
+    public State getState();
+
+    /**
+     * @param state
+     */
+    public void setState(State state);
+
+    public IIntakeProgressTracker getProgressTracker();
+
+    public int getPartition();
+
+}
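
A hedged sketch of how a caller might drive the runtime manager through the states documented above; the transition points are illustrative, not enforced by the interface.

import org.apache.asterix.external.api.IAdapterRuntimeManager;
import org.apache.asterix.external.api.IAdapterRuntimeManager.State;

public final class AdapterLifecycleSketch {
    private AdapterLifecycleSketch() {
    }

    public static void runOnce(IAdapterRuntimeManager manager) throws Exception {
        manager.start(); // adapter begins ingesting; the manager is typically in ACTIVE_INGESTION
        try {
            // ... monitor progress via manager.getProgressTracker() while data flows ...
        } finally {
            manager.stop(); // ask the adapter to wind down
            manager.setState(State.FINISHED_INGESTION);
        }
    }
}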

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
index f5f47ec..e4435a1 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.external.api;
 
-import java.io.IOException;
 import java.util.Map;
 
 import org.apache.asterix.common.parse.ITupleForwarder;
@@ -39,11 +38,21 @@ public interface IDataFlowController {
      * 3. setTupleForwarder(forwarder)
      * 4. configure(configuration,ctx)
      * 5. start(writer)
+     *
+     * pause(), resume(), and stop() are only used with feeds.
+     * pause() is called after start() when a feed is running and the system is overwhelmed with data.
+     * resume() is called after the load goes down and the system is ready to receive more data.
+     * stop() is called to disconnect the feed; once stop() is called, no other method is called.
+     *
      */
 
     public void start(IFrameWriter writer) throws HyracksDataException;
 
-    public boolean stop();
+    public boolean stop() throws HyracksDataException;
+
+    public boolean pause() throws HyracksDataException;
+
+    public boolean resume() throws HyracksDataException;
 
     public boolean handleException(Throwable th);
 
@@ -51,5 +60,5 @@ public interface IDataFlowController {
 
     public void setTupleForwarder(ITupleForwarder forwarder);
 
-    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) throws IOException;
+    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) throws HyracksDataException;
 }
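
A hedged sketch of the call order spelled out in the interface comment, assuming the forwarder, configuration, task context, and frame writer have already been created by the caller.

import java.util.Map;

import org.apache.asterix.common.parse.ITupleForwarder;
import org.apache.asterix.external.api.IDataFlowController;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;

public final class DataFlowControllerSketch {
    private DataFlowControllerSketch() {
    }

    public static void run(IDataFlowController controller, ITupleForwarder forwarder,
            Map<String, String> configuration, IHyracksTaskContext ctx, IFrameWriter writer)
            throws HyracksDataException {
        controller.setTupleForwarder(forwarder);  // step 3
        controller.configure(configuration, ctx); // step 4
        controller.start(writer);                 // step 5
        // For feeds only: pause when downstream is overwhelmed, resume when it recovers,
        // and stop to disconnect the feed for good.
        controller.pause();
        controller.resume();
        controller.stop();
    }
}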

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataSourceAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataSourceAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataSourceAdapter.java
new file mode 100644
index 0000000..e37f2b1
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataSourceAdapter.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import java.io.Serializable;
+
+import org.apache.hyracks.api.comm.IFrameWriter;
+
+/**
+ * A super interface implemented by a data source adapter. An adapter can be
+ * pull-based or push-based. This interface provides all common APIs that need
+ * to be implemented by each adapter irrespective of the kind of
+ * adapter (pull or push).
+ */
+public interface IDataSourceAdapter extends Serializable {
+
+    public enum AdapterType {
+        INTERNAL,
+        EXTERNAL
+    }
+
+    /**
+     * Triggers the adapter to begin ingesting data from the external source.
+     * @param partition
+     *            The adapter could be running with a degree of parallelism;
+     *            partition corresponds to the i-th parallel instance.
+     * @param writer
+     *            The instance of frame writer that is used by the adapter to
+     *            write frames to. The adapter packs the bytes fetched from the
+     *            external source into frames and forwards the frames to an
+     *            upstream receiving operator using the instance of IFrameWriter.
+     * @throws Exception
+     */
+    public void start(int partition, IFrameWriter writer) throws Exception;
+}
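
A minimal, hedged sketch of a push-style adapter satisfying this contract; a real adapter would pack fetched bytes into frames and hand each one to the writer.

import org.apache.asterix.external.api.IDataSourceAdapter;
import org.apache.hyracks.api.comm.IFrameWriter;

public class NoOpDataSourceAdapter implements IDataSourceAdapter {

    private static final long serialVersionUID = 1L;

    @Override
    public void start(int partition, IFrameWriter writer) throws Exception {
        // Open the downstream writer, forward frames (omitted here), and close it.
        writer.open();
        try {
            // A real adapter would fetch data for this partition, pack it into
            // frames, and call writer.nextFrame(frameBuffer) for each frame.
        } finally {
            writer.close();
        }
    }
}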

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java
new file mode 100644
index 0000000..3261556
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public interface IFeedAdapter extends IDataSourceAdapter {
+    /**
+     * Pause the ingestion of data.
+     * @throws HyracksDataException
+     * @throws Exception
+     */
+    public boolean pause() throws HyracksDataException;
+
+    /**
+     * Resume the ingestion of data.
+     * @throws HyracksDataException
+     * @throws Exception
+     */
+    public boolean resume() throws HyracksDataException;
+
+    /**
+     * Discontinue the ingestion of data.
+     * @throws Exception
+     */
+    public boolean stop() throws Exception;
+
+    /**
+     * @param e
+     * @return true if the ingestion should continue after the exception, false otherwise
+     * @throws Exception
+     */
+    public boolean handleException(Throwable e);
+}
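
A hedged sketch of how a feed adapter might honor pause/resume/stop with a simple flag-and-wait scheme; the actual adapters in this change are more involved.

import org.apache.asterix.external.api.IFeedAdapter;
import org.apache.hyracks.api.comm.IFrameWriter;

public class PausableFeedAdapter implements IFeedAdapter {

    private static final long serialVersionUID = 1L;

    private final Object lock = new Object();
    private volatile boolean paused = false;
    private volatile boolean stopped = false;

    @Override
    public void start(int partition, IFrameWriter writer) throws Exception {
        writer.open();
        try {
            while (!stopped) {
                synchronized (lock) {
                    while (paused && !stopped) {
                        lock.wait(); // block while the pipeline is overwhelmed
                    }
                }
                // Fetch the next batch from the external source and forward it here.
            }
        } finally {
            writer.close();
        }
    }

    @Override
    public boolean pause() {
        paused = true;
        return true;
    }

    @Override
    public boolean resume() {
        synchronized (lock) {
            paused = false;
            lock.notifyAll();
        }
        return true;
    }

    @Override
    public boolean stop() {
        synchronized (lock) {
            stopped = true;
            lock.notifyAll();
        }
        return true;
    }

    @Override
    public boolean handleException(Throwable e) {
        return !(e instanceof Error); // keep ingesting unless the failure is fatal
    }
}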

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
index 31d6317..531d050 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.external.api;
 
 import java.io.DataOutput;
+import java.io.IOException;
 import java.io.InputStream;
 
 public interface IStreamDataParser extends IDataParser {
@@ -30,10 +31,17 @@ public interface IStreamDataParser extends IDataParser {
     /**
      * Parse data into output AsterixDataModel binary records.
      * Used with parsers that support stream sources
-     *
      * @param out
      *            DataOutput instance that for writing the parser output.
      */
-
     public boolean parse(DataOutput out) throws Exception;
+
+    /**
+     * Resets the parser state. This is called when a failure takes place
+     * and the job needs to continue; to do that, the parser needs to
+     * be in a consistent state.
+     * @return true if the reset was successful, false otherwise
+     * @throws IOException
+     */
+    public boolean reset(InputStream in) throws IOException;
 }
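
A hedged sketch of the reset contract for a line-oriented parser. This class does not implement the full IStreamDataParser/IDataParser hierarchy; it only illustrates how reset might rebuild the parser's stream state after a failure so parsing can continue.

import java.io.BufferedReader;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class LineParserSketch {

    private BufferedReader reader;

    public void setInputStream(InputStream in) {
        this.reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
    }

    // Mirrors IStreamDataParser.parse(DataOutput): emit one record per call.
    public boolean parse(DataOutput out) throws IOException {
        String line = reader.readLine();
        if (line == null) {
            return false;
        }
        out.write(line.getBytes(StandardCharsets.UTF_8));
        return true;
    }

    // Mirrors IStreamDataParser.reset(InputStream): drop partially-consumed state
    // and continue from the freshly positioned stream supplied by the framework.
    public boolean reset(InputStream in) throws IOException {
        setInputStream(in);
        return true;
    }
}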

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
index d06161e..c5c8e48 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.external.dataflow;
 
-import java.io.IOException;
 import java.util.Map;
 
 import org.apache.asterix.common.parse.ITupleForwarder;
@@ -48,7 +47,7 @@ public abstract class AbstractDataFlowController implements IDataFlowController
     }
 
     @Override
-    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) throws IOException {
+    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) {
         this.configuration = configuration;
         this.ctx = ctx;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
new file mode 100644
index 0000000..aab4bf6
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataflow;
+
+import java.util.Map;
+
+import org.apache.asterix.common.parse.ITupleForwarder;
+import org.apache.asterix.external.api.IDataFlowController;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public abstract class AbstractFeedDataFlowController implements IDataFlowController {
+    protected FeedTupleForwarder tupleForwarder;
+    protected IHyracksTaskContext ctx;
+    protected Map<String, String> configuration;
+    protected static final int NUMBER_OF_TUPLE_FIELDS = 1;
+    protected ArrayTupleBuilder tb = new ArrayTupleBuilder(NUMBER_OF_TUPLE_FIELDS);
+
+    @Override
+    public ITupleForwarder getTupleForwarder() {
+        return tupleForwarder;
+    }
+
+    @Override
+    public void setTupleForwarder(ITupleForwarder tupleForwarder) {
+        this.tupleForwarder = (FeedTupleForwarder) tupleForwarder;
+    }
+
+    protected void initializeTupleForwarder(IFrameWriter writer) throws HyracksDataException {
+        tupleForwarder.initialize(ctx, writer);
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) {
+        this.configuration = configuration;
+        this.ctx = ctx;
+    }
+
+    @Override
+    public boolean pause() {
+        tupleForwarder.pause();
+        return true;
+    }
+
+    @Override
+    public boolean resume() {
+        tupleForwarder.resume();
+        return true;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
new file mode 100644
index 0000000..fe4557d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataflow;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordDataParser;
+import org.apache.asterix.external.api.IRecordFlowController;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.util.ExternalDataExceptionUtils;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowController
+        implements IRecordFlowController<T> {
+    protected IRecordDataParser<T> dataParser;
+    protected IRecordReader<? extends T> recordReader;
+    protected long interval;
+    protected AtomicBoolean closed = new AtomicBoolean(false);
+
+    @Override
+    public void start(IFrameWriter writer) throws HyracksDataException {
+        HyracksDataException hde = null;
+        try {
+            initializeTupleForwarder(writer);
+            while (recordReader.hasNext()) {
+                IRawRecord<? extends T> record = recordReader.next();
+                if (record == null) {
+                    Thread.sleep(interval);
+                    continue;
+                }
+                tb.reset();
+                dataParser.parse(record, tb.getDataOutput());
+                tb.addFieldEndOffset();
+                tupleForwarder.addTuple(tb);
+            }
+        } catch (Throwable th) {
+            hde = new HyracksDataException(th);
+        }
+        try {
+            tupleForwarder.close();
+        } catch (Throwable th) {
+            hde = ExternalDataExceptionUtils.suppress(hde, th);
+        }
+        try {
+            recordReader.close();
+        } catch (Throwable th) {
+            hde = ExternalDataExceptionUtils.suppress(hde, th);
+            throw hde;
+        } finally {
+            closeSignal();
+        }
+    }
+
+    private void closeSignal() {
+        synchronized (closed) {
+            closed.set(true);
+            closed.notifyAll();
+        }
+    }
+
+    private void waitForSignal() throws InterruptedException {
+        synchronized (closed) {
+            while (!closed.get()) {
+                closed.wait();
+            }
+        }
+    }
+
+    @Override
+    public boolean stop() throws HyracksDataException {
+        if (recordReader.stop()) {
+            try {
+                waitForSignal();
+            } catch (InterruptedException e) {
+                throw new HyracksDataException(e);
+            }
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public boolean handleException(Throwable th) {
+        return true;
+    }
+
+    @Override
+    public void setRecordParser(IRecordDataParser<T> dataParser) {
+        this.dataParser = dataParser;
+    }
+
+    @Override
+    public void setRecordReader(IRecordReader<T> recordReader) {
+        this.recordReader = recordReader;
+    }
+}
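The stop()/closeSignal() handshake above means a stop request blocks until the reader loop in start() has fully drained. A small editorial sketch (not part of this patch; the stopGracefully name, the char[] type argument, and the writer are placeholders, and project imports are omitted):

    static void stopGracefully(FeedRecordDataFlowController<char[]> controller, IFrameWriter writer)
            throws Exception {
        Thread ingestion = new Thread(() -> {
            try {
                controller.start(writer);
            } catch (HyracksDataException e) {
                // a real runtime would consult handleException() here
            }
        });
        ingestion.start();
        // ... later, when the feed is being disconnected:
        if (controller.stop()) { // returns only after closeSignal() fires in start()
            ingestion.join();
        }
    }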

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
new file mode 100644
index 0000000..4ef5f6d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataflow;
+
+import org.apache.asterix.external.api.IStreamDataParser;
+import org.apache.asterix.external.api.IStreamFlowController;
+import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedStreamDataFlowController extends AbstractFeedDataFlowController implements IStreamFlowController {
+
+    private IStreamDataParser dataParser;
+    private AInputStream stream;
+
+    @Override
+    public void start(IFrameWriter writer) throws HyracksDataException {
+        try {
+            initializeTupleForwarder(writer);
+            while (true) {
+                tb.reset();
+                if (!dataParser.parse(tb.getDataOutput())) {
+                    break;
+                }
+                tb.addFieldEndOffset();
+                tupleForwarder.addTuple(tb);
+            }
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        } finally {
+            tupleForwarder.close();
+        }
+    }
+
+    @Override
+    public boolean stop() throws HyracksDataException {
+        try {
+            if (stream.stop()) {
+                return true;
+            }
+            stream.close();
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+        return false;
+    }
+
+    @Override
+    public boolean handleException(Throwable th) {
+        boolean handled = true;
+        try {
+            handled &= stream.skipError();
+            if (handled) {
+                handled &= dataParser.reset(stream);
+            }
+        } catch (Exception e) {
+            th.addSuppressed(e);
+            return false;
+        }
+        return handled;
+    }
+
+    @Override
+    public void setStreamParser(IStreamDataParser dataParser) {
+        this.dataParser = dataParser;
+    }
+
+    public void setStream(AInputStream stream) {
+        this.stream = stream;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
new file mode 100644
index 0000000..d170766
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataflow;
+
+import java.util.Map;
+
+import org.apache.asterix.common.parse.ITupleForwarder;
+import org.apache.asterix.external.util.DataflowUtils;
+import org.apache.hyracks.api.comm.IFrame;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.comm.VSizeFrame;
+import org.apache.hyracks.api.context.IHyracksCommonContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+
+public class FeedTupleForwarder implements ITupleForwarder {
+
+    private FrameTupleAppender appender;
+    private IFrame frame;
+    private IFrameWriter writer;
+    private boolean paused = false;
+
+    @Override
+    public void configure(Map<String, String> configuration) {
+    }
+
+    @Override
+    public void initialize(IHyracksCommonContext ctx, IFrameWriter writer) throws HyracksDataException {
+        this.frame = new VSizeFrame(ctx);
+        this.writer = writer;
+        this.appender = new FrameTupleAppender(frame);
+    }
+
+    @Override
+    public void addTuple(ArrayTupleBuilder tb) throws HyracksDataException {
+        if (paused) {
+            synchronized (this) {
+                while (paused) {
+                    try {
+                        wait();
+                    } catch (InterruptedException e) {
+                        throw new HyracksDataException(e);
+                    }
+                }
+            }
+        }
+        DataflowUtils.addTupleToFrame(appender, tb, writer);
+    }
+
+    public void pause() {
+        paused = true;
+    }
+
+    public synchronized void resume() {
+        paused = false;
+        notifyAll();
+    }
+
+    @Override
+    public void close() throws HyracksDataException {
+        if (appender.getTupleCount() > 0) {
+            FrameUtils.flushFrame(frame.getBuffer(), writer);
+        }
+    }
+}
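A short editorial sketch (not part of this patch) of the pause/resume protocol: a control thread flips the flag while an ingestion thread may be blocked inside addTuple(). The throttle name and the fixed sleep are placeholders.

    static void throttle(FeedTupleForwarder forwarder) throws InterruptedException {
        forwarder.pause();   // subsequent addTuple() calls will block
        Thread.sleep(1000);  // e.g. wait for downstream back-pressure to clear
        forwarder.resume();  // wakes any thread waiting inside addTuple()
    }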

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
index ad8e791..9353a40 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
@@ -57,7 +57,7 @@ public class RecordDataFlowController<T> extends AbstractDataFlowController impl
 
     @Override
     public boolean stop() {
-        return false;
+        return recordReader.stop();
     }
 
     @Override
@@ -74,4 +74,14 @@ public class RecordDataFlowController<T> extends AbstractDataFlowController impl
     public void setRecordReader(IRecordReader<T> recordReader) throws Exception {
         this.recordReader = recordReader;
     }
+
+    @Override
+    public boolean pause() throws HyracksDataException {
+        return false;
+    }
+
+    @Override
+    public boolean resume() throws HyracksDataException {
+        return false;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
index 3016470..43738eb 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
@@ -61,4 +61,14 @@ public class StreamDataFlowController extends AbstractDataFlowController impleme
     public void setStreamParser(IStreamDataParser dataParser) {
         this.dataParser = dataParser;
     }
+
+    @Override
+    public boolean pause() throws HyracksDataException {
+        return false;
+    }
+
+    @Override
+    public boolean resume() throws HyracksDataException {
+        return false;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java
new file mode 100644
index 0000000..6c02500
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/AdapterIdentifier.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataset.adapter;
+
+import java.io.Serializable;
+
+/**
+ * A unique identifier for a data source adapter.
+ */
+public class AdapterIdentifier implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    private final String dataverseName;
+    private final String adapterName;
+
+    public AdapterIdentifier(String namespace, String name) {
+        this.dataverseName = namespace;
+        this.adapterName = name;
+    }
+
+    public String getNamespace() {
+        return dataverseName;
+    }
+
+    public String getName() {
+        return adapterName;
+    }
+
+    @Override
+    public int hashCode() {
+        return (dataverseName + "@" + adapterName).hashCode();
+
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null) {
+            return false;
+        }
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof AdapterIdentifier)) {
+            return false;
+        }
+        AdapterIdentifier a = (AdapterIdentifier) o;
+        return dataverseName.equals(a.getNamespace()) && adapterName.equals(a.getName());
+    }
+}
\ No newline at end of file
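Since equals() and hashCode() are defined over the dataverse and adapter names, the identifier can serve as a lookup key. A brief editorial sketch (not part of this patch; the factory class name is invented for illustration, java.util imports omitted):

    Map<AdapterIdentifier, String> registry = new HashMap<>();
    registry.put(new AdapterIdentifier("feeds", "socket_adapter"), "org.example.SocketAdapterFactory");
    // a second instance with the same dataverse and adapter name finds the same entry
    String factoryClass = registry.get(new AdapterIdentifier("feeds", "socket_adapter"));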

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
index 74e98dd..d19eedf 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
@@ -18,11 +18,12 @@
  */
 package org.apache.asterix.external.dataset.adapter;
 
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IDataFlowController;
+import org.apache.asterix.external.api.IFeedAdapter;
 import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
-public class GenericAdapter implements IDataSourceAdapter {
+public class GenericAdapter implements IFeedAdapter {
 
     private static final long serialVersionUID = 1L;
     private final IDataFlowController controller;
@@ -32,12 +33,12 @@ public class GenericAdapter implements IDataSourceAdapter {
     }
 
     @Override
-    public void start(int partition, IFrameWriter writer) throws Exception {
+    public void start(int partition, IFrameWriter writer) throws HyracksDataException {
         controller.start(writer);
     }
 
     @Override
-    public boolean stop() throws Exception {
+    public boolean stop() throws HyracksDataException {
         return controller.stop();
     }
 
@@ -45,4 +46,14 @@ public class GenericAdapter implements IDataSourceAdapter {
     public boolean handleException(Throwable e) {
         return controller.handleException(e);
     }
+
+    @Override
+    public boolean pause() throws HyracksDataException {
+        return controller.pause();
+    }
+
+    @Override
+    public boolean resume() throws HyracksDataException {
+        return controller.resume();
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/StreamBasedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/StreamBasedAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/StreamBasedAdapter.java
deleted file mode 100644
index 3f10dc4..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/StreamBasedAdapter.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.dataset.adapter;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.std.file.ITupleParser;
-import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
-
-public abstract class StreamBasedAdapter implements IDataSourceAdapter {
-
-    private static final long serialVersionUID = 1L;
-
-    protected static final Logger LOGGER = Logger.getLogger(StreamBasedAdapter.class.getName());
-
-    public abstract InputStream getInputStream(int partition) throws IOException;
-
-    protected final ITupleParser tupleParser;
-
-    protected final IAType sourceDatatype;
-
-    public StreamBasedAdapter(ITupleParserFactory parserFactory, IAType sourceDatatype, IHyracksTaskContext ctx,
-            int partition) throws HyracksDataException {
-        this.tupleParser = parserFactory.createTupleParser(ctx);
-        this.sourceDatatype = sourceDatatype;
-    }
-
-    @Override
-    public void start(int partition, IFrameWriter writer) throws Exception {
-        InputStream in = getInputStream(partition);
-        if (in != null) {
-            tupleParser.parse(in, writer);
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning(
-                        "Could not obtain input stream for parsing from adapter " + this + "[" + partition + "]");
-            }
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ICentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ICentralFeedManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ICentralFeedManager.java
new file mode 100644
index 0000000..4f0ed77
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/ICentralFeedManager.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.io.IOException;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+
+public interface ICentralFeedManager {
+
+    public void start() throws AsterixException;
+
+    public void stop() throws AsterixException, IOException;
+
+    public IFeedTrackingManager getFeedTrackingManager();
+
+    public IFeedLoadManager getFeedLoadManager();
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IExceptionHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IExceptionHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IExceptionHandler.java
new file mode 100644
index 0000000..ec0af1c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IExceptionHandler.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Handles an exception encountered during processing of a data frame.
+ * When the exception is of type {@code FrameDataException}, the offending
+ * tuple is logged and a new frame containing the tuples that follow it is
+ * returned. This functionality is used during feed ingestion to bypass an
+ * exception-generating tuple and thus prevent the data flow from terminating.
+ */
+public interface IExceptionHandler {
+
+    /**
+     * @param e
+     *            the exception that needs to be handled
+     * @param frame
+     *            the frame that was being processed when the exception occurred
+     * @return a new frame containing the tuples that follow the exception-generating tuple
+     * @throws HyracksDataException
+     */
+    public ByteBuffer handleException(Exception e, ByteBuffer frame);
+}
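An editorial sketch (not part of this patch) of the simplest conforming implementation: it only logs the failure and hands the frame back untouched, whereas a production handler would rebuild the frame without the offending tuple as the Javadoc above describes.

    import java.nio.ByteBuffer;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class LoggingExceptionHandler implements IExceptionHandler {

        private static final Logger LOGGER = Logger.getLogger(LoggingExceptionHandler.class.getName());

        @Override
        public ByteBuffer handleException(Exception e, ByteBuffer frame) {
            LOGGER.log(Level.WARNING, "Exception while processing a feed frame", e);
            return frame; // purely illustrative; no tuples are pruned here
        }
    }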

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java
new file mode 100644
index 0000000..6865522
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeed.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.io.Serializable;
+import java.util.Map;
+
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.external.feed.management.FeedId;
+
+public interface IFeed extends Serializable {
+
+    public enum FeedType {
+        /**
+         * A feed that derives its data from an external source.
+         */
+        PRIMARY,
+
+        /**
+         * A feed that derives its data from another primary or secondary feed.
+         */
+        SECONDARY
+    }
+
+    public FeedType getFeedType();
+
+    public FunctionSignature getAppliedFunction();
+
+    public String getFeedName();
+
+    public String getDataverseName();
+
+    public FeedId getFeedId();
+
+    public Map<String, String> getAdapterConfiguration();
+
+    public String getAdapterName();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedConnectionManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedConnectionManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedConnectionManager.java
new file mode 100644
index 0000000..35d4cd7
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedConnectionManager.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedRuntimeManager;
+import org.apache.asterix.external.feed.runtime.FeedRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+
+/**
+ * Handle (de)registration of feeds for delivery of control messages.
+ */
+public interface IFeedConnectionManager {
+
+    /**
+     * Allows registration of a feedRuntime.
+     * 
+     * @param feedRuntime
+     * @throws Exception
+     */
+    public void registerFeedRuntime(FeedConnectionId connectionId, FeedRuntime feedRuntime) throws Exception;
+
+    /**
+     * Obtain feed runtime corresponding to a feedRuntimeId
+     * 
+     * @param feedRuntimeId
+     * @return the feed runtime registered against the given connection and runtime ids
+     */
+    public FeedRuntime getFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId);
+
+    /**
+     * De-register a feed
+     * 
+     * @param feedConnection
+     * @throws IOException
+     */
+    void deregisterFeed(FeedConnectionId feedConnection);
+
+    /**
+     * Obtain the feed runtime manager associated with a feed.
+     * 
+     * @param feedConnection
+     * @return the feed runtime manager associated with the feed connection
+     */
+    public FeedRuntimeManager getFeedRuntimeManager(FeedConnectionId feedConnection);
+
+    /**
+     * Allows de-registration of a feed runtime.
+     * 
+     * @param feedRuntimeId
+     */
+    void deRegisterFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId);
+
+    public List<FeedRuntimeId> getRegisteredRuntimes();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedFrameHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedFrameHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedFrameHandler.java
new file mode 100644
index 0000000..9cb0feb
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedFrameHandler.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.nio.ByteBuffer;
+import java.util.Iterator;
+
+import org.apache.asterix.external.feed.dataflow.DataBucket;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public interface IFeedFrameHandler {
+
+    public void handleFrame(ByteBuffer frame) throws HyracksDataException;
+
+    public void handleDataBucket(DataBucket bucket);
+
+    public void close();
+
+    public Iterator<ByteBuffer> replayData() throws HyracksDataException;
+
+    public String getSummary();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedJoint.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedJoint.java
new file mode 100644
index 0000000..d990e45
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedJoint.java
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.util.List;
+
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+
+public interface IFeedJoint {
+
+    public enum FeedJointType {
+        /** Feed Joint is located at the intake stage of a primary feed **/
+        INTAKE,
+
+        /** Feed Joint is located at the compute stage of a primary/secondary feed **/
+        COMPUTE
+    }
+
+    public enum State {
+        /** Initial state of a feed joint after creation but prior to the scheduling of the corresponding Hyracks job. **/
+        CREATED,
+
+        /** State acquired once the Hyracks job has been created and the physical locations of the joint are known. **/
+        INITIALIZED,
+
+        /** State acquired once the Hyracks job has started, at which point data begins to flow through the joint. **/
+        ACTIVE
+    }
+
+    /**
+     * @return the {@link State} associated with the FeedJoint
+     */
+    public State getState();
+
+    /**
+     * @return the {@link FeedJointType} associated with the FeedJoint
+     */
+    public FeedJointType getType();
+
+    /**
+     * @return the list of data receivers that are
+     *         receiving the data flowing through this FeedJoint
+     */
+    public List<FeedConnectionId> getReceivers();
+
+    /**
+     * @return the list of pending subscription requests ({@link FeedConnectionRequest}) submitted for data flowing through the FeedJoint
+     */
+    public List<FeedConnectionRequest> getConnectionRequests();
+
+    /**
+     * @return the subscription location {@link ConnectionLocation} associated with the FeedJoint
+     */
+    public ConnectionLocation getConnectionLocation();
+
+    /**
+     * @return the unique {@link FeedJointKey} associated with the FeedJoint
+     */
+    public FeedJointKey getFeedJointKey();
+
+    /**
+     * Returns the registered receiver corresponding to a given feed connection id.
+     * 
+     * @param feedConnectionId
+     *            the unique id of a feed connection
+     * @return the {@link FeedConnectionId} of the matching receiver
+     */
+    public FeedConnectionId getReceiver(FeedConnectionId feedConnectionId);
+
+    /**
+     * @param active
+     */
+    public void setState(State active);
+
+    /**
+     * Remove the subscriber from the set of registered subscribers to the FeedJoint
+     * 
+     * @param connectionId
+     *            the connectionId that needs to be removed
+     */
+    public void removeReceiver(FeedConnectionId connectionId);
+
+    public FeedId getOwnerFeedId();
+
+    /**
+     * Add a feed connectionId to the set of registered subscribers
+     * 
+     * @param connectionId
+     */
+    public void addReceiver(FeedConnectionId connectionId);
+
+    /**
+     * Add a feed subscription request {@link FeedConnectionRequest} for the FeedJoint
+     * 
+     * @param request
+     */
+    public void addConnectionRequest(FeedConnectionRequest request);
+
+    public FeedConnectionId getProvider();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleEventSubscriber.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleEventSubscriber.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleEventSubscriber.java
new file mode 100644
index 0000000..0c8724e
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleEventSubscriber.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+
+public interface IFeedLifecycleEventSubscriber {
+
+    public enum FeedLifecycleEvent {
+        FEED_INTAKE_STARTED,
+        FEED_COLLECT_STARTED,
+        FEED_INTAKE_FAILURE,
+        FEED_COLLECT_FAILURE,
+        FEED_ENDED
+    }
+
+    public void assertEvent(FeedLifecycleEvent event) throws AsterixException, InterruptedException;
+
+    public void handleFeedEvent(FeedLifecycleEvent event);
+}
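The assertEvent() signature (it may throw InterruptedException) suggests a blocking wait for a lifecycle event. An editorial sketch (not part of this patch) of one way such a subscriber could be implemented with a queue; the class name and the exact failure behavior are assumptions:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    import org.apache.asterix.common.exceptions.AsterixException;

    public class QueueingEventSubscriber implements IFeedLifecycleEventSubscriber {

        private final BlockingQueue<FeedLifecycleEvent> events = new LinkedBlockingQueue<>();

        @Override
        public void handleFeedEvent(FeedLifecycleEvent event) {
            events.add(event); // record the event and wake any waiting thread
        }

        @Override
        public void assertEvent(FeedLifecycleEvent expected) throws AsterixException, InterruptedException {
            FeedLifecycleEvent observed = events.take(); // block until the next event arrives
            if (observed != expected) {
                throw new AsterixException("Expected " + expected + " but observed " + observed);
            }
        }
    }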

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleIntakeEventSubscriber.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleIntakeEventSubscriber.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleIntakeEventSubscriber.java
new file mode 100644
index 0000000..b9caa0d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleIntakeEventSubscriber.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
+
+public interface IFeedLifecycleIntakeEventSubscriber extends IFeedLifecycleEventSubscriber {
+
+    public void handleFeedEvent(FeedIntakeInfo iInfo, FeedLifecycleEvent event) throws AsterixException;
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleListener.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleListener.java
new file mode 100644
index 0000000..ce82aaf
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLifecycleListener.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.util.List;
+
+import org.apache.asterix.common.api.IClusterEventsSubscriber;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+import org.apache.hyracks.api.job.IJobLifecycleListener;
+
+public interface IFeedLifecycleListener extends IJobLifecycleListener, IClusterEventsSubscriber {
+
+    public enum ConnectionLocation {
+        SOURCE_FEED_INTAKE_STAGE,
+        SOURCE_FEED_COMPUTE_STAGE
+    }
+
+    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJoinKey);
+
+    public boolean isFeedJointAvailable(FeedJointKey feedJoinKey);
+
+    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId);
+
+    public List<String> getComputeLocations(FeedId feedId);
+
+    public List<String> getIntakeLocations(FeedId feedId);
+
+    public List<String> getStoreLocations(FeedConnectionId feedId);
+
+    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber);
+
+    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber);
+
+    public List<String> getCollectLocations(FeedConnectionId feedConnectionId);
+
+    boolean isFeedConnectionActive(FeedConnectionId connectionId);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLoadManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLoadManager.java
new file mode 100644
index 0000000..f511979
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedLoadManager.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import java.util.Collection;
+import java.util.List;
+
+import org.json.JSONException;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedCongestionMessage;
+import org.apache.asterix.external.feed.message.FeedReportMessage;
+import org.apache.asterix.external.feed.message.ScaleInReportMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.feed.watch.NodeLoadReport;
+
+public interface IFeedLoadManager {
+
+    public void submitNodeLoadReport(NodeLoadReport report);
+
+    public void reportCongestion(FeedCongestionMessage message) throws JSONException, AsterixException;
+
+    public void submitFeedRuntimeReport(FeedReportMessage message);
+
+    public void submitScaleInPossibleReport(ScaleInReportMessage sm) throws AsterixException, Exception;
+
+    public List<String> getNodes(int required);
+
+    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception;
+
+    int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType);
+
+    void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity);
+
+    void removeFeedActivity(FeedConnectionId connectionId);
+    
+    public FeedActivity getFeedActivity(FeedConnectionId connectionId);
+
+    public Collection<FeedActivity> getFeedActivities();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedManager.java
new file mode 100644
index 0000000..b3ad0a5
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedManager.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+import org.apache.asterix.common.config.AsterixFeedProperties;
+
+/**
+ * Provides access to services related to feed management within a node controller
+ */
+public interface IFeedManager {
+
+    /**
+     * gets the handle to the singleton instance of subscription manager
+     * @return the singleton instance of subscription manager
+     * @see IFeedSubscriptionManager
+     */
+    public IFeedSubscriptionManager getFeedSubscriptionManager();
+
+    /**
+     * gets the handle to the singleton instance of connection manager
+     * @return the singleton instance of connection manager
+     * @see IFeedConnectionManager
+     */
+    public IFeedConnectionManager getFeedConnectionManager();
+
+    /**
+     * gets the handle to the singleton instance of memory manager
+     * @return the singleton instance of memory manager
+     * @see IFeedMemoryManager
+     */
+    public IFeedMemoryManager getFeedMemoryManager();
+
+    /**
+     * gets the handle to the singleton instance of feed metadata manager
+     * @return the singleton instance of feed metadata manager
+     * @see IFeedMetadataManager
+     */
+    public IFeedMetadataManager getFeedMetadataManager();
+
+    /**
+     * gets the handle to the singleton instance of feed metric collector
+     * @return the singleton instance of feed metric collector
+     * @see IFeedMetricCollector
+     */
+    public IFeedMetricCollector getFeedMetricCollector();
+
+    /**
+     * gets the handle to the singleton instance of feed message service
+     * @return the singleton instance of feed message service
+     * @see IFeedMessageService
+     */
+    public IFeedMessageService getFeedMessageService();
+
+    public AsterixFeedProperties getAsterixFeedProperties();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryComponent.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryComponent.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryComponent.java
new file mode 100644
index 0000000..313dc1b
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/api/IFeedMemoryComponent.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.api;
+
+/**
+ * Represents an in-memory component required for storing frames that contain feed tuples.
+ * The component's memory footprint is measured and regulated by the {@link IFeedMemoryManager}.
+ * Any expansion in size is accounted for and can be restricted by the {@link IFeedMemoryManager}.
+ **/
+public interface IFeedMemoryComponent {
+
+    public enum Type {
+
+        /** A pool of reusable frames **/
+        POOL,
+
+        /** An ordered list of frames **/
+        COLLECTION
+    }
+
+    /** Gets the unique id associated with the memory component **/
+    public int getComponentId();
+
+    /** Gets the type associated with the component. **/
+    public Type getType();
+
+    /** Gets the current size (number of allocated frames) of the component. **/
+    public int getTotalAllocation();
+
+    /**
+     * Expands this memory component by the specified number of frames
+     * 
+     * @param delta
+     *            the amount (measured in number of frames) by which this memory component
+     *            should be expanded
+     */
+    public void expand(int delta);
+
+    /** Clears the allocated frames as a step to reclaim the memory **/
+    public void reset();
+
+}
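
A minimal sketch of the contract above (not taken from the patch): an IFeedMemoryComponent implementation that only tracks its frame count. Real components hold frame buffers and are budgeted by the IFeedMemoryManager.

    package org.apache.asterix.external.feed.api;

    // Illustrative sketch: records allocation counts only, to show the
    // expand()/reset() lifecycle defined by IFeedMemoryComponent.
    public class CountingMemoryComponent implements IFeedMemoryComponent {

        private final int componentId;
        private final Type type;
        private int allocatedFrames; // current size, in frames

        public CountingMemoryComponent(int componentId, Type type) {
            this.componentId = componentId;
            this.type = type;
        }

        @Override
        public int getComponentId() {
            return componentId;
        }

        @Override
        public Type getType() {
            return type;
        }

        @Override
        public int getTotalAllocation() {
            return allocatedFrames;
        }

        @Override
        public void expand(int delta) {
            // A real component would acquire 'delta' frames here, subject to the
            // memory manager's budget; this sketch only records the count.
            allocatedFrames += delta;
        }

        @Override
        public void reset() {
            // Drop all allocated frames so the memory can be reclaimed.
            allocatedFrames = 0;
        }
    }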



http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java
deleted file mode 100644
index d53428d..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLifecycleListener.java
+++ /dev/null
@@ -1,497 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.api.common.SessionConfig;
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.common.api.IClusterManagementWork;
-import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
-import org.apache.asterix.common.api.IClusterManagementWorkResponse;
-import org.apache.asterix.common.feeds.FeedConnectJobInfo;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedIntakeInfo;
-import org.apache.asterix.common.feeds.FeedJobInfo;
-import org.apache.asterix.common.feeds.FeedJobInfo.FeedJobState;
-import org.apache.asterix.common.feeds.FeedJointKey;
-import org.apache.asterix.common.feeds.api.IFeedJoint;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener;
-import org.apache.asterix.common.feeds.api.IIntakeProgressTracker;
-import org.apache.asterix.common.feeds.message.StorageReportFeedMessage;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.statement.DataverseDecl;
-import org.apache.asterix.lang.common.statement.DisconnectFeedStatement;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.cluster.AddNodeWork;
-import org.apache.asterix.metadata.cluster.ClusterManager;
-import org.apache.asterix.metadata.feeds.FeedCollectOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.FeedIntakeOperatorDescriptor;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.asterix.om.util.AsterixClusterProperties;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksException;
-import org.apache.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-/**
- * A listener that subscribes to events associated with cluster membership
- * (nodes joining/leaving the cluster) and job lifecycle (start/end of a job).
- * Subscription to such events allows keeping track of feed ingestion jobs and
- * take any corrective action that may be required when a node involved in a
- * feed leaves the cluster.
- */
-public class FeedLifecycleListener implements IFeedLifecycleListener {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedLifecycleListener.class.getName());
-
-    public static FeedLifecycleListener INSTANCE = new FeedLifecycleListener();
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    private final LinkedBlockingQueue<Message> jobEventInbox;
-    private final LinkedBlockingQueue<IClusterManagementWorkResponse> responseInbox;
-    private final Map<FeedCollectInfo, List<String>> dependentFeeds = new HashMap<FeedCollectInfo, List<String>>();
-    private final Map<FeedConnectionId, LinkedBlockingQueue<String>> feedReportQueue;
-    private final FeedJobNotificationHandler feedJobNotificationHandler;
-    private final FeedWorkRequestResponseHandler feedWorkRequestResponseHandler;
-    private final ExecutorService executorService;
-
-    private ClusterState state;
-
-    private FeedLifecycleListener() {
-        this.jobEventInbox = new LinkedBlockingQueue<Message>();
-        this.feedJobNotificationHandler = new FeedJobNotificationHandler(jobEventInbox);
-        this.responseInbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
-        this.feedWorkRequestResponseHandler = new FeedWorkRequestResponseHandler(responseInbox);
-        this.feedReportQueue = new HashMap<FeedConnectionId, LinkedBlockingQueue<String>>();
-        this.executorService = Executors.newCachedThreadPool();
-        this.executorService.execute(feedJobNotificationHandler);
-        this.executorService.execute(feedWorkRequestResponseHandler);
-        ClusterManager.INSTANCE.registerSubscriber(this);
-        this.state = AsterixClusterProperties.INSTANCE.getState();
-    }
-
-    @Override
-    public void notifyJobStart(JobId jobId) throws HyracksException {
-        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
-            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_START));
-        }
-    }
-
-    @Override
-    public void notifyJobFinish(JobId jobId) throws HyracksException {
-        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
-            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_FINISH));
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("NO NEED TO NOTIFY JOB FINISH!");
-            }
-        }
-    }
-
-    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedConnectJobInfo(connectionId);
-    }
-
-    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
-            IIntakeProgressTracker feedIntakeProgressTracker) {
-        feedJobNotificationHandler.registerFeedIntakeProgressTracker(connectionId, feedIntakeProgressTracker);
-    }
-
-    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
-        feedJobNotificationHandler.deregisterFeedIntakeProgressTracker(connectionId);
-    }
-
-    public void updateTrackingInformation(StorageReportFeedMessage srm) {
-        feedJobNotificationHandler.updateTrackingInformation(srm);
-    }
-
-    /*
-     * Traverse job specification to categorize job as a feed intake job or a feed collection job
-     */
-    @Override
-    public void notifyJobCreation(JobId jobId, IActivityClusterGraphGeneratorFactory acggf) throws HyracksException {
-        JobSpecification spec = acggf.getJobSpecification();
-        FeedConnectionId feedConnectionId = null;
-        Map<String, String> feedPolicy = null;
-        for (IOperatorDescriptor opDesc : spec.getOperatorMap().values()) {
-            if (opDesc instanceof FeedCollectOperatorDescriptor) {
-                feedConnectionId = ((FeedCollectOperatorDescriptor) opDesc).getFeedConnectionId();
-                feedPolicy = ((FeedCollectOperatorDescriptor) opDesc).getFeedPolicyProperties();
-                feedJobNotificationHandler.registerFeedCollectionJob(
-                        ((FeedCollectOperatorDescriptor) opDesc).getSourceFeedId(), feedConnectionId, jobId, spec,
-                        feedPolicy);
-                break;
-            } else if (opDesc instanceof FeedIntakeOperatorDescriptor) {
-                feedJobNotificationHandler.registerFeedIntakeJob(((FeedIntakeOperatorDescriptor) opDesc).getFeedId(),
-                        jobId, spec);
-                break;
-            }
-        }
-    }
-
-    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
-        feedJobNotificationHandler.setJobState(connectionId, jobState);
-    }
-
-    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedJobState(connectionId);
-    }
-
-    public static class Message {
-        public JobId jobId;
-
-        public enum MessageKind {
-            JOB_START,
-            JOB_FINISH
-        }
-
-        public MessageKind messageKind;
-
-        public Message(JobId jobId, MessageKind msgKind) {
-            this.jobId = jobId;
-            this.messageKind = msgKind;
-        }
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
-        Set<IClusterManagementWork> workToBeDone = new HashSet<IClusterManagementWork>();
-
-        Collection<FeedIntakeInfo> intakeInfos = feedJobNotificationHandler.getFeedIntakeInfos();
-        Collection<FeedConnectJobInfo> connectJobInfos = feedJobNotificationHandler.getFeedConnectInfos();
-
-        Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<String, List<FeedJobInfo>>();
-
-        for (String deadNode : deadNodeIds) {
-            for (FeedIntakeInfo intakeInfo : intakeInfos) {
-                if (intakeInfo.getIntakeLocation().contains(deadNode)) {
-                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
-                    if (infos == null) {
-                        infos = new ArrayList<FeedJobInfo>();
-                        impactedJobs.put(deadNode, infos);
-                    }
-                    infos.add(intakeInfo);
-                    intakeInfo.setState(FeedJobState.UNDER_RECOVERY);
-                }
-            }
-
-            for (FeedConnectJobInfo connectInfo : connectJobInfos) {
-                if (connectInfo.getStorageLocations().contains(deadNode)) {
-                    continue;
-                }
-                if (connectInfo.getComputeLocations().contains(deadNode)
-                        || connectInfo.getCollectLocations().contains(deadNode)) {
-                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
-                    if (infos == null) {
-                        infos = new ArrayList<FeedJobInfo>();
-                        impactedJobs.put(deadNode, infos);
-                    }
-                    infos.add(connectInfo);
-                    connectInfo.setState(FeedJobState.UNDER_RECOVERY);
-                    feedJobNotificationHandler.deregisterFeedActivity(connectInfo);
-                }
-            }
-
-        }
-
-        if (impactedJobs.size() > 0) {
-            AddNodeWork addNodeWork = new AddNodeWork(deadNodeIds, deadNodeIds.size(), this);
-            feedWorkRequestResponseHandler.registerFeedWork(addNodeWork.getWorkId(), impactedJobs);
-            workToBeDone.add(addNodeWork);
-        }
-        return workToBeDone;
-
-    }
-
-    public static class FailureReport {
-
-        private final List<Pair<FeedConnectJobInfo, List<String>>> recoverableConnectJobs;
-        private final Map<IFeedJoint, List<String>> recoverableIntakeFeedIds;
-
-        public FailureReport(Map<IFeedJoint, List<String>> recoverableIntakeFeedIds,
-                List<Pair<FeedConnectJobInfo, List<String>>> recoverableSubscribers) {
-            this.recoverableConnectJobs = recoverableSubscribers;
-            this.recoverableIntakeFeedIds = recoverableIntakeFeedIds;
-        }
-
-        public List<Pair<FeedConnectJobInfo, List<String>>> getRecoverableSubscribers() {
-            return recoverableConnectJobs;
-        }
-
-        public Map<IFeedJoint, List<String>> getRecoverableIntakeFeedIds() {
-            return recoverableIntakeFeedIds;
-        }
-
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
-        ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(joinedNodeId + " joined the cluster. " + "Asterix state: " + newState);
-        }
-
-        boolean needToReActivateFeeds = !newState.equals(state) && (newState == ClusterState.ACTIVE);
-        if (needToReActivateFeeds) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(joinedNodeId + " Resuming loser feeds (if any)");
-            }
-            try {
-                FeedsActivator activator = new FeedsActivator();
-                (new Thread(activator)).start();
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Exception in resuming feeds" + e.getMessage());
-                }
-            }
-            state = newState;
-        } else {
-            List<FeedCollectInfo> feedsThatCanBeRevived = new ArrayList<FeedCollectInfo>();
-            for (Entry<FeedCollectInfo, List<String>> entry : dependentFeeds.entrySet()) {
-                List<String> requiredNodeIds = entry.getValue();
-                if (requiredNodeIds.contains(joinedNodeId)) {
-                    requiredNodeIds.remove(joinedNodeId);
-                    if (requiredNodeIds.isEmpty()) {
-                        feedsThatCanBeRevived.add(entry.getKey());
-                    }
-                }
-            }
-            if (!feedsThatCanBeRevived.isEmpty()) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(joinedNodeId + " Resuming feeds after rejoining of node " + joinedNodeId);
-                }
-                FeedsActivator activator = new FeedsActivator(feedsThatCanBeRevived);
-                (new Thread(activator)).start();
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
-        try {
-            responseInbox.put(response);
-        } catch (InterruptedException e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Interrupted exception");
-            }
-        }
-    }
-
-    @Override
-    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
-        switch (newState) {
-            case ACTIVE:
-                if (previousState.equals(ClusterState.UNUSABLE)) {
-                    try {
-                        FeedsActivator activator = new FeedsActivator();
-                        // (new Thread(activator)).start();
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Exception in resuming feeds" + e.getMessage());
-                        }
-                    }
-                }
-                break;
-            default:
-                break;
-        }
-
-    }
-
-    public static class FeedsDeActivator implements Runnable {
-
-        private List<FeedConnectJobInfo> failedConnectjobs;
-
-        public FeedsDeActivator(List<FeedConnectJobInfo> failedConnectjobs) {
-            this.failedConnectjobs = failedConnectjobs;
-        }
-
-        @Override
-        public void run() {
-            for (FeedConnectJobInfo failedConnectJob : failedConnectjobs) {
-                endFeed(failedConnectJob);
-            }
-        }
-
-        private void endFeed(FeedConnectJobInfo cInfo) {
-            MetadataTransactionContext ctx = null;
-            PrintWriter writer = new PrintWriter(System.out, true);
-            SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-
-            try {
-                ctx = MetadataManager.INSTANCE.beginTransaction();
-                FeedId feedId = cInfo.getConnectionId().getFeedId();
-                DisconnectFeedStatement stmt = new DisconnectFeedStatement(new Identifier(feedId.getDataverse()),
-                        new Identifier(feedId.getFeedName()), new Identifier(cInfo.getConnectionId().getDatasetName()));
-                List<Statement> statements = new ArrayList<Statement>();
-                DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(feedId.getDataverse()));
-                statements.add(dataverseDecl);
-                statements.add(stmt);
-                QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-                translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                        QueryTranslator.ResultDelivery.SYNC);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("End irrecoverable feed: " + cInfo.getConnectionId());
-                }
-                MetadataManager.INSTANCE.commitTransaction(ctx);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Exception in ending loser feed: " + cInfo.getConnectionId() + " Exception "
-                            + e.getMessage());
-                }
-                e.printStackTrace();
-                try {
-                    MetadataManager.INSTANCE.abortTransaction(ctx);
-                } catch (Exception e2) {
-                    e2.addSuppressed(e);
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe("Exception in aborting transaction! System is in inconsistent state");
-                    }
-                }
-
-            }
-
-        }
-    }
-
-    public void submitFeedConnectionRequest(IFeedJoint feedPoint, FeedConnectionRequest subscriptionRequest)
-            throws Exception {
-        feedJobNotificationHandler.submitFeedConnectionRequest(feedPoint, subscriptionRequest);
-    }
-
-    @Override
-    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId) {
-        List<FeedConnectionId> connections = new ArrayList<FeedConnectionId>();
-        Collection<FeedConnectionId> activeConnections = feedJobNotificationHandler.getActiveFeedConnections();
-        if (feedId != null) {
-            for (FeedConnectionId connectionId : activeConnections) {
-                if (connectionId.getFeedId().equals(feedId)) {
-                    connections.add(connectionId);
-                }
-            }
-        } else {
-            connections.addAll(activeConnections);
-        }
-        return connections;
-    }
-
-    @Override
-    public List<String> getComputeLocations(FeedId feedId) {
-        return feedJobNotificationHandler.getFeedComputeLocations(feedId);
-    }
-
-    @Override
-    public List<String> getIntakeLocations(FeedId feedId) {
-        return feedJobNotificationHandler.getFeedIntakeLocations(feedId);
-    }
-
-    @Override
-    public List<String> getStoreLocations(FeedConnectionId feedConnectionId) {
-        return feedJobNotificationHandler.getFeedStorageLocations(feedConnectionId);
-    }
-
-    @Override
-    public List<String> getCollectLocations(FeedConnectionId feedConnectionId) {
-        return feedJobNotificationHandler.getFeedCollectLocations(feedConnectionId);
-    }
-
-    @Override
-    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.isFeedConnectionActive(connectionId);
-    }
-
-    public void reportPartialDisconnection(FeedConnectionId connectionId) {
-        feedJobNotificationHandler.removeFeedJointsPostPipelineTermination(connectionId);
-    }
-
-    public void registerFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
-        feedReportQueue.put(feedId, queue);
-    }
-
-    public void deregisterFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
-        feedReportQueue.remove(feedId);
-    }
-
-    public LinkedBlockingQueue<String> getFeedReportQueue(FeedConnectionId feedId) {
-        return feedReportQueue.get(feedId);
-    }
-
-    @Override
-    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.getAvailableFeedJoint(feedJointKey);
-    }
-
-    @Override
-    public boolean isFeedJointAvailable(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.isFeedPointAvailable(feedJointKey);
-    }
-
-    public void registerFeedJoint(IFeedJoint feedJoint) {
-        feedJobNotificationHandler.registerFeedJoint(feedJoint);
-    }
-
-    public IFeedJoint getFeedJoint(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.getFeedJoint(feedJointKey);
-    }
-
-    @Override
-    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        feedJobNotificationHandler.registerFeedEventSubscriber(connectionId, subscriber);
-    }
-
-    @Override
-    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        feedJobNotificationHandler.deregisterFeedEventSubscriber(connectionId, subscriber);
-
-    }
-
-    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getCollectJobSpecification(connectionId);
-    }
-
-    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedCollectJobId(connectionId);
-    }
-
-}
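
For readers tracing the recovery path, the grouping step in notifyNodeFailure() above can be summarized by the standalone helper below. This is an illustrative sketch, not code from the patch; the types are the ones imported by the deleted class.

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    import org.apache.asterix.common.feeds.FeedIntakeInfo;
    import org.apache.asterix.common.feeds.FeedJobInfo;
    import org.apache.asterix.common.feeds.FeedJobInfo.FeedJobState;

    public class NodeFailureGroupingSketch {

        // Groups intake jobs whose intake location includes a dead node and marks
        // them UNDER_RECOVERY, mirroring the first loop of notifyNodeFailure().
        public static Map<String, List<FeedJobInfo>> groupByDeadNode(Set<String> deadNodeIds,
                Collection<FeedIntakeInfo> intakeInfos) {
            Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<>();
            for (String deadNode : deadNodeIds) {
                for (FeedIntakeInfo intakeInfo : intakeInfos) {
                    if (intakeInfo.getIntakeLocation().contains(deadNode)) {
                        impactedJobs.computeIfAbsent(deadNode, k -> new ArrayList<>()).add(intakeInfo);
                        intakeInfo.setState(FeedJobState.UNDER_RECOVERY);
                    }
                }
            }
            return impactedJobs;
        }
    }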

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java
deleted file mode 100644
index cb3133e..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedLoadManager.java
+++ /dev/null
@@ -1,302 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedActivity;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedJobInfo.FeedJobState;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.NodeLoadReport;
-import org.apache.asterix.common.feeds.api.IFeedLoadManager;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedTrackingManager;
-import org.apache.asterix.common.feeds.message.FeedCongestionMessage;
-import org.apache.asterix.common.feeds.message.FeedReportMessage;
-import org.apache.asterix.common.feeds.message.ScaleInReportMessage;
-import org.apache.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-import org.apache.asterix.file.FeedOperations;
-import org.apache.asterix.metadata.feeds.FeedUtil;
-import org.apache.asterix.metadata.feeds.PrepareStallMessage;
-import org.apache.asterix.metadata.feeds.TerminateDataFlowMessage;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedLoadManager implements IFeedLoadManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedLoadManager.class.getName());
-
-    private static final long MIN_MODIFICATION_INTERVAL = 180000; // 3 minutes
-    private final TreeSet<NodeLoadReport> nodeReports;
-    private final Map<FeedConnectionId, FeedActivity> feedActivities;
-    private final Map<String, Pair<Integer, Integer>> feedMetrics;
-
-    private FeedConnectionId lastModified;
-    private long lastModifiedTimestamp;
-
-    private static final int UNKNOWN = -1;
-
-    public FeedLoadManager() {
-        this.nodeReports = new TreeSet<NodeLoadReport>();
-        this.feedActivities = new HashMap<FeedConnectionId, FeedActivity>();
-        this.feedMetrics = new HashMap<String, Pair<Integer, Integer>>();
-    }
-
-    @Override
-    public void submitNodeLoadReport(NodeLoadReport report) {
-        nodeReports.remove(report);
-        nodeReports.add(report);
-    }
-
-    @Override
-    public void reportCongestion(FeedCongestionMessage message) throws AsterixException {
-        FeedRuntimeId runtimeId = message.getRuntimeId();
-        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
-        if (jobState == null
-                || (jobState.equals(FeedJobState.UNDER_RECOVERY))
-                || (message.getConnectionId().equals(lastModified) && System.currentTimeMillis()
-                        - lastModifiedTimestamp < MIN_MODIFICATION_INTERVAL)) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring congestion report from " + runtimeId);
-            }
-            return;
-        } else {
-            try {
-                FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
-                int inflowRate = message.getInflowRate();
-                int outflowRate = message.getOutflowRate();
-                List<String> currentComputeLocations = new ArrayList<String>();
-                currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message
-                        .getConnectionId().getFeedId()));
-                int computeCardinality = currentComputeLocations.size();
-                int requiredCardinality = (int) Math
-                        .ceil((double) ((computeCardinality * inflowRate) / (double) outflowRate)) + 5;
-                int additionalComputeNodes = requiredCardinality - computeCardinality;
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("INCREASING COMPUTE CARDINALITY from " + computeCardinality + " by "
-                            + additionalComputeNodes);
-                }
-
-                List<String> helperComputeNodes = getNodeForSubstitution(additionalComputeNodes);
-
-                // Step 1) Alter the original feed job to adjust the cardinality
-                JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
-                        .getConnectionId());
-                helperComputeNodes.addAll(currentComputeLocations);
-                List<String> newLocations = new ArrayList<String>();
-                newLocations.addAll(currentComputeLocations);
-                newLocations.addAll(helperComputeNodes);
-                FeedUtil.increaseCardinality(jobSpec, FeedRuntimeType.COMPUTE, requiredCardinality, newLocations);
-
-                // Step 2) send prepare to  stall message
-                gracefullyTerminateDataFlow(message.getConnectionId(), Integer.MAX_VALUE);
-
-                // Step 3) run the altered job specification 
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("New Job after adjusting to the workload " + jobSpec);
-                }
-
-                Thread.sleep(10000);
-                runJob(jobSpec, false);
-                lastModified = message.getConnectionId();
-                lastModifiedTimestamp = System.currentTimeMillis();
-
-            } catch (Exception e) {
-                e.printStackTrace();
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("Unable to form the required job for scaling in/out" + e.getMessage());
-                }
-                throw new AsterixException(e);
-            }
-        }
-    }
-
-    @Override
-    public void submitScaleInPossibleReport(ScaleInReportMessage message) throws Exception {
-        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
-        if (jobState == null || (jobState.equals(FeedJobState.UNDER_RECOVERY))) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("JobState information for job " + "[" + message.getConnectionId() + "]" + " not found ");
-            }
-            return;
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Processing scale-in message " + message);
-            }
-            FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
-            JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
-                    .getConnectionId());
-            int reducedCardinality = message.getReducedCardinaliy();
-            List<String> currentComputeLocations = new ArrayList<String>();
-            currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message.getConnectionId()
-                    .getFeedId()));
-            FeedUtil.decreaseComputeCardinality(jobSpec, FeedRuntimeType.COMPUTE, reducedCardinality,
-                    currentComputeLocations);
-
-            gracefullyTerminateDataFlow(message.getConnectionId(), reducedCardinality - 1);
-            Thread.sleep(3000);
-            JobId newJobId = runJob(jobSpec, false);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Launch modified job" + "[" + newJobId + "]" + "for scale-in \n" + jobSpec);
-            }
-
-        }
-    }
-
-    private void gracefullyTerminateDataFlow(FeedConnectionId connectionId, int computePartitionRetainLimit)
-            throws Exception {
-        // Step 1) send prepare to  stall message
-        PrepareStallMessage stallMessage = new PrepareStallMessage(connectionId, computePartitionRetainLimit);
-        List<String> intakeLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-        List<String> computeLocations = FeedLifecycleListener.INSTANCE.getComputeLocations(connectionId.getFeedId());
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-
-        Set<String> operatorLocations = new HashSet<String>();
-
-        operatorLocations.addAll(intakeLocations);
-        operatorLocations.addAll(computeLocations);
-        operatorLocations.addAll(storageLocations);
-
-        JobSpecification messageJobSpec = FeedOperations.buildPrepareStallMessageJob(stallMessage, operatorLocations);
-        runJob(messageJobSpec, true);
-
-        // Step 2)
-        TerminateDataFlowMessage terminateMesg = new TerminateDataFlowMessage(connectionId);
-        messageJobSpec = FeedOperations.buildTerminateFlowMessageJob(terminateMesg, intakeLocations);
-        runJob(messageJobSpec, true);
-    }
-
-    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobId jobId = hcc.startJob(spec);
-        if (waitForCompletion) {
-            hcc.waitForCompletion(jobId);
-        }
-        return jobId;
-    }
-
-    @Override
-    public void submitFeedRuntimeReport(FeedReportMessage report) {
-        String key = "" + report.getConnectionId() + ":" + report.getRuntimeId().getFeedRuntimeType();
-        Pair<Integer, Integer> value = feedMetrics.get(key);
-        if (value == null) {
-            value = new Pair<Integer, Integer>(report.getValue(), 1);
-            feedMetrics.put(key, value);
-        } else {
-            value.first = value.first + report.getValue();
-            value.second = value.second + 1;
-        }
-    }
-
-    @Override
-    public int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType) {
-        int rVal;
-        String key = "" + connectionId + ":" + runtimeType;
-        Pair<Integer, Integer> value = feedMetrics.get(key);
-        if (value == null) {
-            rVal = UNKNOWN;
-        } else {
-            rVal = value.first / value.second;
-        }
-        return rVal;
-    }
-
-    private List<String> getNodeForSubstitution(int nRequired) {
-        List<String> nodeIds = new ArrayList<String>();
-        Iterator<NodeLoadReport> it = null;
-        int nAdded = 0;
-        while (nAdded < nRequired) {
-            it = nodeReports.iterator();
-            while (it.hasNext()) {
-                nodeIds.add(it.next().getNodeId());
-                nAdded++;
-            }
-        }
-        return nodeIds;
-    }
-
-    @Override
-    public synchronized List<String> getNodes(int required) {
-        Iterator<NodeLoadReport> it;
-        List<String> allocated = new ArrayList<String>();
-        while (allocated.size() < required) {
-            it = nodeReports.iterator();
-            while (it.hasNext() && allocated.size() < required) {
-                allocated.add(it.next().getNodeId());
-            }
-        }
-        return allocated;
-    }
-
-    @Override
-    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception {
-        System.out.println("Throttling Enabled for " + mesg.getConnectionId() + " " + mesg.getFeedRuntimeId());
-        FeedConnectionId connectionId = mesg.getConnectionId();
-        List<String> destinationLocations = new ArrayList<String>();
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-
-        destinationLocations.addAll(storageLocations);
-        destinationLocations.addAll(collectLocations);
-        JobSpecification messageJobSpec = FeedOperations.buildNotifyThrottlingEnabledMessageJob(mesg,
-                destinationLocations);
-        runJob(messageJobSpec, true);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.warning("Acking disabled for " + mesg.getConnectionId() + " in view of activated throttling");
-        }
-        IFeedTrackingManager trackingManager = CentralFeedManager.getInstance().getFeedTrackingManager();
-        trackingManager.disableAcking(connectionId);
-    }
-
-    @Override
-    public void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity) {
-        feedActivities.put(connectionId, activity);
-    }
-
-    @Override
-    public FeedActivity getFeedActivity(FeedConnectionId connectionId) {
-        return feedActivities.get(connectionId);
-    }
-
-    @Override
-    public Collection<FeedActivity> getFeedActivities() {
-        return feedActivities.values();
-    }
-
-    @Override
-    public void removeFeedActivity(FeedConnectionId connectionId) {
-        feedActivities.remove(connectionId);
-    }
-}
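
The congestion handler above sizes the new compute stage as ceil(computeCardinality * inflowRate / outflowRate) plus a fixed headroom of 5 partitions. The sketch below (not from the patch) isolates that arithmetic; the rates in main() are made-up numbers.

    public class ComputeCardinalitySketch {

        // Mirrors the sizing rule used in FeedLoadManager.reportCongestion().
        static int requiredCardinality(int computeCardinality, int inflowRate, int outflowRate) {
            return (int) Math.ceil((computeCardinality * inflowRate) / (double) outflowRate) + 5;
        }

        public static void main(String[] args) {
            // 4 compute partitions receiving 1000 tuples/s but draining only 500 tuples/s:
            // ceil(4 * 1000 / 500) + 5 = 13 partitions, i.e. 9 additional compute nodes.
            System.out.println("required compute cardinality = " + requiredCardinality(4, 1000, 500));
        }
    }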

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java
deleted file mode 100644
index 7f9afb8..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedManager.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.config.AsterixFeedProperties;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedMemoryManager;
-import org.apache.asterix.common.feeds.FeedMessageService;
-import org.apache.asterix.common.feeds.FeedMetricCollector;
-import org.apache.asterix.common.feeds.NodeLoadReportService;
-import org.apache.asterix.common.feeds.api.IFeedConnectionManager;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedMemoryManager;
-import org.apache.asterix.common.feeds.api.IFeedMessageService;
-import org.apache.asterix.common.feeds.api.IFeedMetadataManager;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-import org.apache.asterix.common.feeds.api.IFeedSubscriptionManager;
-import org.apache.asterix.metadata.feeds.FeedConnectionManager;
-import org.apache.asterix.metadata.feeds.FeedSubscriptionManager;
-import org.apache.asterix.om.util.AsterixClusterProperties;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-/**
- * An implementation of the IFeedManager interface.
- * Provides the necessary central repository for registering/retrieving
- * artifacts/services associated with a feed.
- */
-public class FeedManager implements IFeedManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedManager.class.getName());
-
-    private final IFeedSubscriptionManager feedSubscriptionManager;
-
-    private final IFeedConnectionManager feedConnectionManager;
-
-    private final IFeedMemoryManager feedMemoryManager;
-
-    private final IFeedMetricCollector feedMetricCollector;
-
-    private final IFeedMetadataManager feedMetadataManager;
-
-    private final IFeedMessageService feedMessageService;
-
-    private final NodeLoadReportService nodeLoadReportService;
-
-    private final AsterixFeedProperties asterixFeedProperties;
-
-    private final String nodeId;
-
-    private final int frameSize;
-
-    public FeedManager(String nodeId, AsterixFeedProperties feedProperties, int frameSize) throws AsterixException, HyracksDataException {
-        this.nodeId = nodeId;
-        this.feedSubscriptionManager = new FeedSubscriptionManager(nodeId);
-        this.feedConnectionManager = new FeedConnectionManager(nodeId);
-        this.feedMetadataManager = new FeedMetadataManager(nodeId);
-        this.feedMemoryManager = new FeedMemoryManager(nodeId, feedProperties, frameSize);
-        String ccClusterIp = AsterixClusterProperties.INSTANCE.getCluster() != null ? AsterixClusterProperties.INSTANCE
-                .getCluster().getMasterNode().getClusterIp() : "localhost";
-        this.feedMessageService = new FeedMessageService(feedProperties, nodeId, ccClusterIp);
-        this.nodeLoadReportService = new NodeLoadReportService(nodeId, this);
-        try {
-            this.feedMessageService.start();
-            this.nodeLoadReportService.start();
-        } catch (Exception e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to start feed services " + e.getMessage());
-            }
-            e.printStackTrace();
-        }
-        this.feedMetricCollector = new FeedMetricCollector(nodeId);
-        this.frameSize = frameSize;
-        this.asterixFeedProperties = feedProperties;
-    }
-
-    @Override
-    public IFeedSubscriptionManager getFeedSubscriptionManager() {
-        return feedSubscriptionManager;
-    }
-
-    @Override
-    public IFeedConnectionManager getFeedConnectionManager() {
-        return feedConnectionManager;
-    }
-
-    @Override
-    public IFeedMemoryManager getFeedMemoryManager() {
-        return feedMemoryManager;
-    }
-
-    @Override
-    public IFeedMetricCollector getFeedMetricCollector() {
-        return feedMetricCollector;
-    }
-
-    public int getFrameSize() {
-        return frameSize;
-    }
-
-    @Override
-    public IFeedMetadataManager getFeedMetadataManager() {
-        return feedMetadataManager;
-    }
-
-    @Override
-    public IFeedMessageService getFeedMessageService() {
-        return feedMessageService;
-    }
-
-    @Override
-    public String getNodeId() {
-        return nodeId;
-    }
-
-    @Override
-    public String toString() {
-        return "FeedManager " + "[" + nodeId + "]";
-    }
-
-    @Override
-    public AsterixFeedProperties getAsterixFeedProperties() {
-        return asterixFeedProperties;
-    }
-
-}
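
As a usage note (not from the patch), the per-node FeedManager above is constructed once per node controller. The node id and frame size below are placeholders, and the feed properties would normally come from the node's application context.

    import org.apache.asterix.common.config.AsterixFeedProperties;
    import org.apache.asterix.common.feeds.api.IFeedManager;
    import org.apache.asterix.feeds.FeedManager;

    public class FeedManagerWiringSketch {

        // Placeholder node id and frame size; real values come from the NC configuration.
        public static IFeedManager create(AsterixFeedProperties feedProperties) throws Exception {
            return new FeedManager("nc1", feedProperties, 32768);
        }
    }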

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java
deleted file mode 100644
index a3cd217..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMessageReceiver.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.logging.Level;
-
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedTupleCommitAckMessage;
-import org.apache.asterix.common.feeds.MessageReceiver;
-import org.apache.asterix.common.feeds.NodeLoadReport;
-import org.apache.asterix.common.feeds.api.IFeedLoadManager;
-import org.apache.asterix.common.feeds.api.IFeedMessage.MessageType;
-import org.apache.asterix.common.feeds.api.IFeedTrackingManager;
-import org.apache.asterix.common.feeds.message.FeedCongestionMessage;
-import org.apache.asterix.common.feeds.message.FeedReportMessage;
-import org.apache.asterix.common.feeds.message.ScaleInReportMessage;
-import org.apache.asterix.common.feeds.message.StorageReportFeedMessage;
-import org.apache.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-import org.apache.asterix.feeds.CentralFeedManager.AQLExecutor;
-import org.apache.asterix.hyracks.bootstrap.FeedBootstrap;
-
-public class FeedMessageReceiver extends MessageReceiver<String> {
-
-    private static boolean initialized;
-
-    private final IFeedLoadManager feedLoadManager;
-    private final IFeedTrackingManager feedTrackingManager;
-
-    public FeedMessageReceiver(CentralFeedManager centralFeedManager) {
-        this.feedLoadManager = centralFeedManager.getFeedLoadManager();
-        this.feedTrackingManager = centralFeedManager.getFeedTrackingManager();
-    }
-
-    @Override
-    public void processMessage(String message) throws Exception {
-        JSONObject obj = new JSONObject(message);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Received message " + obj);
-        }
-        MessageType messageType = MessageType.valueOf(obj.getString(FeedConstants.MessageConstants.MESSAGE_TYPE));
-        switch (messageType) {
-            case XAQL:
-                if (!initialized) {
-                    FeedBootstrap.setUpInitialArtifacts();
-                    initialized = true;
-                }
-                AQLExecutor.executeAQL(obj.getString(FeedConstants.MessageConstants.AQL));
-                break;
-            case CONGESTION:
-                feedLoadManager.reportCongestion(FeedCongestionMessage.read(obj));
-                break;
-            case FEED_REPORT:
-                feedLoadManager.submitFeedRuntimeReport(FeedReportMessage.read(obj));
-                break;
-            case NODE_REPORT:
-                feedLoadManager.submitNodeLoadReport(NodeLoadReport.read(obj));
-                break;
-            case SCALE_IN_REQUEST:
-                feedLoadManager.submitScaleInPossibleReport(ScaleInReportMessage.read(obj));
-                break;
-            case STORAGE_REPORT:
-                FeedLifecycleListener.INSTANCE.updateTrackingInformation(StorageReportFeedMessage.read(obj));
-                break;
-            case COMMIT_ACK:
-                feedTrackingManager.submitAckReport(FeedTupleCommitAckMessage.read(obj));
-                break;
-            case THROTTLING_ENABLED:
-                feedLoadManager.reportThrottlingEnabled(ThrottlingEnabledFeedMessage.read(obj));
-            default:
-                break;
-        }
-
-    }
-}
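
The receiver above dispatches purely on the message-type field of the incoming JSON payload. The sketch below (not from the patch) shows the same pattern with the org.json API; the JSON key and payload are hypothetical, since the real keys live in FeedConstants.MessageConstants.

    import org.json.JSONObject;

    public class MessageDispatchSketch {

        enum MessageType {
            NODE_REPORT,
            CONGESTION
        }

        public static void main(String[] args) throws Exception {
            // Hypothetical payload; the real key is FeedConstants.MessageConstants.MESSAGE_TYPE.
            JSONObject obj = new JSONObject("{\"message-type\":\"NODE_REPORT\",\"load\":0.42}");
            MessageType messageType = MessageType.valueOf(obj.getString("message-type"));
            switch (messageType) {
                case NODE_REPORT:
                    // A real receiver would call feedLoadManager.submitNodeLoadReport(...).
                    System.out.println("node load report: " + obj);
                    break;
                default:
                    break;
            }
        }
    }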

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java
deleted file mode 100644
index 81cabeb..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedMetadataManager.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.Date;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedMetadataManager;
-import org.apache.asterix.hyracks.bootstrap.FeedBootstrap;
-import org.apache.asterix.metadata.feeds.XAQLFeedMessage;
-import org.apache.asterix.om.base.ARecord;
-import org.apache.asterix.om.base.AString;
-import org.apache.asterix.om.base.IAObject;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public class FeedMetadataManager implements IFeedMetadataManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedMetadataManager.class.getName());
-
-    private final String nodeId;
-    private ARecordType recordType;
-
-    public FeedMetadataManager(String nodeId) throws AsterixException, HyracksDataException {
-        this.nodeId = nodeId;
-        String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple", "message",
-                "timestamp" };
-        IAType[] fieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
-
-        recordType = new ARecordType(FeedBootstrap.FAILED_TUPLE_DATASET_TYPE, fieldNames, fieldTypes, true);
-    }
-
-    @Override
-    public void logTuple(FeedConnectionId connectionId, String tuple, String message, IFeedManager feedManager)
-            throws AsterixException {
-        try {
-            AString id = new AString("1");
-            AString dataverseValue = new AString(connectionId.getFeedId().getDataverse());
-            AString feedValue = new AString(connectionId.getFeedId().getFeedName());
-            AString targetDatasetValue = new AString(connectionId.getDatasetName());
-            AString tupleValue = new AString(tuple);
-            AString messageValue = new AString(message);
-            AString dateTime = new AString(new Date().toString());
-
-            IAObject[] fields = new IAObject[] { id, dataverseValue, feedValue, targetDatasetValue, tupleValue,
-                    messageValue, dateTime };
-            ARecord record = new ARecord(recordType, fields);
-            StringBuilder builder = new StringBuilder();
-            builder.append("use dataverse " + FeedBootstrap.FEEDS_METADATA_DV + ";" + "\n");
-            builder.append("insert into dataset " + FeedBootstrap.FAILED_TUPLE_DATASET + " ");
-            builder.append(" (" + recordToString(record) + ")");
-            builder.append(";");
-
-            XAQLFeedMessage xAqlMessage = new XAQLFeedMessage(connectionId, builder.toString());
-            feedManager.getFeedMessageService().sendMessage(xAqlMessage);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(" Sent " + xAqlMessage.toJSON());
-            }
-        } catch (Exception pe) {
-            throw new AsterixException(pe);
-        }
-    }
-
-    @Override
-    public String toString() {
-        return "FeedMetadataManager [" + nodeId + "]";
-    }
-
-    private String recordToString(ARecord record) {
-        String[] fieldNames = record.getType().getFieldNames();
-        StringBuilder sb = new StringBuilder();
-        sb.append("{ ");
-        for (int i = 0; i < fieldNames.length; i++) {
-            if (i > 0) {
-                sb.append(", ");
-            }
-            sb.append("\"" + fieldNames[i] + "\"");
-            sb.append(": ");
-            switch (record.getType().getFieldTypes()[i].getTypeTag()) {
-                case STRING:
-                    sb.append("\"" + ((AString) record.getValueByPos(i)).getStringValue() + "\"");
-                    break;
-                default:
-                    break;
-            }
-        }
-        sb.append(" }");
-        return sb.toString();
-    }
-}
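
To make logTuple() above easier to follow, the statement it ships via XAQLFeedMessage looks roughly like the output of the sketch below (not from the patch). The dataverse and dataset names are placeholders for the FeedBootstrap constants, and the record values are invented.

    public class FailedTupleStatementSketch {

        public static void main(String[] args) {
            // Placeholder names standing in for FeedBootstrap.FEEDS_METADATA_DV and
            // FeedBootstrap.FAILED_TUPLE_DATASET; the field values are invented.
            String record = "{ \"id\": \"1\", \"dataverseName\": \"feeds\", \"feedName\": \"TwitterFeed\", "
                    + "\"targetDataset\": \"Tweets\", \"tuple\": \"...\", \"message\": \"parse error\", "
                    + "\"timestamp\": \"Thu Jan 14 2016\" }";
            StringBuilder builder = new StringBuilder();
            builder.append("use dataverse feeds_metadata;\n");
            builder.append("insert into dataset failed_tuples ");
            builder.append(" (" + record + ")");
            builder.append(";");
            System.out.println(builder.toString());
        }
    }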

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java
deleted file mode 100644
index d57a971..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedTrackingManager.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedTupleCommitAckMessage;
-import org.apache.asterix.common.feeds.FeedTupleCommitResponseMessage;
-import org.apache.asterix.common.feeds.api.IFeedTrackingManager;
-import org.apache.asterix.file.FeedOperations;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedTrackingManager implements IFeedTrackingManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedTrackingManager.class.getName());
-
-    private final BitSet allOnes;
-
-    private Map<FeedConnectionId, Map<AckId, BitSet>> ackHistory;
-    private Map<FeedConnectionId, Map<AckId, Integer>> maxBaseAcked;
-
-    public FeedTrackingManager() {
-        byte[] allOneBytes = new byte[128];
-        Arrays.fill(allOneBytes, (byte) 0xff);
-        allOnes = BitSet.valueOf(allOneBytes);
-        ackHistory = new HashMap<FeedConnectionId, Map<AckId, BitSet>>();
-        maxBaseAcked = new HashMap<FeedConnectionId, Map<AckId, Integer>>();
-    }
-
-    @Override
-    public synchronized void submitAckReport(FeedTupleCommitAckMessage ackMessage) {
-        AckId ackId = getAckId(ackMessage);
-        Map<AckId, BitSet> acksForConnection = ackHistory.get(ackMessage.getConnectionId());
-        if (acksForConnection == null) {
-            acksForConnection = new HashMap<AckId, BitSet>();
-            acksForConnection.put(ackId, BitSet.valueOf(ackMessage.getCommitAcks()));
-            ackHistory.put(ackMessage.getConnectionId(), acksForConnection);
-        }
-        BitSet currentAcks = acksForConnection.get(ackId);
-        if (currentAcks == null) {
-            currentAcks = BitSet.valueOf(ackMessage.getCommitAcks());
-            acksForConnection.put(ackId, currentAcks);
-        } else {
-            currentAcks.or(BitSet.valueOf(ackMessage.getCommitAcks()));
-        }
-        if (Arrays.equals(currentAcks.toByteArray(), allOnes.toByteArray())) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(ackMessage.getIntakePartition() + " (" + ackMessage.getBase() + ")" + " is covered");
-            }
-            Map<AckId, Integer> maxBaseAckedForConnection = maxBaseAcked.get(ackMessage.getConnectionId());
-            if (maxBaseAckedForConnection == null) {
-                maxBaseAckedForConnection = new HashMap<AckId, Integer>();
-                maxBaseAcked.put(ackMessage.getConnectionId(), maxBaseAckedForConnection);
-            }
-            Integer maxBaseAckedValue = maxBaseAckedForConnection.get(ackId);
-            if (maxBaseAckedValue == null) {
-                maxBaseAckedValue = ackMessage.getBase();
-                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
-                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
-                        ackMessage.getBase());
-            } else if (ackMessage.getBase() == maxBaseAckedValue + 1) {
-                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
-                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
-                        ackMessage.getBase());
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Ignoring discontinuous acked base " + ackMessage.getBase() + " for " + ackId);
-                }
-            }
-
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("AckId " + ackId + " pending number of acks " + (128 * 8 - currentAcks.cardinality()));
-            }
-        }
-    }
-
-    public synchronized void disableTracking(FeedConnectionId connectionId) {
-        ackHistory.remove(connectionId);
-        maxBaseAcked.remove(connectionId);
-    }
-
-    private void sendCommitResponseMessage(FeedConnectionId connectionId, int partition, int base) {
-        FeedTupleCommitResponseMessage response = new FeedTupleCommitResponseMessage(connectionId, partition, base);
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-        String collectLocation = collectLocations.get(partition);
-        Set<String> messageDestinations = new HashSet<String>();
-        messageDestinations.add(collectLocation);
-        messageDestinations.addAll(storageLocations);
-        try {
-            JobSpecification spec = FeedOperations.buildCommitAckResponseJob(response, messageDestinations);
-            CentralFeedManager.runJob(spec, false);
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to send commit response message " + response + " exception " + e.getMessage());
-            }
-        }
-    }
-
-    private static AckId getAckId(FeedTupleCommitAckMessage ackMessage) {
-        return new AckId(ackMessage.getConnectionId(), ackMessage.getIntakePartition(), ackMessage.getBase());
-    }
-
-    private static class AckId {
-        private FeedConnectionId connectionId;
-        private int intakePartition;
-        private int base;
-
-        public AckId(FeedConnectionId connectionId, int intakePartition, int base) {
-            this.connectionId = connectionId;
-            this.intakePartition = intakePartition;
-            this.base = base;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) {
-                return true;
-            }
-            if (!(o instanceof AckId)) {
-                return false;
-            }
-            AckId other = (AckId) o;
-            return other.getConnectionId().equals(connectionId) && other.getIntakePartition() == intakePartition
-                    && other.getBase() == base;
-        }
-
-        @Override
-        public String toString() {
-            return connectionId + "[" + intakePartition + "]" + "(" + base + ")";
-        }
-
-        @Override
-        public int hashCode() {
-            return toString().hashCode();
-        }
-
-        public FeedConnectionId getConnectionId() {
-            return connectionId;
-        }
-
-        public int getIntakePartition() {
-            return intakePartition;
-        }
-
-        public int getBase() {
-            return base;
-        }
-
-    }
-
-    @Override
-    public void disableAcking(FeedConnectionId connectionId) {
-        ackHistory.remove(connectionId);
-        maxBaseAcked.remove(connectionId);
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Acking disabled for " + connectionId);
-        }
-    }
-
-}
\ No newline at end of file
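
For readers following the cleanup: the removed FeedTrackingManager tracked tuple-commit acknowledgements by OR-ing each incoming ack bitmap into a per-base BitSet and treating a fully set 128-byte window as covered. Below is a minimal, standalone sketch of that idea; the AckWindowTracker class and its method names are illustrative assumptions only, not part of the AsterixDB code base.

    import java.util.Arrays;
    import java.util.BitSet;
    import java.util.HashMap;
    import java.util.Map;

    // Illustrative sketch only: per-base acknowledgement window, modeled on the removed class.
    class AckWindowTracker {
        private static final int WINDOW_BYTES = 128; // 128 bytes => 1024 ack slots per base
        private final BitSet allOnes;
        private final Map<Integer, BitSet> acksByBase = new HashMap<>();

        AckWindowTracker() {
            byte[] ones = new byte[WINDOW_BYTES];
            Arrays.fill(ones, (byte) 0xff);
            allOnes = BitSet.valueOf(ones);
        }

        // Merge a partial ack bitmap for the given base; returns true once every slot is acked.
        boolean submitAck(int base, byte[] commitAcks) {
            BitSet merged = acksByBase.get(base);
            if (merged == null) {
                merged = new BitSet(WINDOW_BYTES * 8);
                acksByBase.put(base, merged);
            }
            merged.or(BitSet.valueOf(commitAcks));
            return Arrays.equals(merged.toByteArray(), allOnes.toByteArray());
        }
    }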

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java
deleted file mode 100644
index 3686a03..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedWorkRequestResponseHandler.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IClusterManagementWork;
-import org.apache.asterix.common.api.IClusterManagementWorkResponse;
-import org.apache.asterix.common.feeds.FeedConnectJobInfo;
-import org.apache.asterix.common.feeds.FeedIntakeInfo;
-import org.apache.asterix.common.feeds.FeedJobInfo;
-import org.apache.asterix.metadata.cluster.AddNodeWork;
-import org.apache.asterix.metadata.cluster.AddNodeWorkResponse;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.asterix.om.util.AsterixClusterProperties;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.constraints.Constraint;
-import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
-import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
-import org.apache.hyracks.api.constraints.expressions.ConstraintExpression;
-import org.apache.hyracks.api.constraints.expressions.ConstraintExpression.ExpressionTag;
-import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
-import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
-import org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedWorkRequestResponseHandler implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedWorkRequestResponseHandler.class.getName());
-
-    private final LinkedBlockingQueue<IClusterManagementWorkResponse> inbox;
-
-    private Map<Integer, Map<String, List<FeedJobInfo>>> feedsWaitingForResponse = new HashMap<Integer, Map<String, List<FeedJobInfo>>>();
-
-    public FeedWorkRequestResponseHandler(LinkedBlockingQueue<IClusterManagementWorkResponse> inbox) {
-        this.inbox = inbox;
-    }
-
-    @Override
-    public void run() {
-        while (true) {
-            IClusterManagementWorkResponse response = null;
-            try {
-                response = inbox.take();
-            } catch (InterruptedException e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Interrupted exception " + e.getMessage());
-                }
-            }
-            IClusterManagementWork submittedWork = response.getWork();
-            Map<String, String> nodeSubstitution = new HashMap<String, String>();
-            switch (submittedWork.getClusterManagementWorkType()) {
-                case ADD_NODE:
-                    AddNodeWork addNodeWork = (AddNodeWork) submittedWork;
-                    int workId = addNodeWork.getWorkId();
-                    Map<String, List<FeedJobInfo>> failureAnalysis = feedsWaitingForResponse.get(workId);
-                    AddNodeWorkResponse resp = (AddNodeWorkResponse) response;
-                    List<String> nodesAdded = resp.getNodesAdded();
-                    List<String> unsubstitutedNodes = new ArrayList<String>();
-                    unsubstitutedNodes.addAll(addNodeWork.getDeadNodes());
-                    int nodeIndex = 0;
-
-                    /** form a mapping between the failed node and its substitute **/
-                    if (nodesAdded != null && nodesAdded.size() > 0) {
-                        for (String failedNodeId : addNodeWork.getDeadNodes()) {
-                            String substitute = nodesAdded.get(nodeIndex);
-                            nodeSubstitution.put(failedNodeId, substitute);
-                            nodeIndex = (nodeIndex + 1) % nodesAdded.size();
-                            unsubstitutedNodes.remove(failedNodeId);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Node " + substitute + " chosen to substitute lost node " + failedNodeId);
-                            }
-                        }
-                    }
-                    if (unsubstitutedNodes.size() > 0) {
-                        String[] participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes()
-                                .toArray(new String[] {});
-                        nodeIndex = 0;
-                        for (String unsubstitutedNode : unsubstitutedNodes) {
-                            nodeSubstitution.put(unsubstitutedNode, participantNodes[nodeIndex]);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Node " + participantNodes[nodeIndex] + " chosen to substitute lost node "
-                                        + unsubstitutedNode);
-                            }
-                            nodeIndex = (nodeIndex + 1) % participantNodes.length;
-                        }
-
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Request " + resp.getWork() + " completed using internal nodes");
-                        }
-                    }
-
-                    // alter failed feed intake jobs
-
-                    for (Entry<String, List<FeedJobInfo>> entry : failureAnalysis.entrySet()) {
-                        String failedNode = entry.getKey();
-                        List<FeedJobInfo> impactedJobInfos = entry.getValue();
-                        for (FeedJobInfo info : impactedJobInfos) {
-                            JobSpecification spec = info.getSpec();
-                            replaceNode(spec, failedNode, nodeSubstitution.get(failedNode));
-                            info.setSpec(spec);
-                        }
-                    }
-
-                    Set<FeedIntakeInfo> revisedIntakeJobs = new HashSet<FeedIntakeInfo>();
-                    Set<FeedConnectJobInfo> revisedConnectJobInfos = new HashSet<FeedConnectJobInfo>();
-
-                    for (List<FeedJobInfo> infos : failureAnalysis.values()) {
-                        for (FeedJobInfo info : infos) {
-                            switch (info.getJobType()) {
-                                case INTAKE:
-                                    revisedIntakeJobs.add((FeedIntakeInfo) info);
-                                    break;
-                                case FEED_CONNECT:
-                                    revisedConnectJobInfos.add((FeedConnectJobInfo) info);
-                                    break;
-                            }
-                        }
-                    }
-
-                    IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-                    try {
-                        for (FeedIntakeInfo info : revisedIntakeJobs) {
-                            hcc.startJob(info.getSpec());
-                        }
-                        Thread.sleep(2000);
-                        for (FeedConnectJobInfo info : revisedConnectJobInfos) {
-                            hcc.startJob(info.getSpec());
-                            Thread.sleep(2000);
-                        }
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to start revised job post failure");
-                        }
-                    }
-
-                    break;
-                case REMOVE_NODE:
-                    throw new IllegalStateException("Invalid work submitted");
-            }
-        }
-    }
-
-    private void replaceNode(JobSpecification jobSpec, String failedNodeId, String replacementNode) {
-        Set<Constraint> userConstraints = jobSpec.getUserConstraints();
-        List<Constraint> locationConstraintsToReplace = new ArrayList<Constraint>();
-        List<Constraint> countConstraintsToReplace = new ArrayList<Constraint>();
-        List<OperatorDescriptorId> modifiedOperators = new ArrayList<OperatorDescriptorId>();
-        Map<OperatorDescriptorId, List<Constraint>> candidateConstraints = new HashMap<OperatorDescriptorId, List<Constraint>>();
-        Map<OperatorDescriptorId, Map<Integer, String>> newConstraints = new HashMap<OperatorDescriptorId, Map<Integer, String>>();
-        OperatorDescriptorId opId = null;
-        for (Constraint constraint : userConstraints) {
-            LValueConstraintExpression lexpr = constraint.getLValue();
-            ConstraintExpression cexpr = constraint.getRValue();
-            switch (lexpr.getTag()) {
-                case PARTITION_COUNT:
-                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
-                    if (modifiedOperators.contains(opId)) {
-                        countConstraintsToReplace.add(constraint);
-                    } else {
-                        List<Constraint> clist = candidateConstraints.get(opId);
-                        if (clist == null) {
-                            clist = new ArrayList<Constraint>();
-                            candidateConstraints.put(opId, clist);
-                        }
-                        clist.add(constraint);
-                    }
-                    break;
-                case PARTITION_LOCATION:
-                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
-                    String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
-                    if (oldLocation.equals(failedNodeId)) {
-                        locationConstraintsToReplace.add(constraint);
-                        modifiedOperators.add(((PartitionLocationExpression) lexpr).getOperatorDescriptorId());
-                        Map<Integer, String> newLocs = newConstraints.get(opId);
-                        if (newLocs == null) {
-                            newLocs = new HashMap<Integer, String>();
-                            newConstraints.put(opId, newLocs);
-                        }
-                        int partition = ((PartitionLocationExpression) lexpr).getPartition();
-                        newLocs.put(partition, replacementNode);
-                    } else {
-                        if (modifiedOperators.contains(opId)) {
-                            locationConstraintsToReplace.add(constraint);
-                            Map<Integer, String> newLocs = newConstraints.get(opId);
-                            if (newLocs == null) {
-                                newLocs = new HashMap<Integer, String>();
-                                newConstraints.put(opId, newLocs);
-                            }
-                            int partition = ((PartitionLocationExpression) lexpr).getPartition();
-                            newLocs.put(partition, oldLocation);
-                        } else {
-                            List<Constraint> clist = candidateConstraints.get(opId);
-                            if (clist == null) {
-                                clist = new ArrayList<Constraint>();
-                                candidateConstraints.put(opId, clist);
-                            }
-                            clist.add(constraint);
-                        }
-                    }
-                    break;
-                default:
-                    break;
-            }
-        }
-
-        jobSpec.getUserConstraints().removeAll(locationConstraintsToReplace);
-        jobSpec.getUserConstraints().removeAll(countConstraintsToReplace);
-
-        for (OperatorDescriptorId mopId : modifiedOperators) {
-            List<Constraint> clist = candidateConstraints.get(mopId);
-            if (clist != null && !clist.isEmpty()) {
-                jobSpec.getUserConstraints().removeAll(clist);
-
-                for (Constraint c : clist) {
-                    if (c.getLValue().getTag().equals(ExpressionTag.PARTITION_LOCATION)) {
-                        ConstraintExpression cexpr = c.getRValue();
-                        int partition = ((PartitionLocationExpression) c.getLValue()).getPartition();
-                        String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
-                        newConstraints.get(mopId).put(partition, oldLocation);
-                    }
-                }
-            }
-        }
-
-        for (Entry<OperatorDescriptorId, Map<Integer, String>> entry : newConstraints.entrySet()) {
-            OperatorDescriptorId nopId = entry.getKey();
-            Map<Integer, String> clist = entry.getValue();
-            IOperatorDescriptor op = jobSpec.getOperatorMap().get(nopId);
-            String[] locations = new String[clist.size()];
-            for (int i = 0; i < locations.length; i++) {
-                locations[i] = clist.get(i);
-            }
-            PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, op, locations);
-        }
-
-    }
-
-    public void registerFeedWork(int workId, Map<String, List<FeedJobInfo>> impactedJobs) {
-        feedsWaitingForResponse.put(workId, impactedJobs);
-    }
-}
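
The removed FeedWorkRequestResponseHandler rebuilt failed feed jobs by first pairing each dead node with a substitute, preferring freshly added nodes and falling back to existing participant nodes, assigned round-robin. A hedged sketch of that mapping step is below; NodeSubstitution and its plan method are hypothetical names used only for illustration, not the project's API.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch only: dead-node -> replacement-node mapping, modeled on the removed handler.
    final class NodeSubstitution {
        static Map<String, String> plan(List<String> deadNodes, List<String> addedNodes,
                List<String> participantNodes) {
            // Prefer the nodes that were just added; fall back to existing participants when none were added.
            List<String> pool = (addedNodes != null && !addedNodes.isEmpty()) ? addedNodes : participantNodes;
            Map<String, String> substitution = new HashMap<>();
            int i = 0;
            for (String dead : deadNodes) {
                substitution.put(dead, pool.get(i)); // round-robin over the available pool
                i = (i + 1) % pool.size();
            }
            return substitution;
        }
    }

The mapping is then used to rewrite the PARTITION_LOCATION constraints of every affected operator before the revised intake and connect jobs are resubmitted.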

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java b/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java
deleted file mode 100644
index 7660007..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feeds/FeedsActivator.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feeds;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.api.common.SessionConfig;
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
-import org.apache.asterix.lang.common.statement.DataverseDecl;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.api.job.JobId;
-
-public class FeedsActivator implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    private List<FeedCollectInfo> feedsToRevive;
-    private Mode mode;
-
-    public enum Mode {
-        REVIVAL_POST_CLUSTER_REBOOT,
-        REVIVAL_POST_NODE_REJOIN
-    }
-
-    public FeedsActivator() {
-        this.mode = Mode.REVIVAL_POST_CLUSTER_REBOOT;
-    }
-
-    public FeedsActivator(List<FeedCollectInfo> feedsToRevive) {
-        this.feedsToRevive = feedsToRevive;
-        this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
-    }
-
-    @Override
-    public void run() {
-        switch (mode) {
-            case REVIVAL_POST_CLUSTER_REBOOT:
-                //revivePostClusterReboot();
-                break;
-            case REVIVAL_POST_NODE_REJOIN:
-                try {
-                    Thread.sleep(10000);
-                } catch (InterruptedException e1) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Attempt to resume feed interrupted");
-                    }
-                    throw new IllegalStateException(e1.getMessage());
-                }
-                for (FeedCollectInfo finfo : feedsToRevive) {
-                    try {
-                        JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
-                            LOGGER.info("Job:" + finfo.jobSpec);
-                        }
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
-                        }
-                    }
-                }
-        }
-    }
-
-    public void reviveFeed(String dataverse, String feedName, String dataset, String feedPolicy) {
-        PrintWriter writer = new PrintWriter(System.out, true);
-        SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-        try {
-            DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(dataverse));
-            ConnectFeedStatement stmt = new ConnectFeedStatement(new Identifier(dataverse), new Identifier(feedName),
-                    new Identifier(dataset), feedPolicy, 0);
-            stmt.setForceConnect(true);
-            List<Statement> statements = new ArrayList<Statement>();
-            statements.add(dataverseDecl);
-            statements.add(stmt);
-            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                    QueryTranslator.ResultDelivery.SYNC);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Resumed feed: " + dataverse + ":" + dataset + " using policy " + feedPolicy);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Exception in resuming loser feed: " + dataverse + ":" + dataset + " using policy "
-                        + feedPolicy + " Exception " + e.getMessage());
-            }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
index 2ee9dd4..77c6a54 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
@@ -44,6 +44,7 @@ import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.indexing.FilesIndexDescription;
 import org.apache.asterix.external.indexing.IndexingConstants;
+import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.external.operators.ExternalDatasetIndexesAbortOperatorDescriptor;
 import org.apache.asterix.external.operators.ExternalDatasetIndexesCommitOperatorDescriptor;
 import org.apache.asterix.external.operators.ExternalDatasetIndexesRecoverOperatorDescriptor;
@@ -60,7 +61,6 @@ import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
 import org.apache.asterix.metadata.entities.Index;
-import org.apache.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.metadata.utils.DatasetUtils;
 import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
 import org.apache.asterix.om.types.ARecordType;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
index cb55c5f..6a036c0 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
@@ -22,24 +22,24 @@ import java.util.Collection;
 import java.util.List;
 
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedConnectJobInfo;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedConstants;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.FeedTupleCommitResponseMessage;
-import org.apache.asterix.common.feeds.api.IFeedJoint;
-import org.apache.asterix.common.feeds.api.IFeedMessage;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.message.EndFeedMessage;
-import org.apache.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
 import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.feeds.FeedLifecycleListener;
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.message.EndFeedMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
+import org.apache.asterix.external.feed.message.PrepareStallMessage;
+import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.operators.FeedMessageOperatorDescriptor;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.feed.FeedLifecycleListener;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
-import org.apache.asterix.metadata.feeds.FeedMessageOperatorDescriptor;
-import org.apache.asterix.metadata.feeds.PrepareStallMessage;
-import org.apache.asterix.metadata.feeds.TerminateDataFlowMessage;
+import org.apache.asterix.metadata.entities.Feed;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
@@ -63,7 +63,7 @@ public class FeedOperations {
      * @return JobSpecification the Hyracks job specification for receiving data from external source
      * @throws Exception
      */
-    public static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(PrimaryFeed primaryFeed,
+    public static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed primaryFeed,
             AqlMetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
 
         JobSpecification spec = JobSpecificationUtils.createJobSpecification();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
index a579d2c..6d23f3c 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryBTreeOperationsHelper.java
@@ -31,9 +31,9 @@ import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 import org.apache.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
 import org.apache.asterix.external.indexing.IndexingConstants;
+import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Index;
-import org.apache.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;



[20/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeId.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeId.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeId.java
deleted file mode 100644
index bf3c2c1..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeId.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.Serializable;
-
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-
-public class FeedRuntimeId implements Serializable {
-
-    private static final long serialVersionUID = 1L;
-
-    public static final String DEFAULT_OPERAND_ID = "N/A";
-
-    private final FeedRuntimeType runtimeType;
-    private final int partition;
-    private final String operandId;
-
-    public FeedRuntimeId(FeedRuntimeType runtimeType, int partition, String operandId) {
-        this.runtimeType = runtimeType;
-        this.partition = partition;
-        this.operandId = operandId;
-    }
-
-    @Override
-    public String toString() {
-        return runtimeType + "[" + partition + "]" + "{" + operandId + "}";
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (!(o instanceof FeedRuntimeId)) {
-            return false;
-        }
-        FeedRuntimeId other = (FeedRuntimeId) o;
-        return (other.getFeedRuntimeType().equals(runtimeType) && other.getOperandId().equals(operandId) && other
-                .getPartition() == partition);
-    }
-
-    @Override
-    public int hashCode() {
-        return toString().hashCode();
-    }
-
-    public FeedRuntimeType getFeedRuntimeType() {
-        return runtimeType;
-    }
-
-    public int getPartition() {
-        return partition;
-    }
-
-    public FeedRuntimeType getRuntimeType() {
-        return runtimeType;
-    }
-
-    public String getOperandId() {
-        return operandId;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeInputHandler.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeInputHandler.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeInputHandler.java
deleted file mode 100644
index 6642df1..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeInputHandler.java
+++ /dev/null
@@ -1,440 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.DataBucket.ContentType;
-import org.apache.asterix.common.feeds.api.IExceptionHandler;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedMemoryComponent;
-import org.apache.asterix.common.feeds.api.IFeedMessage;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.common.feeds.message.FeedCongestionMessage;
-import org.apache.asterix.common.feeds.message.ThrottlingEnabledFeedMessage;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-/**
- * Provides for error-handling and input-side buffering for a feed runtime.
- */
-public class FeedRuntimeInputHandler implements IFrameWriter {
-
-    private static Logger LOGGER = Logger.getLogger(FeedRuntimeInputHandler.class.getName());
-
-    private final FeedConnectionId connectionId;
-    private final FeedRuntimeId runtimeId;
-    private final FeedPolicyAccessor feedPolicyAccessor;
-    private boolean bufferingEnabled;
-    private final IExceptionHandler exceptionHandler;
-    private final FeedFrameDiscarder discarder;
-    private final FeedFrameSpiller spiller;
-    private final FeedPolicyAccessor fpa;
-    private final IFeedManager feedManager;
-
-    private IFrameWriter coreOperator;
-    private MonitoredBuffer mBuffer;
-    private DataBucketPool pool;
-    private FrameCollection frameCollection;
-    private Mode mode;
-    private Mode lastMode;
-    private boolean finished;
-    private long nProcessed;
-    private boolean throttlingEnabled;
-
-    private FrameEventCallback frameEventCallback;
-
-    public FeedRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            IFrameWriter coreOperator, FeedPolicyAccessor fpa, boolean bufferingEnabled, FrameTupleAccessor fta,
-            RecordDescriptor recordDesc, IFeedManager feedManager, int nPartitions) throws IOException {
-        this.connectionId = connectionId;
-        this.runtimeId = runtimeId;
-        this.coreOperator = coreOperator;
-        this.bufferingEnabled = bufferingEnabled;
-        this.feedPolicyAccessor = fpa;
-        this.spiller = new FeedFrameSpiller(ctx, connectionId, runtimeId, fpa);
-        this.discarder = new FeedFrameDiscarder(connectionId, runtimeId, fpa, this);
-        this.exceptionHandler = new FeedExceptionHandler(ctx, fta, recordDesc, feedManager, connectionId);
-        this.mode = Mode.PROCESS;
-        this.lastMode = Mode.PROCESS;
-        this.finished = false;
-        this.fpa = fpa;
-        this.feedManager = feedManager;
-        this.pool = (DataBucketPool) feedManager.getFeedMemoryManager()
-                .getMemoryComponent(IFeedMemoryComponent.Type.POOL);
-        this.frameCollection = (FrameCollection) feedManager.getFeedMemoryManager()
-                .getMemoryComponent(IFeedMemoryComponent.Type.COLLECTION);
-        this.frameEventCallback = new FrameEventCallback(fpa, this, coreOperator);
-        this.mBuffer = MonitoredBuffer.getMonitoredBuffer(ctx, this, coreOperator, fta, recordDesc,
-                feedManager.getFeedMetricCollector(), connectionId, runtimeId, exceptionHandler, frameEventCallback,
-                nPartitions, fpa);
-        this.mBuffer.start();
-        this.throttlingEnabled = false;
-    }
-
-    @Override
-    public synchronized void nextFrame(ByteBuffer frame) throws HyracksDataException {
-        try {
-            switch (mode) {
-                case PROCESS:
-                    switch (lastMode) {
-                        case SPILL:
-                        case POST_SPILL_DISCARD:
-                            setMode(Mode.PROCESS_SPILL);
-                            processSpilledBacklog();
-                            break;
-                        case STALL:
-                            setMode(Mode.PROCESS_BACKLOG);
-                            processBufferredBacklog();
-                            break;
-                        default:
-                            break;
-                    }
-                    process(frame);
-                    break;
-                case PROCESS_BACKLOG:
-                case PROCESS_SPILL:
-                    process(frame);
-                    break;
-                case SPILL:
-                    spill(frame);
-                    break;
-                case DISCARD:
-                case POST_SPILL_DISCARD:
-                    discard(frame);
-                    break;
-                case STALL:
-                    switch (runtimeId.getFeedRuntimeType()) {
-                        case COLLECT:
-                        case COMPUTE_COLLECT:
-                        case COMPUTE:
-                        case STORE:
-                            bufferDataUntilRecovery(frame);
-                            break;
-                        default:
-                            if (LOGGER.isLoggable(Level.WARNING)) {
-                                LOGGER.warning("Discarding frame during " + mode + " mode " + this.runtimeId);
-                            }
-                            break;
-                    }
-                    break;
-                case END:
-                case FAIL:
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Ignoring incoming tuples in " + mode + " mode");
-                    }
-                    break;
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void bufferDataUntilRecovery(ByteBuffer frame) throws Exception {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Buffering data until recovery is complete " + this.runtimeId);
-        }
-        if (frameCollection == null) {
-            this.frameCollection = (FrameCollection) feedManager.getFeedMemoryManager()
-                    .getMemoryComponent(IFeedMemoryComponent.Type.COLLECTION);
-        }
-        if (frameCollection == null) {
-            discarder.processMessage(frame);
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Running low on memory! DISCARDING FRAME ");
-            }
-        } else {
-            boolean success = frameCollection.addFrame(frame);
-            if (!success) {
-                if (fpa.spillToDiskOnCongestion()) {
-                    if (frame != null) {
-                        spiller.processMessage(frame);
-                    } // TODO handle the else case
-                } else {
-                    discarder.processMessage(frame);
-                }
-            }
-        }
-    }
-
-    public void reportUnresolvableCongestion() throws HyracksDataException {
-        if (this.runtimeId.getFeedRuntimeType().equals(FeedRuntimeType.COMPUTE)) {
-            FeedCongestionMessage congestionReport = new FeedCongestionMessage(connectionId, runtimeId,
-                    mBuffer.getInflowRate(), mBuffer.getOutflowRate(), mode);
-            feedManager.getFeedMessageService().sendMessage(congestionReport);
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Congestion reported " + this.connectionId + " " + this.runtimeId);
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unresolvable congestion at " + this.connectionId + " " + this.runtimeId);
-            }
-        }
-    }
-
-    private void processBufferredBacklog() throws HyracksDataException {
-        try {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Processing backlog " + this.runtimeId);
-            }
-
-            if (frameCollection != null) {
-                Iterator<ByteBuffer> backlog = frameCollection.getFrameCollectionIterator();
-                while (backlog.hasNext()) {
-                    process(backlog.next());
-                    nProcessed++;
-                }
-                DataBucket bucket = pool.getDataBucket();
-                bucket.setContentType(ContentType.EOSD);
-                bucket.setDesiredReadCount(1);
-                mBuffer.sendMessage(bucket);
-                feedManager.getFeedMemoryManager().releaseMemoryComponent(frameCollection);
-                frameCollection = null;
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void processSpilledBacklog() throws HyracksDataException {
-        try {
-            Iterator<ByteBuffer> backlog = spiller.replayData();
-            while (backlog.hasNext()) {
-                process(backlog.next());
-                nProcessed++;
-            }
-            DataBucket bucket = pool.getDataBucket();
-            bucket.setContentType(ContentType.EOSD);
-            bucket.setDesiredReadCount(1);
-            mBuffer.sendMessage(bucket);
-            spiller.reset();
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    protected void process(ByteBuffer frame) throws HyracksDataException {
-        boolean frameProcessed = false;
-        while (!frameProcessed) {
-            try {
-                if (!bufferingEnabled) {
-                    coreOperator.nextFrame(frame); // synchronous
-                    mBuffer.sendReport(frame);
-                } else {
-                    DataBucket bucket = pool.getDataBucket();
-                    if (bucket != null) {
-                        if (frame != null) {
-                            bucket.reset(frame); // created a copy here
-                            bucket.setContentType(ContentType.DATA);
-                        } else {
-                            bucket.setContentType(ContentType.EOD);
-                        }
-                        bucket.setDesiredReadCount(1);
-                        mBuffer.sendMessage(bucket);
-                        mBuffer.sendReport(frame);
-                        nProcessed++;
-                    } else {
-                        if (fpa.spillToDiskOnCongestion()) {
-                            if (frame != null) {
-                                boolean spilled = spiller.processMessage(frame);
-                                if (spilled) {
-                                    setMode(Mode.SPILL);
-                                } else {
-                                    reportUnresolvableCongestion();
-                                }
-                            }
-                        } else if (fpa.discardOnCongestion()) {
-                            boolean discarded = discarder.processMessage(frame);
-                            if (!discarded) {
-                                reportUnresolvableCongestion();
-                            }
-                        } else if (fpa.throttlingEnabled()) {
-                            setThrottlingEnabled(true);
-                        } else {
-                            reportUnresolvableCongestion();
-                        }
-
-                    }
-                }
-                frameProcessed = true;
-            } catch (Exception e) {
-                if (feedPolicyAccessor.continueOnSoftFailure()) {
-                    frame = exceptionHandler.handleException(e, frame);
-                    if (frame == null) {
-                        frameProcessed = true;
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Encountered exception! " + e.getMessage()
-                                    + " Insufficient information, cannot extract failing tuple");
-                        }
-                    }
-                } else {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Ingestion policy does not require recovering from tuple. Feed would terminate");
-                    }
-                    mBuffer.close(false);
-                    throw new HyracksDataException(e);
-                }
-            }
-        }
-    }
-
-    private void spill(ByteBuffer frame) throws Exception {
-        boolean success = spiller.processMessage(frame);
-        if (!success) {
-            // limit reached
-            setMode(Mode.POST_SPILL_DISCARD);
-            reportUnresolvableCongestion();
-        }
-    }
-
-    private void discard(ByteBuffer frame) throws Exception {
-        boolean success = discarder.processMessage(frame);
-        if (!success) { // limit reached
-            reportUnresolvableCongestion();
-        }
-    }
-
-    public Mode getMode() {
-        return mode;
-    }
-
-    public synchronized void setMode(Mode mode) {
-        if (mode.equals(this.mode)) {
-            return;
-        }
-        this.lastMode = this.mode;
-        this.mode = mode;
-        if (mode.equals(Mode.END)) {
-            this.close();
-        }
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Switched from " + lastMode + " to " + mode + " " + this.runtimeId);
-        }
-    }
-
-    @Override
-    public void close() {
-        if (mBuffer != null) {
-            boolean disableMonitoring = !this.mode.equals(Mode.STALL);
-            if (frameCollection != null) {
-                feedManager.getFeedMemoryManager().releaseMemoryComponent(frameCollection);
-            }
-            if (pool != null) {
-                feedManager.getFeedMemoryManager().releaseMemoryComponent(pool);
-            }
-            mBuffer.close(false, disableMonitoring);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Closed input side handler for " + this.runtimeId + " disabled monitoring "
-                        + disableMonitoring + " Mode for runtime " + this.mode);
-            }
-        }
-    }
-
-    public IFrameWriter getCoreOperator() {
-        return coreOperator;
-    }
-
-    public void setCoreOperator(IFrameWriter coreOperator) {
-        this.coreOperator = coreOperator;
-        mBuffer.setFrameWriter(coreOperator);
-        frameEventCallback.setCoreOperator(coreOperator);
-    }
-
-    public boolean isFinished() {
-        return finished;
-    }
-
-    public void setFinished(boolean finished) {
-        this.finished = finished;
-    }
-
-    public long getProcessed() {
-        return nProcessed;
-    }
-
-    public FeedRuntimeId getRuntimeId() {
-        return runtimeId;
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        coreOperator.open();
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        coreOperator.fail();
-    }
-
-    public void reset(int nPartitions) {
-        this.mBuffer.setNumberOfPartitions(nPartitions);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Reset number of partitions to " + nPartitions + " for " + this.runtimeId);
-        }
-        if (mBuffer != null) {
-            mBuffer.reset();
-        }
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public IFeedManager getFeedManager() {
-        return feedManager;
-    }
-
-    public MonitoredBuffer getmBuffer() {
-        return mBuffer;
-    }
-
-    public boolean isThrottlingEnabled() {
-        return throttlingEnabled;
-    }
-
-    public void setThrottlingEnabled(boolean throttlingEnabled) {
-        if (this.throttlingEnabled != throttlingEnabled) {
-            this.throttlingEnabled = throttlingEnabled;
-            IFeedMessage throttlingEnabledMesg = new ThrottlingEnabledFeedMessage(connectionId, runtimeId);
-            feedManager.getFeedMessageService().sendMessage(throttlingEnabledMesg);
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Throttling " + throttlingEnabled + " for " + this.connectionId + "[" + runtimeId + "]");
-            }
-        }
-    }
-
-    public boolean isBufferingEnabled() {
-        return bufferingEnabled;
-    }
-
-    public void setBufferingEnabled(boolean bufferingEnabled) {
-        this.bufferingEnabled = bufferingEnabled;
-    }
-}
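
The removed FeedRuntimeInputHandler resolved memory-pool exhaustion according to the feed's ingestion policy: spill to disk if allowed, otherwise discard, otherwise enable throttling, and only then report unresolvable congestion. A minimal sketch of that decision order follows; the FeedPolicy interface and CongestionAction enum here are assumptions for illustration, not the project's API.

    // Illustrative sketch only: congestion resolution order, modeled on the removed input handler.
    interface FeedPolicy { // hypothetical stand-in for the feed policy accessor
        boolean spillToDiskOnCongestion();
        boolean discardOnCongestion();
        boolean throttlingEnabled();
    }

    enum CongestionAction { SPILL, DISCARD, THROTTLE, REPORT_UNRESOLVABLE }

    final class CongestionPolicy {
        static CongestionAction onPoolExhausted(FeedPolicy policy) {
            if (policy.spillToDiskOnCongestion()) {
                return CongestionAction.SPILL;           // write frames to disk, replay later
            } else if (policy.discardOnCongestion()) {
                return CongestionAction.DISCARD;         // drop frames up to the policy limit
            } else if (policy.throttlingEnabled()) {
                return CongestionAction.THROTTLE;        // ask upstream to slow down
            }
            return CongestionAction.REPORT_UNRESOLVABLE; // nothing left to try
        }
    }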

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeManager.java
deleted file mode 100644
index abd5daa..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeManager.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IFeedConnectionManager;
-
-public class FeedRuntimeManager {
-
-    private static Logger LOGGER = Logger.getLogger(FeedRuntimeManager.class.getName());
-
-    private final FeedConnectionId connectionId;
-    private final IFeedConnectionManager connectionManager;
-    private final Map<FeedRuntimeId, FeedRuntime> feedRuntimes;
-
-    private final ExecutorService executorService;
-
-    public FeedRuntimeManager(FeedConnectionId connectionId, IFeedConnectionManager feedConnectionManager) {
-        this.connectionId = connectionId;
-        this.feedRuntimes = new ConcurrentHashMap<FeedRuntimeId, FeedRuntime>();
-        this.executorService = Executors.newCachedThreadPool();
-        this.connectionManager = feedConnectionManager;
-    }
-
-    public void close() throws IOException {
-        if (executorService != null) {
-            executorService.shutdownNow();
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Shut down executor service for :" + connectionId);
-            }
-        }
-    }
-
-    public FeedRuntime getFeedRuntime(FeedRuntimeId runtimeId) {
-        return feedRuntimes.get(runtimeId);
-    }
-
-    public void registerFeedRuntime(FeedRuntimeId runtimeId, FeedRuntime feedRuntime) {
-        feedRuntimes.put(runtimeId, feedRuntime);
-    }
-
-    public synchronized void deregisterFeedRuntime(FeedRuntimeId runtimeId) {
-        feedRuntimes.remove(runtimeId);
-        if (feedRuntimes.isEmpty()) {
-            connectionManager.deregisterFeed(connectionId);
-        }
-    }
-
-    public ExecutorService getExecutorService() {
-        return executorService;
-    }
-
-    public Set<FeedRuntimeId> getFeedRuntimes() {
-        return feedRuntimes.keySet();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeReport.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeReport.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeReport.java
deleted file mode 100644
index d7717ac..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedRuntimeReport.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-public class FeedRuntimeReport {
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitAckMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitAckMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitAckMessage.java
deleted file mode 100644
index ada6566..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitAckMessage.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import javax.xml.bind.DatatypeConverter;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.message.FeedMessage;
-
-public class FeedTupleCommitAckMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-    private int intakePartition;
-    private int base;
-    private byte[] commitAcks;
-
-    public FeedTupleCommitAckMessage(FeedConnectionId connectionId, int intakePartition, int base, byte[] commitAcks) {
-        super(MessageType.COMMIT_ACK);
-        this.connectionId = connectionId;
-        this.intakePartition = intakePartition;
-        this.base = base;
-        this.commitAcks = commitAcks;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.INTAKE_PARTITION, intakePartition);
-        obj.put(FeedConstants.MessageConstants.BASE, base);
-        String commitAcksString = DatatypeConverter.printBase64Binary(commitAcks);
-        obj.put(FeedConstants.MessageConstants.COMMIT_ACKS, commitAcksString);
-        return obj;
-    }
-
-    public static FeedTupleCommitAckMessage read(JSONObject obj) throws JSONException {
-        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
-                obj.getString(FeedConstants.MessageConstants.FEED));
-        FeedConnectionId connectionId = new FeedConnectionId(feedId,
-                obj.getString(FeedConstants.MessageConstants.DATASET));
-        int intakePartition = obj.getInt(FeedConstants.MessageConstants.INTAKE_PARTITION);
-        int base = obj.getInt(FeedConstants.MessageConstants.BASE);
-        String commitAcksString = obj.getString(FeedConstants.MessageConstants.COMMIT_ACKS);
-        byte[] commitAcks = DatatypeConverter.parseBase64Binary(commitAcksString);
-        return new FeedTupleCommitAckMessage(connectionId, intakePartition, base, commitAcks);
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public int getIntakePartition() {
-        return intakePartition;
-    }
-
-    public byte[] getCommitAcks() {
-        return commitAcks;
-    }
-
-    public void reset(int intakePartition, int base, byte[] commitAcks) {
-        this.intakePartition = intakePartition;
-        this.base = base;
-        this.commitAcks = commitAcks;
-    }
-
-    public int getBase() {
-        return base;
-    }
-
-    public void setBase(int base) {
-        this.base = base;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitResponseMessage.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitResponseMessage.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitResponseMessage.java
deleted file mode 100644
index cc32034..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedTupleCommitResponseMessage.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import org.apache.asterix.common.feeds.message.FeedMessage;
-
-public class FeedTupleCommitResponseMessage extends FeedMessage {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedConnectionId connectionId;
-    private final int intakePartition;
-    private final int maxWindowAcked;
-
-    public FeedTupleCommitResponseMessage(FeedConnectionId connectionId, int intakePartition, int maxWindowAcked) {
-        super(MessageType.COMMIT_ACK_RESPONSE);
-        this.connectionId = connectionId;
-        this.intakePartition = intakePartition;
-        this.maxWindowAcked = maxWindowAcked;
-    }
-
-    @Override
-    public JSONObject toJSON() throws JSONException {
-        JSONObject obj = new JSONObject();
-        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
-        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
-        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
-        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
-        obj.put(FeedConstants.MessageConstants.INTAKE_PARTITION, intakePartition);
-        obj.put(FeedConstants.MessageConstants.MAX_WINDOW_ACKED, maxWindowAcked);
-        return obj;
-    }
-
-    @Override
-    public String toString() {
-        return connectionId + "[" + intakePartition + "]" + "(" + maxWindowAcked + ")";
-    }
-
-    public static FeedTupleCommitResponseMessage read(JSONObject obj) throws JSONException {
-        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
-                obj.getString(FeedConstants.MessageConstants.FEED));
-        FeedConnectionId connectionId = new FeedConnectionId(feedId,
-                obj.getString(FeedConstants.MessageConstants.DATASET));
-        int intakePartition = obj.getInt(FeedConstants.MessageConstants.INTAKE_PARTITION);
-        int maxWindowAcked = obj.getInt(FeedConstants.MessageConstants.MAX_WINDOW_ACKED);
-        return new FeedTupleCommitResponseMessage(connectionId, intakePartition, maxWindowAcked);
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public int getMaxWindowAcked() {
-        return maxWindowAcked;
-    }
-
-    public int getIntakePartition() {
-        return intakePartition;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameCollection.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameCollection.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameCollection.java
deleted file mode 100644
index 9ed547e..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameCollection.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-import org.apache.asterix.common.feeds.api.IFeedMemoryComponent;
-import org.apache.asterix.common.feeds.api.IFeedMemoryManager;
-
-/**
- * Represents an expandable collection of frames.
- */
-public class FrameCollection implements IFeedMemoryComponent {
-
-    /** A unique identifier for the feed memory component **/
-    private final int componentId;
-
-    /** A collection of frames (each being a ByteBuffer) **/
-    private final List<ByteBuffer> frames = new LinkedList<ByteBuffer>();
-
-    /** The permitted maximum size, the collection may grow to **/
-    private int maxSize;
-
-    /** The {@link IFeedMemoryManager} for the NodeController **/
-    private final IFeedMemoryManager memoryManager;
-
-    public FrameCollection(int componentId, IFeedMemoryManager memoryManager, int maxSize) {
-        this.componentId = componentId;
-        this.maxSize = maxSize;
-        this.memoryManager = memoryManager;
-    }
-
-    public boolean addFrame(ByteBuffer frame) {
-        if (frames.size() == maxSize) {
-            boolean expansionGranted = memoryManager.expandMemoryComponent(this);
-            if (!expansionGranted) {
-                return false;
-            }
-        }
-        ByteBuffer storageBuffer = ByteBuffer.allocate(frame.capacity());
-        storageBuffer.put(frame);
-        frames.add(storageBuffer);
-        storageBuffer.flip();
-        return true;
-    }
-
-    public Iterator<ByteBuffer> getFrameCollectionIterator() {
-        return frames.iterator();
-    }
-
-    @Override
-    public int getTotalAllocation() {
-        return frames.size();
-    }
-
-    @Override
-    public Type getType() {
-        return Type.COLLECTION;
-    }
-
-    @Override
-    public int getComponentId() {
-        return componentId;
-    }
-
-    @Override
-    public void expand(int delta) {
-        maxSize = maxSize + delta;
-    }
-
-    @Override
-    public void reset() {
-        frames.clear();
-        maxSize = IFeedMemoryManager.START_COLLECTION_SIZE;
-    }
-
-    @Override
-    public String toString() {
-        return "FrameCollection" + "[" + componentId + "]" + "(" + frames.size() + "/" + maxSize + ")";
-    }
-
-}
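
The FrameCollection removed above captured a small but useful pattern: frames are copied into a bounded list, and the component asks the node-wide memory manager for permission to grow once it reaches its cap. A minimal sketch of that pattern, assuming a simplified stand-in MemoryManager interface rather than the project's IFeedMemoryManager:

    import java.nio.ByteBuffer;
    import java.util.ArrayList;
    import java.util.List;

    // Simplified stand-in for the feed memory manager consulted before growing.
    interface MemoryManager {
        boolean grantExpansion(); // true if one more frame may be retained
    }

    class BoundedFrameBuffer {
        private final List<ByteBuffer> frames = new ArrayList<>();
        private final MemoryManager memoryManager;
        private int maxSize;

        BoundedFrameBuffer(MemoryManager memoryManager, int maxSize) {
            this.memoryManager = memoryManager;
            this.maxSize = maxSize;
        }

        // Copies the frame; returns false when at capacity and expansion is denied,
        // leaving the caller to spill, discard, or retry later.
        boolean add(ByteBuffer frame) {
            if (frames.size() == maxSize) {
                if (!memoryManager.grantExpansion()) {
                    return false;
                }
                maxSize++; // expansion granted
            }
            ByteBuffer copy = ByteBuffer.allocate(frame.remaining());
            copy.put(frame.duplicate()); // duplicate() keeps the source position intact
            copy.flip();
            frames.add(copy);
            return true;
        }
    }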

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameDistributor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameDistributor.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameDistributor.java
deleted file mode 100644
index 9e106fb..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameDistributor.java
+++ /dev/null
@@ -1,360 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IFeedMemoryComponent.Type;
-import org.apache.asterix.common.feeds.api.IFeedMemoryManager;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class FrameDistributor {
-
-    private static final Logger LOGGER = Logger.getLogger(FrameDistributor.class.getName());
-
-    private static final long MEMORY_AVAILABLE_POLL_PERIOD = 1000; // 1 second
-
-    private final FeedId feedId;
-    private final FeedRuntimeType feedRuntimeType;
-    private final int partition;
-    private final IFeedMemoryManager memoryManager;
-    private final boolean enableSynchronousTransfer;
-    /** A map storing the registered frame readers ({@code FeedFrameCollector}. **/
-    private final Map<IFrameWriter, FeedFrameCollector> registeredCollectors;
-    private final FrameTupleAccessor fta;
-
-    private DataBucketPool pool;
-    private DistributionMode distributionMode;
-    private boolean spillToDiskRequired = false;
-
-    public enum DistributionMode {
-        /**
-         * A single feed frame collector is registered for receiving tuples.
-         * Tuple is sent via synchronous call, that is no buffering is involved
-         **/
-        SINGLE,
-
-        /**
-         * Multiple feed frame collectors are concurrently registered for
-         * receiving tuples.
-         **/
-        SHARED,
-
-        /**
-         * Feed tuples are not being processed, irrespective of # of registered
-         * feed frame collectors.
-         **/
-        INACTIVE
-    }
-
-    public FrameDistributor(FeedId feedId, FeedRuntimeType feedRuntimeType, int partition,
-            boolean enableSynchronousTransfer, IFeedMemoryManager memoryManager, FrameTupleAccessor fta)
-                    throws HyracksDataException {
-        this.feedId = feedId;
-        this.feedRuntimeType = feedRuntimeType;
-        this.partition = partition;
-        this.memoryManager = memoryManager;
-        this.enableSynchronousTransfer = enableSynchronousTransfer;
-        this.registeredCollectors = new HashMap<IFrameWriter, FeedFrameCollector>();
-        this.distributionMode = DistributionMode.INACTIVE;
-        this.fta = fta;
-    }
-
-    public void notifyEndOfFeed() {
-        DataBucket bucket = getDataBucket();
-        if (bucket != null) {
-            sendEndOfFeedDataBucket(bucket);
-        } else {
-            while (bucket == null) {
-                try {
-                    Thread.sleep(MEMORY_AVAILABLE_POLL_PERIOD);
-                    bucket = getDataBucket();
-                } catch (InterruptedException e) {
-                    break;
-                }
-            }
-            if (bucket != null) {
-                sendEndOfFeedDataBucket(bucket);
-            }
-        }
-    }
-
-    private void sendEndOfFeedDataBucket(DataBucket bucket) {
-        bucket.setContentType(DataBucket.ContentType.EOD);
-        nextBucket(bucket);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("End of feed data packet sent " + this.feedId);
-        }
-    }
-
-    public synchronized void registerFrameCollector(FeedFrameCollector frameCollector) {
-        DistributionMode currentMode = distributionMode;
-        switch (distributionMode) {
-            case INACTIVE:
-                if (!enableSynchronousTransfer) {
-                    pool = (DataBucketPool) memoryManager.getMemoryComponent(Type.POOL);
-                    frameCollector.start();
-                }
-                registeredCollectors.put(frameCollector.getFrameWriter(), frameCollector);
-                setMode(DistributionMode.SINGLE);
-                break;
-            case SINGLE:
-                pool = (DataBucketPool) memoryManager.getMemoryComponent(Type.POOL);
-                registeredCollectors.put(frameCollector.getFrameWriter(), frameCollector);
-                for (FeedFrameCollector reader : registeredCollectors.values()) {
-                    reader.start();
-                }
-                setMode(DistributionMode.SHARED);
-                break;
-            case SHARED:
-                frameCollector.start();
-                registeredCollectors.put(frameCollector.getFrameWriter(), frameCollector);
-                break;
-        }
-        evaluateIfSpillIsEnabled();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(
-                    "Switching to " + distributionMode + " mode from " + currentMode + " mode " + " Feed id " + feedId);
-        }
-    }
-
-    public synchronized void deregisterFrameCollector(FeedFrameCollector frameCollector) {
-        switch (distributionMode) {
-            case INACTIVE:
-                throw new IllegalStateException(
-                        "Invalid attempt to deregister frame collector in " + distributionMode + " mode.");
-            case SHARED:
-                frameCollector.closeCollector();
-                registeredCollectors.remove(frameCollector.getFrameWriter());
-                int nCollectors = registeredCollectors.size();
-                if (nCollectors == 1) {
-                    FeedFrameCollector loneCollector = registeredCollectors.values().iterator().next();
-                    setMode(DistributionMode.SINGLE);
-                    loneCollector.setState(FeedFrameCollector.State.TRANSITION);
-                    loneCollector.closeCollector();
-                    memoryManager.releaseMemoryComponent(pool);
-                    evaluateIfSpillIsEnabled();
-                } else {
-                    if (!spillToDiskRequired) {
-                        evaluateIfSpillIsEnabled();
-                    }
-                }
-                break;
-            case SINGLE:
-                frameCollector.closeCollector();
-                setMode(DistributionMode.INACTIVE);
-                spillToDiskRequired = false;
-                break;
-
-        }
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Deregistered frame reader" + frameCollector + " from feed distributor for " + feedId);
-        }
-    }
-
-    public void evaluateIfSpillIsEnabled() {
-        if (!spillToDiskRequired) {
-            for (FeedFrameCollector collector : registeredCollectors.values()) {
-                spillToDiskRequired = spillToDiskRequired
-                        || collector.getFeedPolicyAccessor().spillToDiskOnCongestion();
-                if (spillToDiskRequired) {
-                    break;
-                }
-            }
-        }
-    }
-
-    public boolean deregisterFrameCollector(IFrameWriter frameWriter) {
-        FeedFrameCollector collector = registeredCollectors.get(frameWriter);
-        if (collector != null) {
-            deregisterFrameCollector(collector);
-            return true;
-        }
-        return false;
-    }
-
-    public synchronized void setMode(DistributionMode mode) {
-        this.distributionMode = mode;
-    }
-
-    public boolean isRegistered(IFrameWriter writer) {
-        return registeredCollectors.get(writer) != null;
-    }
-
-    public synchronized void nextFrame(ByteBuffer frame) throws HyracksDataException {
-        switch (distributionMode) {
-            case INACTIVE:
-                break;
-            case SINGLE:
-                FeedFrameCollector collector = registeredCollectors.values().iterator().next();
-                switch (collector.getState()) {
-                    case HANDOVER:
-                    case ACTIVE:
-                        if (enableSynchronousTransfer) {
-                            collector.nextFrame(frame); // processing is synchronous
-                        } else {
-                            handleDataBucket(frame);
-                        }
-                        break;
-                    case TRANSITION:
-                        handleDataBucket(frame);
-                        break;
-                    case FINISHED:
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Discarding fetched tuples, feed has ended [" + registeredCollectors.get(0)
-                                    + "]" + " Feed Id " + feedId + " frame distributor " + this.getFeedRuntimeType());
-                        }
-                        registeredCollectors.remove(0);
-                        break;
-                }
-                break;
-            case SHARED:
-                handleDataBucket(frame);
-                break;
-        }
-    }
-
-    private void nextBucket(DataBucket bucket) {
-        for (FeedFrameCollector collector : registeredCollectors.values()) {
-            collector.sendMessage(bucket); // asynchronous call
-        }
-    }
-
-    private void handleDataBucket(ByteBuffer frame) throws HyracksDataException {
-        DataBucket bucket = getDataBucket();
-        if (bucket == null) {
-            handleFrameDuringMemoryCongestion(frame);
-        } else {
-            bucket.reset(frame);
-            bucket.setDesiredReadCount(registeredCollectors.size());
-            nextBucket(bucket);
-        }
-    }
-
-    private void handleFrameDuringMemoryCongestion(ByteBuffer frame) throws HyracksDataException {
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Unable to allocate memory, will evaluate the need to spill");
-        }
-        // wait till memory is available
-    }
-
-    private DataBucket getDataBucket() {
-        DataBucket bucket = null;
-        if (pool != null) {
-            bucket = pool.getDataBucket();
-            if (bucket != null) {
-                bucket.setDesiredReadCount(registeredCollectors.size());
-                return bucket;
-            } else {
-                return null;
-            }
-        }
-        return null;
-    }
-
-    public DistributionMode getMode() {
-        return distributionMode;
-    }
-
-    public void close() {
-        switch (distributionMode) {
-            case INACTIVE:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("FrameDistributor is " + distributionMode);
-                }
-                break;
-            case SINGLE:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Disconnecting single frame reader in " + distributionMode + " mode " + " for  feedId "
-                            + feedId + " " + this.feedRuntimeType);
-                }
-                setMode(DistributionMode.INACTIVE);
-                if (!enableSynchronousTransfer) {
-                    notifyEndOfFeed(); // send EOD Data Bucket
-                    waitForCollectorsToFinish();
-                }
-                registeredCollectors.values().iterator().next().disconnect();
-                break;
-            case SHARED:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Signalling End Of Feed; currently operating in " + distributionMode + " mode");
-                }
-                notifyEndOfFeed(); // send EOD Data Bucket
-                waitForCollectorsToFinish();
-                break;
-        }
-    }
-
-    private void waitForCollectorsToFinish() {
-        synchronized (registeredCollectors.values()) {
-            while (!allCollectorsFinished()) {
-                try {
-                    registeredCollectors.values().wait();
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
-    private boolean allCollectorsFinished() {
-        boolean allFinished = true;
-        for (FeedFrameCollector collector : registeredCollectors.values()) {
-            allFinished = allFinished && collector.getState().equals(FeedFrameCollector.State.FINISHED);
-        }
-        return allFinished;
-    }
-
-    public Collection<FeedFrameCollector> getRegisteredCollectors() {
-        return registeredCollectors.values();
-    }
-
-    public Map<IFrameWriter, FeedFrameCollector> getRegisteredReaders() {
-        return registeredCollectors;
-    }
-
-    public FeedId getFeedId() {
-        return feedId;
-    }
-
-    public DistributionMode getDistributionMode() {
-        return distributionMode;
-    }
-
-    public FeedRuntimeType getFeedRuntimeType() {
-        return feedRuntimeType;
-    }
-
-    public int getPartition() {
-        return partition;
-    }
-
-    public FrameTupleAccessor getFta() {
-        return fta;
-    }
-
-}
\ No newline at end of file
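
For context on the fan-out implemented by the FrameDistributor above: in SHARED mode each incoming frame is wrapped in a pooled DataBucket whose desired read count equals the number of registered collectors, and every collector consumes the same bucket asynchronously. A rough, hypothetical analogue of that reference-counted broadcast (the names below are illustrative, not the project's API):

    import java.nio.ByteBuffer;
    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;
    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.function.Consumer;

    // One shared copy of a frame, logically released once every collector has read it.
    class SharedFrame {
        final ByteBuffer data;
        private final AtomicInteger remainingReaders;

        SharedFrame(ByteBuffer frame, int readers) {
            ByteBuffer copy = ByteBuffer.allocate(frame.remaining());
            copy.put(frame.duplicate());
            copy.flip();
            this.data = copy;
            this.remainingReaders = new AtomicInteger(readers);
        }

        void doneReading() {
            if (remainingReaders.decrementAndGet() == 0) {
                // the real DataBucket would be returned to its pool here
            }
        }
    }

    class Broadcaster {
        private final List<Consumer<SharedFrame>> collectors = new CopyOnWriteArrayList<>();

        void register(Consumer<SharedFrame> collector) {
            collectors.add(collector);
        }

        void nextFrame(ByteBuffer frame) {
            SharedFrame shared = new SharedFrame(frame, collectors.size());
            for (Consumer<SharedFrame> collector : collectors) {
                collector.accept(shared); // the real collectors run on their own threads
            }
        }
    }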

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameEventCallback.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameEventCallback.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameEventCallback.java
deleted file mode 100644
index 5551ce6..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FrameEventCallback.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public class FrameEventCallback implements IFrameEventCallback {
-
-    private static final Logger LOGGER = Logger.getLogger(FrameEventCallback.class.getName());
-
-    private final FeedPolicyAccessor fpa;
-    private final FeedRuntimeInputHandler inputSideHandler;
-    private IFrameWriter coreOperator;
-
-    public FrameEventCallback(FeedPolicyAccessor fpa, FeedRuntimeInputHandler inputSideHandler,
-            IFrameWriter coreOperator) {
-        this.fpa = fpa;
-        this.inputSideHandler = inputSideHandler;
-        this.coreOperator = coreOperator;
-    }
-
-    @Override
-    public void frameEvent(FrameEvent event) {
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Frame Event for " + inputSideHandler.getRuntimeId() + " " + event);
-        }
-        if (!event.equals(FrameEvent.FINISHED_PROCESSING_SPILLAGE)
-                && inputSideHandler.getMode().equals(Mode.PROCESS_SPILL)) {
-            return;
-        }
-        switch (event) {
-            case PENDING_WORK_THRESHOLD_REACHED:
-                if (fpa.spillToDiskOnCongestion()) {
-                    inputSideHandler.setMode(Mode.SPILL);
-                } else if (fpa.discardOnCongestion()) {
-                    inputSideHandler.setMode(Mode.DISCARD);
-                } else if (fpa.throttlingEnabled()) {
-                    inputSideHandler.setThrottlingEnabled(true);
-                } else {
-                    try {
-                        inputSideHandler.reportUnresolvableCongestion();
-                    } catch (HyracksDataException e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to report congestion!!!");
-                        }
-                    }
-                }
-                break;
-            case FINISHED_PROCESSING:
-                inputSideHandler.setFinished(true);
-                synchronized (coreOperator) {
-                    coreOperator.notifyAll();
-                }
-                break;
-            case PENDING_WORK_DONE:
-                switch (inputSideHandler.getMode()) {
-                    case SPILL:
-                    case DISCARD:
-                    case POST_SPILL_DISCARD:
-                        inputSideHandler.setMode(Mode.PROCESS);
-                        break;
-                    default:
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Received " + event + " ignoring as operating in " + inputSideHandler.getMode());
-                        }
-                }
-                break;
-            case FINISHED_PROCESSING_SPILLAGE:
-                inputSideHandler.setMode(Mode.PROCESS);
-                break;
-            default:
-                break;
-        }
-    }
-
-    public void setCoreOperator(IFrameWriter coreOperator) {
-        this.coreOperator = coreOperator;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/IngestionRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/IngestionRuntime.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/IngestionRuntime.java
deleted file mode 100644
index 926df39..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/IngestionRuntime.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.logging.Level;
-
-import org.apache.asterix.common.feeds.api.IAdapterRuntimeManager;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-
-public class IngestionRuntime extends SubscribableRuntime {
-
-    private final IAdapterRuntimeManager adapterRuntimeManager;
-
-    public IngestionRuntime(FeedId feedId, FeedRuntimeId runtimeId, DistributeFeedFrameWriter feedWriter,
-            RecordDescriptor recordDesc, IAdapterRuntimeManager adaptorRuntimeManager) {
-        super(feedId, runtimeId, null, feedWriter, recordDesc);
-        this.adapterRuntimeManager = adaptorRuntimeManager;
-    }
-
-    public void subscribeFeed(FeedPolicyAccessor fpa, CollectionRuntime collectionRuntime) throws Exception {
-        FeedFrameCollector reader = dWriter.subscribeFeed(fpa, collectionRuntime.getInputHandler(),
-                collectionRuntime.getConnectionId());
-        collectionRuntime.setFrameCollector(reader);
-        
-        if (dWriter.getDistributionMode().equals(FrameDistributor.DistributionMode.SINGLE)) {
-            adapterRuntimeManager.start();
-        }
-        subscribers.add(collectionRuntime);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Subscribed feed collection [" + collectionRuntime + "] to " + this);
-        }
-    }
-
-    public void unsubscribeFeed(CollectionRuntime collectionRuntime) throws Exception {
-        dWriter.unsubscribeFeed(collectionRuntime.getInputHandler());
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Unsubscribed feed collection [" + collectionRuntime + "] from " + this);
-        }
-        if (dWriter.getDistributionMode().equals(FrameDistributor.DistributionMode.INACTIVE)) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Stopping adapter for " + this + " as no more registered collectors");
-            }
-            adapterRuntimeManager.stop();
-        }
-        subscribers.remove(collectionRuntime);
-    }
-
-    public void endOfFeed() {
-        dWriter.notifyEndOfFeed();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Notified End Of Feed  [" + this + "]");
-        }
-    }
-
-    public IAdapterRuntimeManager getAdapterRuntimeManager() {
-        return adapterRuntimeManager;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakePartitionStatistics.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakePartitionStatistics.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakePartitionStatistics.java
deleted file mode 100644
index 5601f73..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakePartitionStatistics.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.BitSet;
-
-public class IntakePartitionStatistics {
-
-    public static int ACK_WINDOW_SIZE = 1024;
-    private BitSet bitSet;
-
-    public IntakePartitionStatistics(int partition, int base) {
-        this.bitSet = new BitSet(ACK_WINDOW_SIZE);
-    }
-
-    public void ackRecordId(int recordId) {
-        int posIndexWithinBase = recordId % ACK_WINDOW_SIZE;
-        this.bitSet.set(posIndexWithinBase);
-    }
-
-    public byte[] getAckInfo() {
-        return bitSet.toByteArray();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakeSideMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakeSideMonitoredBuffer.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakeSideMonitoredBuffer.java
deleted file mode 100644
index 10b7ddb..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/IntakeSideMonitoredBuffer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IExceptionHandler;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback;
-import org.apache.asterix.common.feeds.api.IFramePostProcessor;
-import org.apache.asterix.common.feeds.api.IFramePreprocessor;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class IntakeSideMonitoredBuffer extends MonitoredBuffer {
-
-    public IntakeSideMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter,
-            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
-            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
-            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
-        super(ctx, inputHandler, frameWriter, fta,  recordDesc, metricCollector, connectionId, runtimeId,
-                exceptionHandler, callback, nPartitions, policyAccessor);
-    }
-
-    @Override
-    protected boolean monitorProcessingRate() {
-        return false;
-    }
-
-    @Override
-    protected boolean logInflowOutflowRate() {
-        return false;
-    }
-
-    @Override
-    protected IFramePreprocessor getFramePreProcessor() {
-        return null;
-    }
-
-    @Override
-    protected IFramePostProcessor getFramePostProcessor() {
-        return null;
-    }
-
-    @Override
-    protected boolean monitorInputQueueLength() {
-        return false;
-    }
-
-    @Override
-    protected boolean reportOutflowRate() {
-        return false;
-    }
-
-    @Override
-    protected boolean reportInflowRate() {
-        return true;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageListener.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageListener.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageListener.java
deleted file mode 100644
index c2753d4..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageListener.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.nio.CharBuffer;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-public class MessageListener {
-
-    private static final Logger LOGGER = Logger.getLogger(MessageListener.class.getName());
-
-    private int port;
-    private final LinkedBlockingQueue<String> outbox;
-
-    private ExecutorService executorService = Executors.newFixedThreadPool(10);
-
-    private MessageListenerServer listenerServer;
-
-    public MessageListener(int port, LinkedBlockingQueue<String> outbox) {
-        this.port = port;
-        this.outbox = outbox;
-    }
-
-    public void stop() {
-        listenerServer.stop();
-        if (!executorService.isShutdown()) {
-            executorService.shutdownNow();
-        }
-    }
-
-    public void start() throws IOException {
-        listenerServer = new MessageListenerServer(port, outbox);
-        executorService.execute(listenerServer);
-    }
-
-    private static class MessageListenerServer implements Runnable {
-
-        private final int port;
-        private final LinkedBlockingQueue<String> outbox;
-        private ServerSocket server;
-
-        private static final char EOL = (char) "\n".getBytes()[0];
-
-        public MessageListenerServer(int port, LinkedBlockingQueue<String> outbox) {
-            this.port = port;
-            this.outbox = outbox;
-        }
-
-        public void stop() {
-            try {
-                server.close();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-
-        @Override
-        public void run() {
-            Socket client = null;
-            try {
-                server = new ServerSocket(port);
-                client = server.accept();
-                InputStream in = client.getInputStream();
-                CharBuffer buffer = CharBuffer.allocate(5000);
-                char ch;
-                while (true) {
-                    ch = (char) in.read();
-                    if (((int) ch) == -1) {
-                        break;
-                    }
-                    while (ch != EOL) {
-                        buffer.put(ch);
-                        ch = (char) in.read();
-                    }
-                    buffer.flip();
-                    String s = new String(buffer.array());
-                    synchronized (outbox) {
-                        outbox.add(s + "\n");
-                    }
-                    buffer.position(0);
-                    buffer.limit(5000);
-                }
-
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unable to start Message listener" + server);
-                }
-            } finally {
-                if (server != null) {
-                    try {
-                        server.close();
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-
-        }
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageReceiver.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageReceiver.java
deleted file mode 100644
index 6490c6a..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MessageReceiver.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IMessageReceiver;
-
-public abstract class MessageReceiver<T> implements IMessageReceiver<T> {
-
-    protected static final Logger LOGGER = Logger.getLogger(MessageReceiver.class.getName());
-
-    protected final LinkedBlockingQueue<T> inbox;
-    protected ExecutorService executor;
-
-    public MessageReceiver() {
-        inbox = new LinkedBlockingQueue<T>();
-    }
-
-    public abstract void processMessage(T message) throws Exception;
-
-    @Override
-    public void start() {
-        executor = Executors.newSingleThreadExecutor();
-        executor.execute(new MessageReceiverRunnable<T>(this));
-    }
-
-    @Override
-    public synchronized void sendMessage(T message) {
-        inbox.add(message);
-    }
-
-    @Override
-    public void close(boolean processPending) {
-        if (executor != null) {
-            executor.shutdown();
-            executor = null;
-            if (processPending) {
-                flushPendingMessages();
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Will discard the pending frames " + inbox.size());
-                }
-            }
-        }
-    }
-
-    private static class MessageReceiverRunnable<T> implements Runnable {
-
-        private final LinkedBlockingQueue<T> inbox;
-        private final MessageReceiver<T> messageReceiver;
-
-        public MessageReceiverRunnable(MessageReceiver<T> messageReceiver) {
-            this.inbox = messageReceiver.inbox;
-            this.messageReceiver = messageReceiver;
-        }
-
-        @Override
-        public void run() {
-            while (true) {
-                try {
-                    T message = inbox.take();
-                    messageReceiver.processMessage(message);
-                } catch (InterruptedException e) {
-                    e.printStackTrace();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        }
-    }
-
-    protected void flushPendingMessages() {
-        while (!inbox.isEmpty()) {
-            T message = null;
-            try {
-                message = inbox.take();
-                processMessage(message);
-            } catch (InterruptedException ie) {
-                // ignore exception but break from the loop
-                break;
-            } catch (Exception e) {
-                e.printStackTrace();
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Exception " + e + " in processing message " + message);
-                }
-            }
-        }
-    }
-
-}
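
The abstract MessageReceiver above is a plain producer/consumer loop over a LinkedBlockingQueue: sendMessage() enqueues, and a single executor thread drains the queue and hands each item to processMessage(). A hedged usage sketch against the class as it stood before this removal (the subclass below is hypothetical):

    // Hypothetical subclass that simply logs each message taken off the queue.
    class LoggingMessageReceiver extends MessageReceiver<String> {
        @Override
        public void processMessage(String message) {
            System.out.println("received: " + message);
        }

        public static void main(String[] args) {
            LoggingMessageReceiver receiver = new LoggingMessageReceiver();
            receiver.start();              // spins up the single consumer thread
            receiver.sendMessage("hello"); // producers enqueue; the consumer drains
            receiver.close(true);          // true => drain pending messages on shutdown
            // Note: the consumer loop in the removed class runs indefinitely and is not
            // interrupted by close(), so this demo process will not exit on its own.
        }
    }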


[04/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
index 09e193a..dc9fb50 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/FeedTupleTranslator.java
@@ -33,14 +33,13 @@ import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.external.feed.api.IFeed;
+import org.apache.asterix.external.feed.api.IFeed.FeedType;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.bootstrap.MetadataPrimaryIndexes;
 import org.apache.asterix.metadata.bootstrap.MetadataRecordTypes;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.Feed.FeedType;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
-import org.apache.asterix.metadata.entities.SecondaryFeed;
 import org.apache.asterix.om.base.AMutableString;
 import org.apache.asterix.om.base.ANull;
 import org.apache.asterix.om.base.ARecord;
@@ -82,7 +81,7 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
         int recordLength = frameTuple.getFieldLength(FEED_PAYLOAD_TUPLE_FIELD_INDEX);
         ByteArrayInputStream stream = new ByteArrayInputStream(serRecord, recordStartOffset, recordLength);
         DataInput in = new DataInputStream(stream);
-        ARecord feedRecord = (ARecord) recordSerDes.deserialize(in);
+        ARecord feedRecord = recordSerDes.deserialize(in);
         return createFeedFromARecord(feedRecord);
     }
 
@@ -103,18 +102,18 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
         String feedType = ((AString) feedRecord.getValueByPos(MetadataRecordTypes.FEED_ARECORD_FEED_TYPE_FIELD_INDEX))
                 .getStringValue();
 
-        FeedType feedTypeEnum = FeedType.valueOf(feedType.toUpperCase());
+        IFeed.FeedType feedTypeEnum = IFeed.FeedType.valueOf(feedType.toUpperCase());
         switch (feedTypeEnum) {
             case PRIMARY: {
                 ARecord feedTypeDetailsRecord = (ARecord) feedRecord
                         .getValueByPos(MetadataRecordTypes.FEED_ARECORD_PRIMARY_TYPE_DETAILS_FIELD_INDEX);
                 String adapterName = ((AString) feedTypeDetailsRecord
                         .getValueByPos(MetadataRecordTypes.FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_NAME_FIELD_INDEX))
-                        .getStringValue();
+                                .getStringValue();
 
-                IACursor cursor = ((AUnorderedList) feedTypeDetailsRecord
-                        .getValueByPos(MetadataRecordTypes.FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_CONFIGURATION_FIELD_INDEX))
-                        .getCursor();
+                IACursor cursor = ((AUnorderedList) feedTypeDetailsRecord.getValueByPos(
+                        MetadataRecordTypes.FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_CONFIGURATION_FIELD_INDEX))
+                                .getCursor();
                 String key;
                 String value;
                 Map<String, String> adaptorConfiguration = new HashMap<String, String>();
@@ -126,7 +125,8 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
                             .getStringValue();
                     adaptorConfiguration.put(key, value);
                 }
-                feed = new PrimaryFeed(dataverseName, feedName, adapterName, adaptorConfiguration, signature);
+                feed = new Feed(dataverseName, feedName, signature, FeedType.PRIMARY, feedName, adapterName,
+                        adaptorConfiguration);
 
             }
                 break;
@@ -136,9 +136,9 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
 
                 String sourceFeedName = ((AString) feedTypeDetailsRecord
                         .getValueByPos(MetadataRecordTypes.FEED_TYPE_SECONDARY_ARECORD_SOURCE_FEED_NAME_FIELD_INDEX))
-                        .getStringValue();
+                                .getStringValue();
 
-                feed = new SecondaryFeed(dataverseName, feedName, sourceFeedName, signature);
+                feed = new Feed(dataverseName, feedName, signature, FeedType.SECONDARY, sourceFeedName, null, null);
 
             }
                 break;
@@ -215,7 +215,6 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
 
         switch (feed.getFeedType()) {
             case PRIMARY: {
-                PrimaryFeed primaryFeed = (PrimaryFeed) feed;
 
                 IARecordBuilder primaryDetailsRecordBuilder = new RecordBuilder();
                 OrderedListBuilder listBuilder = new OrderedListBuilder();
@@ -229,16 +228,16 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
 
                 // write field 0
                 fieldValue.reset();
-                aString.setValue(primaryFeed.getAdaptorName());
+                aString.setValue(feed.getAdapterName());
                 stringSerde.serialize(aString, primaryRecordfieldValue.getDataOutput());
                 primaryDetailsRecordBuilder.addField(
                         MetadataRecordTypes.FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_NAME_FIELD_INDEX,
                         primaryRecordfieldValue);
 
                 // write field 1
-                listBuilder
-                        .reset((AUnorderedListType) MetadataRecordTypes.PRIMARY_FEED_DETAILS_RECORDTYPE.getFieldTypes()[MetadataRecordTypes.FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_CONFIGURATION_FIELD_INDEX]);
-                for (Map.Entry<String, String> property : primaryFeed.getAdaptorConfiguration().entrySet()) {
+                listBuilder.reset((AUnorderedListType) MetadataRecordTypes.PRIMARY_FEED_DETAILS_RECORDTYPE
+                        .getFieldTypes()[MetadataRecordTypes.FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_CONFIGURATION_FIELD_INDEX]);
+                for (Map.Entry<String, String> property : feed.getAdapterConfiguration().entrySet()) {
                     String name = property.getKey();
                     String value = property.getValue();
                     primaryRecordItemValue.reset();
@@ -262,15 +261,13 @@ public class FeedTupleTranslator extends AbstractTupleTranslator<Feed> {
                 break;
 
             case SECONDARY:
-                SecondaryFeed secondaryFeed = (SecondaryFeed) feed;
-
                 IARecordBuilder secondaryDetailsRecordBuilder = new RecordBuilder();
                 ArrayBackedValueStorage secondaryFieldValue = new ArrayBackedValueStorage();
                 secondaryDetailsRecordBuilder.reset(MetadataRecordTypes.SECONDARY_FEED_DETAILS_RECORDTYPE);
 
                 // write field 0
                 fieldValue.reset();
-                aString.setValue(secondaryFeed.getSourceFeedName());
+                aString.setValue(feed.getSourceFeedName());
                 stringSerde.serialize(aString, secondaryFieldValue.getDataOutput());
                 secondaryDetailsRecordBuilder.addField(
                         MetadataRecordTypes.FEED_ARECORD_SECONDARY_FIELD_DETAILS_SOURCE_FEED_NAME_FIELD_INDEX,

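The hunk above folds the former PrimaryFeed and SecondaryFeed subclasses into a single Feed entity distinguished by IFeed.FeedType. A minimal sketch of the two resulting constructor calls, with hypothetical dataverse, feed, and adaptor values; only the constructor shape and the FeedType enum are taken from the hunk itself:

    // Sketch only: argument values are made up for illustration.
    Map<String, String> adaptorConf = new HashMap<String, String>();
    adaptorConf.put("type-name", "TweetType"); // hypothetical adaptor property

    // A primary feed carries its own adaptor name and configuration.
    Feed primary = new Feed("SocialDV", "RawTweets", null /* applied function */,
            IFeed.FeedType.PRIMARY, "RawTweets", "push_twitter", adaptorConf);

    // A secondary feed only references the source feed it derives from.
    Feed secondary = new Feed("SocialDV", "CleanTweets", null /* applied function */,
            IFeed.FeedType.SECONDARY, "RawTweets", null, null);
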
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractDatasourceAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractDatasourceAdapter.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractDatasourceAdapter.java
deleted file mode 100644
index d65468e..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractDatasourceAdapter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Map;
-
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-
-/**
- * Represents the base class to be extended by every
- * implementation of the IDataSourceAdapter interface.
- */
-public abstract class AbstractDatasourceAdapter implements IDataSourceAdapter {
-
-    private static final long serialVersionUID = 1L;
-
-    public static final String KEY_PARSER_FACTORY = "parser";
-
-    protected Map<String, Object> configuration;
-    protected transient AlgebricksPartitionConstraint partitionConstraint;
-    protected IAType atype;
-    protected IHyracksTaskContext ctx;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractFeedDatasourceAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractFeedDatasourceAdapter.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractFeedDatasourceAdapter.java
deleted file mode 100644
index c231ad9..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AbstractFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.external.feeds.FeedPolicyEnforcer;
-
-
-public abstract class AbstractFeedDatasourceAdapter implements IDataSourceAdapter {
-
-    private static final long serialVersionUID = 1L;
-
-    protected FeedPolicyEnforcer policyEnforcer;
-
-    public FeedPolicyEnforcer getPolicyEnforcer() {
-        return policyEnforcer;
-    }
-
-    public void setFeedPolicyEnforcer(FeedPolicyEnforcer policyEnforcer) {
-        this.policyEnforcer = policyEnforcer;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterExecutor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterExecutor.java
deleted file mode 100644
index 6c2f14c..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterExecutor.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.DistributeFeedFrameWriter;
-import org.apache.asterix.common.feeds.api.IAdapterRuntimeManager;
-import org.apache.asterix.common.feeds.api.IAdapterRuntimeManager.State;
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-
-public class AdapterExecutor implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(AdapterExecutor.class.getName());
-
-    private final DistributeFeedFrameWriter writer;
-
-    private final IDataSourceAdapter adapter;
-
-    private final IAdapterRuntimeManager adapterManager;
-
-    public AdapterExecutor(int partition, DistributeFeedFrameWriter writer, IDataSourceAdapter adapter,
-            IAdapterRuntimeManager adapterManager) {
-        this.writer = writer;
-        this.adapter = adapter;
-        this.adapterManager = adapterManager;
-    }
-
-    @Override
-    public void run() {
-        int partition = adapterManager.getPartition();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Starting ingestion for partition:" + partition);
-        }
-        boolean continueIngestion = true;
-        boolean failedIngestion = false;
-        while (continueIngestion) {
-            try {
-                adapter.start(partition, writer);
-                continueIngestion = false;
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("Exception during feed ingestion " + e.getMessage());
-                    e.printStackTrace();
-                }
-                continueIngestion = adapter.handleException(e);
-                failedIngestion = !continueIngestion;
-            }
-        }
-
-        adapterManager.setState(failedIngestion ? State.FAILED_INGESTION : State.FINISHED_INGESTION);
-        synchronized (adapterManager) {
-            adapterManager.notifyAll();
-        }
-    }
-
-}
\ No newline at end of file

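AdapterExecutor above keeps calling adapter.start() for as long as the adapter's handleException() returns true, and only marks the run FAILED_INGESTION once the adapter gives up. A minimal, hypothetical adapter-side sketch of that contract; only the handleException signature and its retry semantics are taken from the loop above:

    @Override
    public boolean handleException(Exception e) {
        // Hypothetical retry policy: retry transient I/O failures, give up otherwise.
        return e instanceof java.io.IOException;
    }
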
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterIdentifier.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterIdentifier.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterIdentifier.java
deleted file mode 100644
index f7e528b..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterIdentifier.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.io.Serializable;
-
-/**
- * A unique identifier for a data source adapter.
- */
-public class AdapterIdentifier implements Serializable {
-
-    private static final long serialVersionUID = 1L;
-
-    private final String namespace;
-    private final String name;
-
-    public AdapterIdentifier(String namespace, String name) {
-        this.namespace = namespace;
-        this.name = name;
-    }
-
-    public String getNamespace() {
-        return namespace;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    @Override
-    public int hashCode() {
-        return (namespace + "@" + name).hashCode();
-
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == null) {
-            return false;
-        }
-        if (this == o) {
-            return true;
-        }
-        if (!(o instanceof AdapterIdentifier)) {
-            return false;
-        }
-        return namespace.equals(((AdapterIdentifier) o).getNamespace())
-                && name.equals(((AdapterIdentifier) o).getName());
-    }
-}
\ No newline at end of file

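AdapterIdentifier bases both equals() and hashCode() on the (namespace, name) pair, so independently created instances behave as interchangeable lookup keys. A small sketch with hypothetical values:

    AdapterIdentifier a = new AdapterIdentifier("default", "localfs");
    AdapterIdentifier b = new AdapterIdentifier("default", "localfs");

    Map<AdapterIdentifier, String> factories = new HashMap<AdapterIdentifier, String>();
    factories.put(a, "com.example.MyAdapterFactory"); // hypothetical factory class name
    boolean sameKey = a.equals(b) && factories.containsKey(b); // true
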
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterRuntimeManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterRuntimeManager.java
deleted file mode 100644
index aacb3da..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/AdapterRuntimeManager.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.DistributeFeedFrameWriter;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.IngestionRuntime;
-import org.apache.asterix.common.feeds.api.IAdapterRuntimeManager;
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.common.feeds.api.IIntakeProgressTracker;
-
-public class AdapterRuntimeManager implements IAdapterRuntimeManager {
-
-    private static final Logger LOGGER = Logger.getLogger(AdapterRuntimeManager.class.getName());
-
-    private final FeedId feedId;
-
-    private final IDataSourceAdapter feedAdapter;
-
-    private final IIntakeProgressTracker tracker;
-
-    private final AdapterExecutor adapterExecutor;
-
-    private final int partition;
-
-    private final ExecutorService executorService;
-
-    private IngestionRuntime ingestionRuntime;
-
-    private State state;
-
-    public AdapterRuntimeManager(FeedId feedId, IDataSourceAdapter feedAdapter, IIntakeProgressTracker tracker,
-            DistributeFeedFrameWriter writer, int partition) {
-        this.feedId = feedId;
-        this.feedAdapter = feedAdapter;
-        this.tracker = tracker;
-        this.partition = partition;
-        this.adapterExecutor = new AdapterExecutor(partition, writer, feedAdapter, this);
-        this.executorService = Executors.newSingleThreadExecutor();
-        this.state = State.INACTIVE_INGESTION;
-    }
-
-    @Override
-    public void start() throws Exception {
-        state = State.ACTIVE_INGESTION;
-        executorService.execute(adapterExecutor);
-    }
-
-    @Override
-    public void stop() {
-        try {
-            feedAdapter.stop();
-        } catch (Exception exception) {
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.severe("Unable to stop adapter " + feedAdapter + ", encountered exception " + exception);
-            }
-        } finally {
-            state = State.FINISHED_INGESTION;
-            executorService.shutdown();
-        }
-    }
-
-    @Override
-    public FeedId getFeedId() {
-        return feedId;
-    }
-
-    @Override
-    public String toString() {
-        return feedId + "[" + partition + "]";
-    }
-
-    @Override
-    public IDataSourceAdapter getFeedAdapter() {
-        return feedAdapter;
-    }
-
-    public IIntakeProgressTracker getTracker() {
-        return tracker;
-    }
-
-    @Override
-    public synchronized State getState() {
-        return state;
-    }
-
-    @Override
-    public synchronized void setState(State state) {
-        this.state = state;
-    }
-
-    public AdapterExecutor getAdapterExecutor() {
-        return adapterExecutor;
-    }
-
-    @Override
-    public int getPartition() {
-        return partition;
-    }
-
-    public IngestionRuntime getIngestionRuntime() {
-        return ingestionRuntime;
-    }
-
-    @Override
-    public IIntakeProgressTracker getProgressTracker() {
-        return tracker;
-    }
-
-}

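AdapterRuntimeManager exposes the ingestion outcome through getState(), and AdapterExecutor wakes up anyone waiting on the manager's monitor once ingestion finishes or fails. A minimal sketch of a caller blocking on that protocol; the waiting loop itself is an assumption, while the State constants and the notifyAll() hand-off come from the code above:

    try {
        synchronized (adapterManager) {
            while (adapterManager.getState() == IAdapterRuntimeManager.State.ACTIVE_INGESTION) {
                adapterManager.wait(); // released by AdapterExecutor's notifyAll()
            }
        }
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    boolean failed = adapterManager.getState() == IAdapterRuntimeManager.State.FAILED_INGESTION;
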
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java
index a144d5f..8ef6732 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/BuiltinFeedPolicies.java
@@ -21,37 +21,37 @@ package org.apache.asterix.metadata.feeds;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.common.config.MetadataConstants;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 
 public class BuiltinFeedPolicies {
 
-    public static final FeedPolicy BRITTLE = initializeBrittlePolicy();
+    public static final FeedPolicyEntity BRITTLE = initializeBrittlePolicy();
 
-    public static final FeedPolicy BASIC = initializeBasicPolicy();
+    public static final FeedPolicyEntity BASIC = initializeBasicPolicy();
 
-    public static final FeedPolicy BASIC_FT = initializeBasicFTPolicy();
+    public static final FeedPolicyEntity BASIC_FT = initializeBasicFTPolicy();
 
-    public static final FeedPolicy ADVANCED_FT = initializeAdvancedFTPolicy();
+    public static final FeedPolicyEntity ADVANCED_FT = initializeAdvancedFTPolicy();
 
-    public static final FeedPolicy ADVANCED_FT_DISCARD = initializeAdvancedFTDiscardPolicy();
+    public static final FeedPolicyEntity ADVANCED_FT_DISCARD = initializeAdvancedFTDiscardPolicy();
 
-    public static final FeedPolicy ADVANCED_FT_SPILL = initializeAdvancedFTSpillPolicy();
+    public static final FeedPolicyEntity ADVANCED_FT_SPILL = initializeAdvancedFTSpillPolicy();
 
-    public static final FeedPolicy ADVANCED_FT_THROTTLE = initializeAdvancedFTThrottlePolicy();
+    public static final FeedPolicyEntity ADVANCED_FT_THROTTLE = initializeAdvancedFTThrottlePolicy();
 
-    public static final FeedPolicy ELASTIC = initializeAdvancedFTElasticPolicy();
+    public static final FeedPolicyEntity ELASTIC = initializeAdvancedFTElasticPolicy();
 
-    public static final FeedPolicy[] policies = new FeedPolicy[] { BRITTLE, BASIC, BASIC_FT, ADVANCED_FT,
+    public static final FeedPolicyEntity[] policies = new FeedPolicyEntity[] { BRITTLE, BASIC, BASIC_FT, ADVANCED_FT,
             ADVANCED_FT_DISCARD, ADVANCED_FT_SPILL, ADVANCED_FT_THROTTLE, ELASTIC };
 
-    public static final FeedPolicy DEFAULT_POLICY = BASIC_FT;
+    public static final FeedPolicyEntity DEFAULT_POLICY = BASIC_FT;
 
     public static final String CONFIG_FEED_POLICY_KEY = "policy";
 
-    public static FeedPolicy getFeedPolicy(String policyName) {
-        for (FeedPolicy policy : policies) {
+    public static FeedPolicyEntity getFeedPolicy(String policyName) {
+        for (FeedPolicyEntity policy : policies) {
             if (policy.getPolicyName().equalsIgnoreCase(policyName)) {
                 return policy;
             }
@@ -60,7 +60,7 @@ public class BuiltinFeedPolicies {
     }
 
     //Brittle
-    private static FeedPolicy initializeBrittlePolicy() {
+    private static FeedPolicyEntity initializeBrittlePolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "false");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "false");
@@ -71,11 +71,11 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.AT_LEAST_ONE_SEMANTICS, "false");
 
         String description = "Brittle";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "Brittle", description, policyParams);
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "Brittle", description, policyParams);
     }
 
     //Basic
-    private static FeedPolicy initializeBasicPolicy() {
+    private static FeedPolicyEntity initializeBasicPolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "false");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "true");
@@ -85,11 +85,11 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.AT_LEAST_ONE_SEMANTICS, "false");
 
         String description = "Basic";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "Basic", description, policyParams);
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "Basic", description, policyParams);
     }
 
     // BasicFT
-    private static FeedPolicy initializeBasicFTPolicy() {
+    private static FeedPolicyEntity initializeBasicFTPolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "true");
@@ -103,11 +103,11 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.THROTTLING_ENABLED, "false");
 
         String description = "Basic Monitored Fault-Tolerant";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "BasicFT", description, policyParams);
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "BasicFT", description, policyParams);
     }
 
     // AdvancedFT
-    private static FeedPolicy initializeAdvancedFTPolicy() {
+    private static FeedPolicyEntity initializeAdvancedFTPolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "true");
@@ -118,11 +118,11 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.AT_LEAST_ONE_SEMANTICS, "true");
 
         String description = "Basic Monitored Fault-Tolerant with at least once semantics";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT", description, policyParams);
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT", description, policyParams);
     }
 
     // AdvancedFT_Discard
-    private static FeedPolicy initializeAdvancedFTDiscardPolicy() {
+    private static FeedPolicyEntity initializeAdvancedFTDiscardPolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "true");
@@ -133,14 +133,14 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.MAX_FRACTION_DISCARD, "100");
         policyParams.put(FeedPolicyAccessor.TIME_TRACKING, "false");
         policyParams.put(FeedPolicyAccessor.LOGGING_STATISTICS, "true");
-       
+
         String description = "AdvancedFT 100% Discard during congestion";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Discard", description,
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Discard", description,
                 policyParams);
     }
 
     // AdvancedFT_Spill
-    private static FeedPolicy initializeAdvancedFTSpillPolicy() {
+    private static FeedPolicyEntity initializeAdvancedFTSpillPolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "true");
@@ -152,11 +152,11 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.TIME_TRACKING, "true");
 
         String description = "AdvancedFT 100% Spill during congestion";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Spill", description, policyParams);
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Spill", description, policyParams);
     }
 
     // AdvancedFT_Throttle
-    private static FeedPolicy initializeAdvancedFTThrottlePolicy() {
+    private static FeedPolicyEntity initializeAdvancedFTThrottlePolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "true");
@@ -169,12 +169,12 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.THROTTLING_ENABLED, "true");
 
         String description = "AdvancedFT Throttle during congestion";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Throttle", description,
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Throttle", description,
                 policyParams);
     }
 
     // AdvancedFT_Elastic
-    private static FeedPolicy initializeAdvancedFTElasticPolicy() {
+    private static FeedPolicyEntity initializeAdvancedFTElasticPolicy() {
         Map<String, String> policyParams = new HashMap<String, String>();
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_CONTINUE, "true");
         policyParams.put(FeedPolicyAccessor.SOFT_FAILURE_LOG_DATA, "true");
@@ -185,7 +185,7 @@ public class BuiltinFeedPolicies {
         policyParams.put(FeedPolicyAccessor.LOGGING_STATISTICS, "true");
 
         String description = "Basic Monitored Fault-Tolerant Elastic";
-        return new FeedPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Elastic", description,
+        return new FeedPolicyEntity(MetadataConstants.METADATA_DATAVERSE_NAME, "AdvancedFT_Elastic", description,
                 policyParams);
     }
 

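With the rename from FeedPolicy to FeedPolicyEntity the lookup helper keeps the same shape. A minimal usage sketch; the null fall-back assumes getFeedPolicy returns null for an unknown name, which is outside the hunk above:

    FeedPolicyEntity policy = BuiltinFeedPolicies.getFeedPolicy("AdvancedFT_Spill");
    if (policy == null) {
        policy = BuiltinFeedPolicies.DEFAULT_POLICY; // BasicFT
    }
    System.out.println("Resolved feed policy: " + policy.getPolicyName());
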
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/CollectTransformFeedFrameWriter.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/CollectTransformFeedFrameWriter.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/CollectTransformFeedFrameWriter.java
deleted file mode 100644
index 30b369a..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/CollectTransformFeedFrameWriter.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.nio.ByteBuffer;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.api.IFeedOperatorOutputSideHandler;
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-import org.apache.hyracks.api.comm.IFrame;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.comm.VSizeFrame;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
-
-public class CollectTransformFeedFrameWriter implements IFeedOperatorOutputSideHandler {
-
-    private final FeedConnectionId connectionId;
-    private IFrameWriter downstreamWriter;
-    private final FrameTupleAccessor inputFrameTupleAccessor;
-    private final FrameTupleAppender tupleAppender;
-    private final IFrame frame;
-
-    private ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
-
-    public CollectTransformFeedFrameWriter(IHyracksTaskContext ctx, IFrameWriter downstreamWriter,
-            ISubscribableRuntime sourceRuntime, RecordDescriptor outputRecordDescriptor, FeedConnectionId connectionId)
-            throws HyracksDataException {
-        this.downstreamWriter = downstreamWriter;
-        RecordDescriptor inputRecordDescriptor = sourceRuntime.getRecordDescriptor();
-        inputFrameTupleAccessor = new FrameTupleAccessor(inputRecordDescriptor);
-        tupleAppender = new FrameTupleAppender();
-        frame = new VSizeFrame(ctx);
-        tupleAppender.reset(frame, true);
-        this.connectionId = connectionId;
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        downstreamWriter.open();
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        inputFrameTupleAccessor.reset(buffer);
-        int nTuple = inputFrameTupleAccessor.getTupleCount();
-        for (int t = 0; t < nTuple; t++) {
-            tupleBuilder.addField(inputFrameTupleAccessor, t, 0);
-            appendTupleToFrame();
-            tupleBuilder.reset();
-        }
-    }
-
-    private void appendTupleToFrame() throws HyracksDataException {
-        if (!tupleAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
-                tupleBuilder.getSize())) {
-            FrameUtils.flushFrame(frame.getBuffer(), downstreamWriter);
-            tupleAppender.reset(frame, true);
-            if (!tupleAppender.append(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray(), 0,
-                    tupleBuilder.getSize())) {
-                throw new IllegalStateException();
-            }
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        downstreamWriter.fail();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        downstreamWriter.close();
-    }
-
-    @Override
-    public FeedId getFeedId() {
-        return connectionId.getFeedId();
-    }
-
-    @Override
-    public Type getType() {
-        return Type.COLLECT_TRANSFORM_FEED_OUTPUT_HANDLER;
-    }
-
-    public IFrameWriter getDownstreamWriter() {
-        return downstreamWriter;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public void reset(IFrameWriter writer) {
-        this.downstreamWriter = writer;
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ExternalDataScanOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ExternalDataScanOperatorDescriptor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ExternalDataScanOperatorDescriptor.java
deleted file mode 100644
index fee99d8..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/ExternalDataScanOperatorDescriptor.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-/*
- * A single activity operator that scans data using an
- * instance of the configured adapter.
- */
-public class ExternalDataScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private IAdapterFactory adapterFactory;
-
-    public ExternalDataScanOperatorDescriptor(JobSpecification spec, RecordDescriptor rDesc,
-            IAdapterFactory dataSourceAdapterFactory) {
-        super(spec, 0, 1);
-        recordDescriptors[0] = rDesc;
-        this.adapterFactory = dataSourceAdapterFactory;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
-                    throws HyracksDataException {
-
-        return new AbstractUnaryOutputSourceOperatorNodePushable() {
-
-            @Override
-            public void initialize() throws HyracksDataException {
-                IDataSourceAdapter adapter = null;
-                try {
-                    writer.open();
-                    adapter = adapterFactory.createAdapter(ctx, partition);
-                    adapter.start(partition, writer);
-                } catch (Throwable th) {
-                    writer.fail();
-                    throw new HyracksDataException(th);
-                } finally {
-                    writer.close();
-                }
-            }
-        };
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedActivityIdFactory.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedActivityIdFactory.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedActivityIdFactory.java
deleted file mode 100644
index a0a4af9..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedActivityIdFactory.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class FeedActivityIdFactory {
-    private static AtomicInteger id = new AtomicInteger();
-    private static boolean isInitialized = false;
-
-    public static boolean isInitialized() {
-        return isInitialized;
-    }
-
-    public static void initialize(int initialId) {
-        id.set(initialId);
-        isInitialized = true;
-    }
-
-    public static int generateFeedActivityId() {
-        return id.incrementAndGet();
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorDescriptor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorDescriptor.java
deleted file mode 100644
index 715b68b..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorDescriptor.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.IngestionRuntime;
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedSubscriptionManager;
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-/**
- * FeedCollectOperatorDescriptor is responsible for ingesting data from an external source. This
- * operator uses a user-specified or a built-in adaptor for retrieving data from the external
- * data source.
- */
-public class FeedCollectOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-    private static final Logger LOGGER = Logger.getLogger(FeedCollectOperatorDescriptor.class.getName());
-
-    /** The type associated with the ADM data output from the feed adaptor */
-    private final IAType outputType;
-
-    /** unique identifier for a feed instance. */
-    private final FeedConnectionId connectionId;
-
-    /** Map representation of policy parameters */
-    private final Map<String, String> feedPolicyProperties;
-
-    /** The (singleton) instance of {@code IFeedSubscriptionManager} **/
-    private IFeedSubscriptionManager subscriptionManager;
-
-    /** The source feed from which the feed derives its data. **/
-    private final FeedId sourceFeedId;
-
-    /** The subscription location at which the recipient feed receives tuples from the source feed **/
-    private final ConnectionLocation subscriptionLocation;
-
-    public FeedCollectOperatorDescriptor(JobSpecification spec, FeedConnectionId feedConnectionId, FeedId sourceFeedId,
-            ARecordType atype, RecordDescriptor rDesc, Map<String, String> feedPolicyProperties,
-            ConnectionLocation subscriptionLocation) {
-        super(spec, 0, 1);
-        recordDescriptors[0] = rDesc;
-        this.outputType = atype;
-        this.connectionId = feedConnectionId;
-        this.feedPolicyProperties = feedPolicyProperties;
-        this.sourceFeedId = sourceFeedId;
-        this.subscriptionLocation = subscriptionLocation;
-    }
-
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
-            throws HyracksDataException {
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.subscriptionManager = runtimeCtx.getFeedManager().getFeedSubscriptionManager();
-        ISubscribableRuntime sourceRuntime = null;
-        IOperatorNodePushable nodePushable = null;
-        switch (subscriptionLocation) {
-            case SOURCE_FEED_INTAKE_STAGE:
-                try {
-                    SubscribableFeedRuntimeId feedSubscribableRuntimeId = new SubscribableFeedRuntimeId(sourceFeedId,
-                            FeedRuntimeType.INTAKE, partition);
-                    sourceRuntime = getIntakeRuntime(feedSubscribableRuntimeId);
-                    if (sourceRuntime == null) {
-                        throw new HyracksDataException("Source intake task not found for source feed id "
-                                + sourceFeedId);
-                    }
-                    nodePushable = new FeedCollectOperatorNodePushable(ctx, sourceFeedId, connectionId,
-                            feedPolicyProperties, partition, nPartitions, sourceRuntime);
-
-                } catch (Exception exception) {
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe("Initialization of the feed adaptor failed with exception " + exception);
-                    }
-                    throw new HyracksDataException("Initialization of the feed adapter failed", exception);
-                }
-                break;
-            case SOURCE_FEED_COMPUTE_STAGE:
-                SubscribableFeedRuntimeId feedSubscribableRuntimeId = new SubscribableFeedRuntimeId(sourceFeedId,
-                        FeedRuntimeType.COMPUTE, partition);
-                sourceRuntime = (ISubscribableRuntime) subscriptionManager
-                        .getSubscribableRuntime(feedSubscribableRuntimeId);
-                if (sourceRuntime == null) {
-                    throw new HyracksDataException("Source compute task not found for source feed id " + sourceFeedId
-                            + " " + FeedRuntimeType.COMPUTE + "[" + partition + "]");
-                }
-                nodePushable = new FeedCollectOperatorNodePushable(ctx, sourceFeedId, connectionId,
-                        feedPolicyProperties, partition, nPartitions, sourceRuntime);
-                break;
-        }
-        return nodePushable;
-    }
-
-    public FeedConnectionId getFeedConnectionId() {
-        return connectionId;
-    }
-
-    public Map<String, String> getFeedPolicyProperties() {
-        return feedPolicyProperties;
-    }
-
-    public IAType getOutputType() {
-        return outputType;
-    }
-
-    public RecordDescriptor getRecordDescriptor() {
-        return recordDescriptors[0];
-    }
-
-    public FeedId getSourceFeedId() {
-        return sourceFeedId;
-    }
-
-    private IngestionRuntime getIntakeRuntime(SubscribableFeedRuntimeId subscribableRuntimeId) {
-        int waitCycleCount = 0;
-        ISubscribableRuntime ingestionRuntime = subscriptionManager.getSubscribableRuntime(subscribableRuntimeId);
-        while (ingestionRuntime == null && waitCycleCount < 10) {
-            try {
-                Thread.sleep(2000);
-                waitCycleCount++;
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("waiting to obtain ingestion runtime for subscription " + subscribableRuntimeId);
-                }
-            } catch (InterruptedException e) {
-                e.printStackTrace();
-                break;
-            }
-            ingestionRuntime = subscriptionManager.getSubscribableRuntime(subscribableRuntimeId);
-        }
-        return (IngestionRuntime) ingestionRuntime;
-    }
-
-    public ConnectionLocation getSubscriptionLocation() {
-        return subscriptionLocation;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorNodePushable.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorNodePushable.java
deleted file mode 100644
index 8f9c8f3..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedCollectOperatorNodePushable.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.feeds.CollectionRuntime;
-import org.apache.asterix.common.feeds.FeedCollectRuntimeInputHandler;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedFrameCollector.State;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeInputHandler;
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedOperatorOutputSideHandler;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-/**
- * The runtime for {@code FeedCollectOperatorDescriptor}
- */
-public class FeedCollectOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-
-    private static Logger LOGGER = Logger.getLogger(FeedCollectOperatorNodePushable.class.getName());
-
-    private final int partition;
-    private final FeedConnectionId connectionId;
-    private final Map<String, String> feedPolicy;
-    private final FeedPolicyAccessor policyAccessor;
-    private final IFeedManager feedManager;
-    private final ISubscribableRuntime sourceRuntime;
-    private final IHyracksTaskContext ctx;
-    private final int nPartitions;
-
-    private RecordDescriptor outputRecordDescriptor;
-    private FeedRuntimeInputHandler inputSideHandler;
-    private CollectionRuntime collectRuntime;
-
-    public FeedCollectOperatorNodePushable(IHyracksTaskContext ctx, FeedId sourceFeedId,
-            FeedConnectionId feedConnectionId, Map<String, String> feedPolicy, int partition, int nPartitions,
-            ISubscribableRuntime sourceRuntime) {
-        this.ctx = ctx;
-        this.partition = partition;
-        this.nPartitions = nPartitions;
-        this.connectionId = feedConnectionId;
-        this.sourceRuntime = sourceRuntime;
-        this.feedPolicy = feedPolicy;
-        policyAccessor = new FeedPolicyAccessor(feedPolicy);
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        this.feedManager = runtimeCtx.getFeedManager();
-    }
-
-    @Override
-    public void initialize() throws HyracksDataException {
-        try {
-            outputRecordDescriptor = recordDesc;
-            FeedRuntimeType sourceRuntimeType = ((SubscribableFeedRuntimeId) sourceRuntime.getRuntimeId())
-                    .getFeedRuntimeType();
-            switch (sourceRuntimeType) {
-                case INTAKE:
-                    handleCompleteConnection();
-                    break;
-                case COMPUTE:
-                    handlePartialConnection();
-                    break;
-                default:
-                    throw new IllegalStateException("Invalid source type " + sourceRuntimeType);
-            }
-
-            State state = collectRuntime.waitTillCollectionOver();
-            if (state.equals(State.FINISHED)) {
-                feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId,
-                        collectRuntime.getRuntimeId());
-                writer.close();
-                inputSideHandler.close();
-            } else if (state.equals(State.HANDOVER)) {
-                inputSideHandler.setMode(Mode.STALL);
-                writer.close();
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Ending Collect Operator, the input side handler is now in " + Mode.STALL
-                            + " and the output writer " + writer + " has been closed ");
-                }
-            }
-        } catch (InterruptedException ie) {
-            handleInterruptedException(ie);
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void handleCompleteConnection() throws Exception {
-        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.COLLECT, partition,
-                FeedRuntimeId.DEFAULT_OPERAND_ID);
-        collectRuntime = (CollectionRuntime) feedManager.getFeedConnectionManager().getFeedRuntime(connectionId,
-                runtimeId);
-        if (collectRuntime == null) {
-            beginNewFeed(runtimeId);
-        } else {
-            reviveOldFeed();
-        }
-    }
-
-    private void beginNewFeed(FeedRuntimeId runtimeId) throws Exception {
-        writer.open();
-        IFrameWriter outputSideWriter = writer;
-        if (((SubscribableFeedRuntimeId) sourceRuntime.getRuntimeId()).getFeedRuntimeType().equals(
-                FeedRuntimeType.COMPUTE)) {
-            outputSideWriter = new CollectTransformFeedFrameWriter(ctx, writer, sourceRuntime, outputRecordDescriptor,
-                    connectionId);
-            this.recordDesc = sourceRuntime.getRecordDescriptor();
-        }
-
-        FrameTupleAccessor tupleAccessor = new FrameTupleAccessor(recordDesc);
-        inputSideHandler = new FeedCollectRuntimeInputHandler(ctx, connectionId, runtimeId, outputSideWriter, policyAccessor,
-                false,  tupleAccessor, recordDesc,
-                feedManager, nPartitions);
-
-        collectRuntime = new CollectionRuntime(connectionId, runtimeId, inputSideHandler, outputSideWriter,
-                sourceRuntime, feedPolicy);
-        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, collectRuntime);
-        sourceRuntime.subscribeFeed(policyAccessor, collectRuntime);
-    }
-
-    private void reviveOldFeed() throws HyracksDataException {
-        writer.open();
-        collectRuntime.getFrameCollector().setState(State.ACTIVE);
-        inputSideHandler = collectRuntime.getInputHandler();
-
-        IFrameWriter innerWriter = inputSideHandler.getCoreOperator();
-        if (innerWriter instanceof CollectTransformFeedFrameWriter) {
-            ((CollectTransformFeedFrameWriter) innerWriter).reset(this.writer);
-        } else {
-            inputSideHandler.setCoreOperator(writer);
-        }
-
-        inputSideHandler.setMode(Mode.PROCESS);
-    }
-
-    private void handlePartialConnection() throws Exception {
-        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.COMPUTE_COLLECT, partition,
-                FeedRuntimeId.DEFAULT_OPERAND_ID);
-        writer.open();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Beginning new feed (from existing partial connection):" + connectionId);
-        }
-        IFeedOperatorOutputSideHandler wrapper = new CollectTransformFeedFrameWriter(ctx, writer, sourceRuntime,
-                outputRecordDescriptor, connectionId);
-
-        inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, wrapper, policyAccessor, false,
-                new FrameTupleAccessor(recordDesc), recordDesc, feedManager, nPartitions);
-
-        collectRuntime = new CollectionRuntime(connectionId, runtimeId, inputSideHandler, wrapper, sourceRuntime,
-                feedPolicy);
-        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, collectRuntime);
-        recordDesc = sourceRuntime.getRecordDescriptor();
-        sourceRuntime.subscribeFeed(policyAccessor, collectRuntime);
-    }
-
-    private void handleInterruptedException(InterruptedException ie) throws HyracksDataException {
-        if (policyAccessor.continueOnHardwareFailure()) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Continuing on failure as per feed policy, switching to " + Mode.STALL
-                        + " until failure is resolved");
-            }
-            inputSideHandler.setMode(Mode.STALL);
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Failure during feed ingestion. Deregistering feed runtime " + collectRuntime
-                        + " as feed is not configured to handle failures");
-            }
-            feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId, collectRuntime.getRuntimeId());
-            writer.close();
-            throw new HyracksDataException(ie);
-        }
-    }
-
-}
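
For readers tracing the removed collect operator above, its control flow boils down to three steps: look up or create a collection runtime for the connection, switch an existing runtime back to PROCESS mode when reviving it, and let the feed policy decide between stalling and failing when the operator is interrupted. A minimal, self-contained sketch of that pattern follows; the names are illustrative only and not the AsterixDB API.

    // Hypothetical sketch of the collect-side lifecycle; illustrative names only.
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    class CollectLifecycleSketch {
        enum Mode { PROCESS, STALL }

        interface Runtime { void setMode(Mode mode); }

        private final Map<String, Runtime> registry = new ConcurrentHashMap<>();

        // Either revive an already-registered runtime or create and register a new one.
        Runtime beginOrRevive(String runtimeId, Runtime fresh) {
            Runtime existing = registry.get(runtimeId);
            if (existing != null) {
                existing.setMode(Mode.PROCESS);    // revive: resume processing
                return existing;
            }
            registry.put(runtimeId, fresh);        // begin: register the new runtime
            return fresh;
        }

        // On interruption, the feed policy decides whether to stall or to fail hard.
        void onInterrupted(String runtimeId, Runtime runtime, boolean continueOnHardwareFailure)
                throws InterruptedException {
            if (continueOnHardwareFailure) {
                runtime.setMode(Mode.STALL);       // keep the runtime and wait for recovery
            } else {
                registry.remove(runtimeId);        // deregister and propagate the failure
                throw new InterruptedException("feed is not configured to survive failures");
            }
        }
    }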

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedConnectionManager.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedConnectionManager.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedConnectionManager.java
deleted file mode 100644
index 7356c94..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedConnectionManager.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedRuntime;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeManager;
-import org.apache.asterix.common.feeds.api.IFeedConnectionManager;
-
-/**
- * An implementation of the IFeedConnectionManager interface.
- * Provides the necessary central repository for registering and retrieving
- * artifacts/services associated with a feed connection.
- */
-public class FeedConnectionManager implements IFeedConnectionManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedConnectionManager.class.getName());
-
-    private Map<FeedConnectionId, FeedRuntimeManager> feedRuntimeManagers = new HashMap<FeedConnectionId, FeedRuntimeManager>();
-    private final String nodeId;
-
-    public FeedConnectionManager(String nodeId) {
-        this.nodeId = nodeId;
-    }
-
-    public FeedRuntimeManager getFeedRuntimeManager(FeedConnectionId feedId) {
-        return feedRuntimeManagers.get(feedId);
-    }
-
-    @Override
-    public void deregisterFeed(FeedConnectionId feedId) {
-        try {
-            FeedRuntimeManager mgr = feedRuntimeManagers.get(feedId);
-            if (mgr != null) {
-                mgr.close();
-                feedRuntimeManagers.remove(feedId);
-            }
-        } catch (Exception e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Exception in closing feed runtime: " + e.getMessage());
-            }
-        }
-
-    }
-
-    @Override
-    public synchronized void registerFeedRuntime(FeedConnectionId connectionId, FeedRuntime feedRuntime)
-            throws Exception {
-        FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(connectionId);
-        if (runtimeMgr == null) {
-            runtimeMgr = new FeedRuntimeManager(connectionId, this);
-            feedRuntimeManagers.put(connectionId, runtimeMgr);
-        }
-        runtimeMgr.registerFeedRuntime(feedRuntime.getRuntimeId(), feedRuntime);
-    }
-
-    @Override
-    public void deRegisterFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId) {
-        FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(connectionId);
-        if (runtimeMgr != null) {
-            runtimeMgr.deregisterFeedRuntime(feedRuntimeId);
-        }
-    }
-
-    @Override
-    public FeedRuntime getFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId) {
-        FeedRuntimeManager runtimeMgr = feedRuntimeManagers.get(connectionId);
-        return runtimeMgr != null ? runtimeMgr.getFeedRuntime(feedRuntimeId) : null;
-    }
-
-    @Override
-    public String toString() {
-        return "FeedManager " + "[" + nodeId + "]";
-    }
-
-    @Override
-    public List<FeedRuntimeId> getRegisteredRuntimes() {
-        List<FeedRuntimeId> runtimes = new ArrayList<FeedRuntimeId>();
-        for (Entry<FeedConnectionId, FeedRuntimeManager> entry : feedRuntimeManagers.entrySet()) {
-            runtimes.addAll(entry.getValue().getFeedRuntimes());
-        }
-        return runtimes;
-    }
-}
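
The removed FeedConnectionManager follows a lazy-registry pattern: a per-connection manager is created on the first runtime registration for a connection id and can be dropped once its last runtime is deregistered. A generic sketch of that pattern using plain JDK collections (the type parameters stand in for FeedConnectionId and FeedRuntime; this is not the removed API itself):

    import java.util.HashMap;
    import java.util.Map;

    class ConnectionRegistrySketch<C, R> {
        // connection id -> (runtime id -> runtime)
        private final Map<C, Map<String, R>> managers = new HashMap<>();

        synchronized void register(C connectionId, String runtimeId, R runtime) {
            managers.computeIfAbsent(connectionId, k -> new HashMap<>()).put(runtimeId, runtime);
        }

        synchronized void deregister(C connectionId, String runtimeId) {
            Map<String, R> perConnection = managers.get(connectionId);
            if (perConnection != null) {
                perConnection.remove(runtimeId);
                if (perConnection.isEmpty()) {
                    managers.remove(connectionId);    // drop the manager once it is empty
                }
            }
        }

        synchronized R lookup(C connectionId, String runtimeId) {
            Map<String, R> perConnection = managers.get(connectionId);
            return perConnection != null ? perConnection.get(runtimeId) : null;
        }
    }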

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedFrameTupleDecorator.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedFrameTupleDecorator.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedFrameTupleDecorator.java
deleted file mode 100644
index 6ee14d8..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedFrameTupleDecorator.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.asterix.builders.IARecordBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedConstants.StatisticsConstants;
-import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import org.apache.asterix.om.base.AInt32;
-import org.apache.asterix.om.base.AInt64;
-import org.apache.asterix.om.base.AMutableInt32;
-import org.apache.asterix.om.base.AMutableInt64;
-import org.apache.asterix.om.base.AMutableString;
-import org.apache.asterix.om.base.AString;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
-
-public class FeedFrameTupleDecorator {
-
-    private AMutableString aString = new AMutableString("");
-    private AMutableInt64 aInt64 = new AMutableInt64(0);
-    private AMutableInt32 aInt32 = new AMutableInt32(0);
-    private AtomicInteger tupleId;
-
-    @SuppressWarnings("unchecked")
-    private static ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.ASTRING);
-    @SuppressWarnings("unchecked")
-    private static ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.AINT32);
-    @SuppressWarnings("unchecked")
-    private static ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.AINT64);
-
-    private final int partition;
-    private final ArrayBackedValueStorage attrNameStorage;
-    private final ArrayBackedValueStorage attrValueStorage;
-
-    public FeedFrameTupleDecorator(int partition) {
-        this.tupleId = new AtomicInteger(0);
-        this.partition = partition;
-        this.attrNameStorage = new ArrayBackedValueStorage();
-        this.attrValueStorage = new ArrayBackedValueStorage();
-    }
-
-    public void addLongAttribute(String attrName, long attrValue, IARecordBuilder recordBuilder)
-            throws HyracksDataException, AsterixException {
-        attrNameStorage.reset();
-        aString.setValue(attrName);
-        stringSerde.serialize(aString, attrNameStorage.getDataOutput());
-
-        attrValueStorage.reset();
-        aInt64.setValue(attrValue);
-        int64Serde.serialize(aInt64, attrValueStorage.getDataOutput());
-
-        recordBuilder.addField(attrNameStorage, attrValueStorage);
-    }
-
-    public void addIntegerAttribute(String attrName, int attrValue, IARecordBuilder recordBuilder)
-            throws HyracksDataException, AsterixException {
-        attrNameStorage.reset();
-        aString.setValue(attrName);
-        stringSerde.serialize(aString, attrNameStorage.getDataOutput());
-
-        attrValueStorage.reset();
-        aInt32.setValue(attrValue);
-        int32Serde.serialize(aInt32, attrValueStorage.getDataOutput());
-
-        recordBuilder.addField(attrNameStorage, attrValueStorage);
-    }
-
-    public void addTupleId(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
-        addIntegerAttribute(StatisticsConstants.INTAKE_TUPLEID, tupleId.incrementAndGet(), recordBuilder);
-    }
-
-    public void addIntakePartition(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
-        addIntegerAttribute(StatisticsConstants.INTAKE_PARTITION, partition, recordBuilder);
-    }
-
-    public void addIntakeTimestamp(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
-        addLongAttribute(StatisticsConstants.INTAKE_TIMESTAMP, System.currentTimeMillis(), recordBuilder);
-    }
-
-    public void addStoreTimestamp(IARecordBuilder recordBuilder) throws HyracksDataException, AsterixException {
-        addLongAttribute(StatisticsConstants.STORE_TIMESTAMP, System.currentTimeMillis(), recordBuilder);
-    }
-
-}
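
The removed FeedFrameTupleDecorator stamps intake metadata onto every record it sees: a monotonically increasing tuple id, the intake partition, and intake/store timestamps, each serialized through the record builder. A simplified sketch of the same idea, assuming a record is just a map; the field names below are illustrative, not the actual StatisticsConstants values:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.concurrent.atomic.AtomicInteger;

    class TupleDecoratorSketch {
        private final int partition;
        private final AtomicInteger tupleId = new AtomicInteger(0);

        TupleDecoratorSketch(int partition) {
            this.partition = partition;
        }

        // Stamp intake metadata onto a record before it flows downstream.
        Map<String, Object> decorate(Map<String, Object> record) {
            Map<String, Object> decorated = new LinkedHashMap<>(record);
            decorated.put("intake-tupleid", tupleId.incrementAndGet());
            decorated.put("intake-partition", partition);
            decorated.put("intake-timestamp", System.currentTimeMillis());
            return decorated;
        }
    }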

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorDescriptor.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorDescriptor.java
deleted file mode 100644
index 54c9af5..0000000
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedIntakeOperatorDescriptor.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.metadata.feeds;
-
-import java.util.Map;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-import org.apache.asterix.common.feeds.IngestionRuntime;
-import org.apache.asterix.common.feeds.SubscribableFeedRuntimeId;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedSubscriptionManager;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.library.ExternalLibraryManager;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-/**
- * An operator responsible for establishing a connection with an external data source and for parsing
- * and translating the received content. It uses an instance of a feed adaptor to perform these functions.
- */
-public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private static final Logger LOGGER = Logger.getLogger(FeedIntakeOperatorDescriptor.class.getName());
-
-    /** The unique identifier of the feed that is being ingested. **/
-    private final FeedId feedId;
-
-    private final FeedPolicyAccessor policyAccessor;
-
-    /** The adaptor factory that is used to create an instance of the feed adaptor **/
-    private IAdapterFactory adaptorFactory;
-
-    /** The library that contains the adapter in use. **/
-    private String adaptorLibraryName;
-
-    /**
-     * The adapter factory class that is used to create an instance of the feed adapter.
-     * This value is used only in the case of external adapters.
-     **/
-    private String adaptorFactoryClassName;
-
-    /** The configuration parameters associated with the adapter. **/
-    private Map<String, String> adaptorConfiguration;
-
-    private ARecordType adapterOutputType;
-
-    public FeedIntakeOperatorDescriptor(JobSpecification spec, PrimaryFeed primaryFeed, IAdapterFactory adapterFactory,
-            ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor) {
-        super(spec, 0, 1);
-        this.feedId = new FeedId(primaryFeed.getDataverseName(), primaryFeed.getFeedName());
-        this.adaptorFactory = adapterFactory;
-        this.adapterOutputType = adapterOutputType;
-        this.policyAccessor = policyAccessor;
-    }
-
-    public FeedIntakeOperatorDescriptor(JobSpecification spec, PrimaryFeed primaryFeed, String adapterLibraryName,
-            String adapterFactoryClassName, ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor) {
-        super(spec, 0, 1);
-        this.feedId = new FeedId(primaryFeed.getDataverseName(), primaryFeed.getFeedName());
-        this.adaptorFactoryClassName = adapterFactoryClassName;
-        this.adaptorLibraryName = adapterLibraryName;
-        this.adaptorConfiguration = primaryFeed.getAdaptorConfiguration();
-        this.adapterOutputType = adapterOutputType;
-        this.policyAccessor = policyAccessor;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
-                .getApplicationContext().getApplicationObject();
-        IFeedSubscriptionManager feedSubscriptionManager = runtimeCtx.getFeedManager().getFeedSubscriptionManager();
-        SubscribableFeedRuntimeId feedIngestionId = new SubscribableFeedRuntimeId(feedId, FeedRuntimeType.INTAKE,
-                partition);
-        IngestionRuntime ingestionRuntime = (IngestionRuntime) feedSubscriptionManager
-                .getSubscribableRuntime(feedIngestionId);
-        if (adaptorFactory == null) {
-            try {
-                adaptorFactory = createExternalAdapterFactory(ctx, partition);
-            } catch (Exception exception) {
-                throw new HyracksDataException(exception);
-            }
-
-        }
-        return new FeedIntakeOperatorNodePushable(ctx, feedId, adaptorFactory, partition, ingestionRuntime,
-                policyAccessor);
-    }
-
-    private IAdapterFactory createExternalAdapterFactory(IHyracksTaskContext ctx, int partition) throws Exception {
-        IAdapterFactory adapterFactory = null;
-        ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(feedId.getDataverse(),
-                adaptorLibraryName);
-        if (classLoader != null) {
-            adapterFactory = ((IAdapterFactory) (classLoader.loadClass(adaptorFactoryClassName).newInstance()));
-            adapterFactory.configure(adaptorConfiguration, adapterOutputType);
-        } else {
-            String message = "Unable to create adapter as class loader not configured for library " + adaptorLibraryName
-                    + " in dataverse " + feedId.getDataverse();
-            LOGGER.severe(message);
-            throw new IllegalArgumentException(message);
-        }
-        return adapterFactory;
-    }
-
-    public FeedId getFeedId() {
-        return feedId;
-    }
-
-}
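
The external-adapter path in the removed FeedIntakeOperatorDescriptor resolves the factory class reflectively from the library's class loader and then hands it the adapter configuration. A minimal sketch of that step; the Configurable interface below is a placeholder standing in for IAdapterFactory, and the rest is plain reflection:

    import java.util.Map;

    class ExternalFactoryLoaderSketch {
        interface Configurable {
            void configure(Map<String, String> configuration) throws Exception;
        }

        static Configurable load(ClassLoader libraryClassLoader, String factoryClassName,
                Map<String, String> configuration) throws Exception {
            if (libraryClassLoader == null) {
                throw new IllegalArgumentException("no class loader configured for the requested library");
            }
            Configurable factory = (Configurable) libraryClassLoader.loadClass(factoryClassName)
                    .getDeclaredConstructor().newInstance();
            factory.configure(configuration);    // pass the adapter its configuration parameters
            return factory;
        }
    }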


[19/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBuffer.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBuffer.java
deleted file mode 100644
index e5a22b5..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBuffer.java
+++ /dev/null
@@ -1,388 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.Map;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.logging.Level;
-
-import org.apache.asterix.common.feeds.MonitoredBufferTimerTasks.LogInputOutputRateTask;
-import org.apache.asterix.common.feeds.MonitoredBufferTimerTasks.MonitorInputQueueLengthTimerTask;
-import org.apache.asterix.common.feeds.MonitoredBufferTimerTasks.MonitoreProcessRateTimerTask;
-import org.apache.asterix.common.feeds.MonitoredBufferTimerTasks.MonitoredBufferStorageTimerTask;
-import org.apache.asterix.common.feeds.api.IExceptionHandler;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector.MetricType;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector.ValueType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback.FrameEvent;
-import org.apache.asterix.common.feeds.api.IFramePostProcessor;
-import org.apache.asterix.common.feeds.api.IFramePreprocessor;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public abstract class MonitoredBuffer extends MessageReceiver<DataBucket> {
-
-    protected static final long LOG_INPUT_OUTPUT_RATE_FREQUENCY = 5000; // 5 seconds
-    protected static final long INPUT_QUEUE_MEASURE_FREQUENCY = 1000; // 1 second
-    protected static final long PROCESSING_RATE_MEASURE_FREQUENCY = 10000; // 10 seconds
-
-    protected static final int PROCESS_RATE_REFRESH = 2; // recompute the processing rate every 2nd frame
-
-    protected final IHyracksTaskContext ctx;
-    protected final FeedConnectionId connectionId;
-    protected final FeedRuntimeId runtimeId;
-    protected final FrameTupleAccessor inflowFta;
-    protected final FrameTupleAccessor outflowFta;
-    protected final FeedRuntimeInputHandler inputHandler;
-    protected final IFrameEventCallback callback;
-    protected final Timer timer;
-    private final IExceptionHandler exceptionHandler;
-    protected final FeedPolicyAccessor policyAccessor;
-    protected int nPartitions;
-
-    private IFrameWriter frameWriter;
-    protected IFeedMetricCollector metricCollector;
-    protected boolean monitorProcessingRate = false;
-    protected boolean monitorInputQueueLength = false;
-    protected boolean logInflowOutflowRate = false;
-    protected boolean reportOutflowRate = false;
-    protected boolean reportInflowRate = false;
-
-    protected int inflowReportSenderId = -1;
-    protected int outflowReportSenderId = -1;
-    protected TimerTask monitorInputQueueLengthTask;
-    protected TimerTask processingRateTask;
-    protected TimerTask logInflowOutflowRateTask;
-    protected MonitoredBufferStorageTimerTask storageTimeTrackingRateTask;
-    protected StorageFrameHandler storageFromeHandler;
-
-    protected int processingRate = -1;
-    protected int frameCount = 0;
-    private long avgDelayPersistence = 0;
-    private boolean active;
-    private Map<Integer, Long> tupleTimeStats;
-    IFramePostProcessor postProcessor = null;
-    IFramePreprocessor preProcessor = null;
-
-    public static MonitoredBuffer getMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler,
-            IFrameWriter frameWriter, FrameTupleAccessor fta, RecordDescriptor recordDesc,
-            IFeedMetricCollector metricCollector, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            IExceptionHandler exceptionHandler, IFrameEventCallback callback, int nPartitions,
-            FeedPolicyAccessor policyAccessor) {
-        switch (runtimeId.getFeedRuntimeType()) {
-            case COMPUTE:
-                return new ComputeSideMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
-                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
-            case STORE:
-                return new StorageSideMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
-                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
-            case COLLECT:
-                return new IntakeSideMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
-                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
-            default:
-                return new BasicMonitoredBuffer(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector,
-                        connectionId, runtimeId, exceptionHandler, callback, nPartitions, policyAccessor);
-        }
-    }
-
-    protected MonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter,
-            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
-            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
-            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
-        this.ctx = ctx;
-        this.connectionId = connectionId;
-        this.frameWriter = frameWriter;
-        this.inflowFta = new FrameTupleAccessor(recordDesc);
-        this.outflowFta = new FrameTupleAccessor(recordDesc);
-        this.runtimeId = runtimeId;
-        this.metricCollector = metricCollector;
-        this.exceptionHandler = exceptionHandler;
-        this.callback = callback;
-        this.inputHandler = inputHandler;
-        this.timer = new Timer();
-        this.policyAccessor = policyAccessor;
-        this.nPartitions = nPartitions;
-        this.active = true;
-        initializeMonitoring();
-    }
-
-    protected abstract boolean monitorProcessingRate();
-
-    protected abstract boolean logInflowOutflowRate();
-
-    protected abstract boolean reportOutflowRate();
-
-    protected abstract boolean reportInflowRate();
-
-    protected abstract boolean monitorInputQueueLength();
-
-    protected abstract IFramePreprocessor getFramePreProcessor();
-
-    protected abstract IFramePostProcessor getFramePostProcessor();
-
-    protected void initializeMonitoring() {
-        monitorProcessingRate = monitorProcessingRate();
-        monitorInputQueueLength = monitorInputQueueLength();
-        reportInflowRate = reportInflowRate();
-        reportOutflowRate = reportOutflowRate();
-        logInflowOutflowRate = policyAccessor.isLoggingStatisticsEnabled() || logInflowOutflowRate();
-
-        if (monitorProcessingRate && policyAccessor.isElastic()) { // check possibility to scale in
-            this.processingRateTask = new MonitoreProcessRateTimerTask(this, inputHandler.getFeedManager(),
-                    connectionId, nPartitions);
-            this.timer.scheduleAtFixedRate(processingRateTask, 0, PROCESSING_RATE_MEASURE_FREQUENCY);
-        }
-
-        if (monitorInputQueueLength && (policyAccessor.isElastic() || policyAccessor.throttlingEnabled()
-                || policyAccessor.spillToDiskOnCongestion() || policyAccessor.discardOnCongestion())) {
-            this.monitorInputQueueLengthTask = new MonitorInputQueueLengthTimerTask(this, callback);
-            this.timer.scheduleAtFixedRate(monitorInputQueueLengthTask, 0, INPUT_QUEUE_MEASURE_FREQUENCY);
-        }
-
-        if (logInflowOutflowRate || reportInflowRate || reportOutflowRate) {
-            this.logInflowOutflowRateTask = new LogInputOutputRateTask(this, logInflowOutflowRate, reportInflowRate,
-                    reportOutflowRate);
-            this.timer.scheduleAtFixedRate(logInflowOutflowRateTask, 0, LOG_INPUT_OUTPUT_RATE_FREQUENCY);
-            this.inflowReportSenderId = metricCollector.createReportSender(connectionId, runtimeId,
-                    ValueType.INFLOW_RATE, MetricType.RATE);
-            this.outflowReportSenderId = metricCollector.createReportSender(connectionId, runtimeId,
-                    ValueType.OUTFLOW_RATE, MetricType.RATE);
-        }
-    }
-
-    protected void deinitializeMonitoring() {
-        if (monitorInputQueueLengthTask != null) {
-            monitorInputQueueLengthTask.cancel();
-        }
-        if (processingRateTask != null) {
-            processingRateTask.cancel();
-        }
-        if (logInflowOutflowRate || reportInflowRate || reportOutflowRate) {
-            metricCollector.removeReportSender(inflowReportSenderId);
-            metricCollector.removeReportSender(outflowReportSenderId);
-            logInflowOutflowRateTask.cancel();
-        }
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Disabled monitoring for " + this.runtimeId);
-        }
-    }
-
-    protected void postProcessFrame(long startTime, ByteBuffer frame) throws Exception {
-        if (monitorProcessingRate) {
-            frameCount++;
-            if (frameCount % PROCESS_RATE_REFRESH == 0) {
-                long endTime = System.currentTimeMillis();
-                processingRate = (int) ((double) outflowFta.getTupleCount() * 1000 / (endTime - startTime));
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Processing rate: " + processingRate + " tuples/sec");
-                }
-                frameCount = 0;
-            }
-        }
-
-        if (logInflowOutflowRate || reportOutflowRate) {
-            metricCollector.sendReport(outflowReportSenderId, outflowFta.getTupleCount());
-        }
-
-        postProcessFrame(frame);
-
-    }
-
-    protected void preProcessFrame(ByteBuffer frame) throws Exception {
-        if (preProcessor == null) {
-            preProcessor = getFramePreProcessor();
-        }
-        if (preProcessor != null) {
-            preProcessor.preProcess(frame);
-        }
-    }
-
-    protected void postProcessFrame(ByteBuffer frame) throws Exception {
-        if (postProcessor == null) {
-            postProcessor = getFramePostProcessor();
-        }
-        if (postProcessor != null) {
-            outflowFta.reset(frame);
-            postProcessor.postProcessFrame(frame, outflowFta);
-        }
-    }
-
-    @Override
-    public void sendMessage(DataBucket message) {
-        inbox.add(message);
-    }
-
-    public void sendReport(ByteBuffer frame) {
-        if ((logInflowOutflowRate || reportInflowRate) && !(inputHandler.getMode().equals(Mode.PROCESS_BACKLOG)
-                || inputHandler.getMode().equals(Mode.PROCESS_SPILL))) {
-            inflowFta.reset(frame);
-            metricCollector.sendReport(inflowReportSenderId, inflowFta.getTupleCount());
-        }
-    }
-
-    /** return rate in terms of tuples/sec **/
-    public int getInflowRate() {
-        return metricCollector.getMetric(inflowReportSenderId);
-    }
-
-    /** return rate in terms of tuples/sec **/
-    public int getOutflowRate() {
-        return metricCollector.getMetric(outflowReportSenderId);
-    }
-
-    /** return the number of pending frames from the input queue **/
-    public int getWorkSize() {
-        return inbox.size();
-    }
-
-    /** reset the number of partitions (cardinality) for the runtime **/
-    public void setNumberOfPartitions(int nPartitions) {
-        if (processingRateTask != null) {
-            int currentPartitions = ((MonitoreProcessRateTimerTask) processingRateTask).getNumberOfPartitions();
-            if (currentPartitions != nPartitions) {
-                ((MonitoreProcessRateTimerTask) processingRateTask).setNumberOfPartitions(nPartitions);
-            }
-        }
-    }
-
-    public FeedRuntimeInputHandler getInputHandler() {
-        return inputHandler;
-    }
-
-    public synchronized void close(boolean processPending, boolean disableMonitoring) {
-        super.close(processPending);
-        if (disableMonitoring) {
-            deinitializeMonitoring();
-        }
-        active = false;
-    }
-
-    @Override
-    public synchronized void processMessage(DataBucket message) throws Exception {
-        if (!active) {
-            message.doneReading();
-            return;
-        }
-        switch (message.getContentType()) {
-            case DATA:
-                boolean finishedProcessing = false;
-                ByteBuffer frameReceived = message.getContent();
-                ByteBuffer frameToProcess = null;
-                if (inputHandler.isThrottlingEnabled()) {
-                    inflowFta.reset(frameReceived);
-                    int pRate = getProcessingRate();
-                    int inflowRate = getInflowRate();
-                    if (inflowRate > pRate) {
-                        double retainFraction = (pRate * 0.8 / inflowRate);
-                        frameToProcess = throttleFrame(inflowFta, retainFraction);
-                        inflowFta.reset(frameToProcess);
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Throttling at fraction " + retainFraction + ", inflow rate " + inflowRate
-                                    + ", number of tuples remaining " + inflowFta.getTupleCount());
-
-                        }
-                    } else {
-                        frameToProcess = frameReceived;
-                    }
-                } else {
-                    frameToProcess = frameReceived;
-                }
-                outflowFta.reset(frameToProcess);
-                long startTime = 0;
-                while (!finishedProcessing) {
-                    try {
-                        inflowFta.reset(frameToProcess);
-                        startTime = System.currentTimeMillis();
-                        preProcessFrame(frameToProcess);
-                        frameWriter.nextFrame(frameToProcess);
-                        postProcessFrame(startTime, frameToProcess);
-                        finishedProcessing = true;
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                        frameToProcess = exceptionHandler.handleException(e, frameToProcess);
-                        finishedProcessing = true;
-                    }
-                }
-                message.doneReading();
-                break;
-            case EOD:
-                message.doneReading();
-                timer.cancel();
-                callback.frameEvent(FrameEvent.FINISHED_PROCESSING);
-                break;
-            case EOSD:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Done processing spillage");
-                }
-                message.doneReading();
-                callback.frameEvent(FrameEvent.FINISHED_PROCESSING_SPILLAGE);
-                break;
-
-        }
-    }
-
-    private ByteBuffer throttleFrame(FrameTupleAccessor fta, double retainFraction) throws HyracksDataException {
-        int desiredTuples = (int) (fta.getTupleCount() * retainFraction);
-        return FeedFrameUtil.getSampledFrame(ctx, fta, desiredTuples);
-    }
-
-    public Mode getMode() {
-        return inputHandler.getMode();
-    }
-
-    public FeedRuntimeId getRuntimeId() {
-        return runtimeId;
-    }
-
-    public void setFrameWriter(IFrameWriter frameWriter) {
-        this.frameWriter = frameWriter;
-    }
-
-    public void reset() {
-        active = true;
-        if (logInflowOutflowRate) {
-            metricCollector.resetReportSender(inflowReportSenderId);
-            metricCollector.resetReportSender(outflowReportSenderId);
-        }
-    }
-
-    public int getProcessingRate() {
-        return processingRate;
-    }
-
-    public Map<Integer, Long> getTupleTimeStats() {
-        return tupleTimeStats;
-    }
-
-    public long getAvgDelayRecordPersistence() {
-        return avgDelayPersistence;
-    }
-
-    public MonitoredBufferStorageTimerTask getStorageTimeTrackingRateTask() {
-        return storageTimeTrackingRateTask;
-    }
-
-}
\ No newline at end of file
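
The throttling logic inside the removed processMessage() reduces to a simple proportion: when tuples arrive faster than the measured processing rate, retain only the fraction that targets roughly 80% of that rate and sample the frame down to the resulting tuple count. A standalone sketch of just that calculation:

    // Sketch of the throttling rule used above; frame sampling itself is omitted.
    class ThrottleSketch {
        // Returns the fraction of tuples to retain, or 1.0 when no throttling is needed.
        static double retainFraction(int inflowRate, int processingRate) {
            if (processingRate <= 0 || inflowRate <= processingRate) {
                return 1.0;
            }
            return (processingRate * 0.8) / inflowRate;    // aim at 80% of processing capacity
        }

        static int tuplesToKeep(int tupleCount, int inflowRate, int processingRate) {
            return (int) (tupleCount * retainFraction(inflowRate, processingRate));
        }
    }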

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBufferTimerTasks.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBufferTimerTasks.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBufferTimerTasks.java
deleted file mode 100644
index 3434b4f..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/MonitoredBufferTimerTasks.java
+++ /dev/null
@@ -1,294 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TimerTask;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.config.AsterixFeedProperties;
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedMessageService;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector.ValueType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.Mode;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback.FrameEvent;
-import org.apache.asterix.common.feeds.message.FeedReportMessage;
-import org.apache.asterix.common.feeds.message.ScaleInReportMessage;
-import org.apache.asterix.common.feeds.message.StorageReportFeedMessage;
-
-public class MonitoredBufferTimerTasks {
-
-    private static final Logger LOGGER = Logger.getLogger(MonitorInputQueueLengthTimerTask.class.getName());
-
-    public static class MonitoredBufferStorageTimerTask extends TimerTask {
-
-        private static final int PERSISTENCE_DELAY_VIOLATION_MAX = 5;
-
-        private final StorageSideMonitoredBuffer mBuffer;
-        private final IFeedManager feedManager;
-        private final int partition;
-        private final FeedConnectionId connectionId;
-        private final FeedPolicyAccessor policyAccessor;
-        private final StorageFrameHandler storageFromeHandler;
-        private final StorageReportFeedMessage storageReportMessage;
-        private final FeedTupleCommitAckMessage tupleCommitAckMessage;
-
-        private Map<Integer, Integer> maxIntakeBaseCovered;
-        private int countDelayExceeded = 0;
-
-        public MonitoredBufferStorageTimerTask(StorageSideMonitoredBuffer mBuffer, IFeedManager feedManager,
-                FeedConnectionId connectionId, int partition, FeedPolicyAccessor policyAccessor,
-                StorageFrameHandler storageFromeHandler) {
-            this.mBuffer = mBuffer;
-            this.feedManager = feedManager;
-            this.connectionId = connectionId;
-            this.partition = partition;
-            this.policyAccessor = policyAccessor;
-            this.storageFromeHandler = storageFromeHandler;
-            this.storageReportMessage = new StorageReportFeedMessage(this.connectionId, this.partition, 0, false, 0, 0);
-            this.tupleCommitAckMessage = new FeedTupleCommitAckMessage(this.connectionId, 0, 0, null);
-            this.maxIntakeBaseCovered = new HashMap<Integer, Integer>();
-        }
-
-        @Override
-        public void run() {
-            if (mBuffer.isAckingEnabled() && !mBuffer.getInputHandler().isThrottlingEnabled()) {
-                ackRecords();
-            }
-            if (mBuffer.isTimeTrackingEnabled()) {
-                checkLatencyViolation();
-            }
-        }
-
-        private void ackRecords() {
-            Set<Integer> partitions = storageFromeHandler.getPartitionsWithStats();
-            List<Integer> basesCovered = new ArrayList<Integer>();
-            for (int intakePartition : partitions) {
-                Map<Integer, IntakePartitionStatistics> baseAcks = storageFromeHandler
-                        .getBaseAcksForPartition(intakePartition);
-                for (Entry<Integer, IntakePartitionStatistics> entry : baseAcks.entrySet()) {
-                    int base = entry.getKey();
-                    IntakePartitionStatistics stats = entry.getValue();
-                    Integer maxIntakeBaseForPartition = maxIntakeBaseCovered.get(intakePartition);
-                    if (maxIntakeBaseForPartition == null || maxIntakeBaseForPartition < base) {
-                        tupleCommitAckMessage.reset(intakePartition, base, stats.getAckInfo());
-                        feedManager.getFeedMessageService().sendMessage(tupleCommitAckMessage);
-                    } else {
-                        basesCovered.add(base);
-                    }
-                }
-                for (Integer b : basesCovered) {
-                    baseAcks.remove(b);
-                }
-                basesCovered.clear();
-            }
-        }
-
-        private void checkLatencyViolation() {
-            long avgDelayPersistence = storageFromeHandler.getAvgDelayPersistence();
-            if (avgDelayPersistence > policyAccessor.getMaxDelayRecordPersistence()) {
-                countDelayExceeded++;
-                if (countDelayExceeded > PERSISTENCE_DELAY_VIOLATION_MAX) {
-                    storageReportMessage.reset(0, false, mBuffer.getAvgDelayRecordPersistence());
-                    feedManager.getFeedMessageService().sendMessage(storageReportMessage);
-                }
-            } else {
-                countDelayExceeded = 0;
-            }
-        }
-
-        public void receiveCommitAckResponse(FeedTupleCommitResponseMessage message) {
-            maxIntakeBaseCovered.put(message.getIntakePartition(), message.getMaxWindowAcked());
-        }
-    }
-
-    public static class LogInputOutputRateTask extends TimerTask {
-
-        private final MonitoredBuffer mBuffer;
-        private final boolean log;
-        private final boolean reportInflow;
-        private final boolean reportOutflow;
-
-        private final IFeedMessageService messageService;
-        private final FeedReportMessage message;
-
-        public LogInputOutputRateTask(MonitoredBuffer mBuffer, boolean log, boolean reportInflow, boolean reportOutflow) {
-            this.mBuffer = mBuffer;
-            this.log = log;
-            this.reportInflow = reportInflow;
-            this.reportOutflow = reportOutflow;
-            if (reportInflow || reportOutflow) {
-                ValueType vType = reportInflow ? ValueType.INFLOW_RATE : ValueType.OUTFLOW_RATE;
-                messageService = mBuffer.getInputHandler().getFeedManager().getFeedMessageService();
-                message = new FeedReportMessage(mBuffer.getInputHandler().getConnectionId(), mBuffer.getRuntimeId(),
-                        vType, 0);
-            } else {
-                messageService = null;
-                message = null;
-            }
-
-        }
-
-        @Override
-        public void run() {
-            int pendingWork = mBuffer.getWorkSize();
-            int outflowRate = mBuffer.getOutflowRate();
-            int inflowRate = mBuffer.getInflowRate();
-            if (log) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(mBuffer.getRuntimeId() + " " + "Inflow rate:" + inflowRate + " Outflow Rate:"
-                            + outflowRate + " Pending Work " + pendingWork);
-                }
-            }
-            if (reportInflow) {
-                message.reset(inflowRate);
-            } else if (reportOutflow) {
-                message.reset(outflowRate);
-            }
-            messageService.sendMessage(message);
-        }
-    }
-
-    public static class MonitorInputQueueLengthTimerTask extends TimerTask {
-
-        private final MonitoredBuffer mBuffer;
-        private final IFrameEventCallback callback;
-        private final int pendingWorkThreshold;
-        private final int maxSuccessiveThresholdPeriods;
-        private FrameEvent lastEvent = FrameEvent.NO_OP;
-        private int pendingWorkExceedCount = 0;
-
-        public MonitorInputQueueLengthTimerTask(MonitoredBuffer mBuffer, IFrameEventCallback callback) {
-            this.mBuffer = mBuffer;
-            this.callback = callback;
-            AsterixFeedProperties props = mBuffer.getInputHandler().getFeedManager().getAsterixFeedProperties();
-            pendingWorkThreshold = props.getPendingWorkThreshold();
-            maxSuccessiveThresholdPeriods = props.getMaxSuccessiveThresholdPeriod();
-        }
-
-        @Override
-        public void run() {
-            int pendingWork = mBuffer.getWorkSize();
-            if (mBuffer.getMode().equals(Mode.PROCESS_SPILL) || mBuffer.getMode().equals(Mode.PROCESS_BACKLOG)) {
-                return;
-            }
-
-            switch (lastEvent) {
-                case NO_OP:
-                case PENDING_WORK_DONE:
-                case FINISHED_PROCESSING_SPILLAGE:
-                    if (pendingWork > pendingWorkThreshold) {
-                        pendingWorkExceedCount++;
-                        if (pendingWorkExceedCount > maxSuccessiveThresholdPeriods) {
-                            pendingWorkExceedCount = 0;
-                            lastEvent = FrameEvent.PENDING_WORK_THRESHOLD_REACHED;
-                            callback.frameEvent(lastEvent);
-                        }
-                    } else if (pendingWork == 0 && mBuffer.getMode().equals(Mode.SPILL)) {
-                        lastEvent = FrameEvent.PENDING_WORK_DONE;
-                        callback.frameEvent(lastEvent);
-                    }
-                    break;
-                case PENDING_WORK_THRESHOLD_REACHED:
-                    if (((pendingWork * 1.0) / pendingWorkThreshold) <= 0.5) {
-                        lastEvent = FrameEvent.PENDING_WORK_DONE;
-                        callback.frameEvent(lastEvent);
-                    }
-                    break;
-                case FINISHED_PROCESSING:
-                    break;
-
-            }
-        }
-    }
-
-    /**
-     * A timer task to measure and compare the processing rate and inflow rate
-     * to look for an opportunity to scale in, that is, to reduce the cardinality
-     * (degree of parallelism) of the compute operator.
-     */
-    public static class MonitoreProcessRateTimerTask extends TimerTask {
-
-        private final MonitoredBuffer mBuffer;
-        private final IFeedManager feedManager;
-        private int nPartitions;
-        private ScaleInReportMessage sMessage;
-        private boolean proposedChange;
-
-        public MonitoreProcessRateTimerTask(MonitoredBuffer mBuffer, IFeedManager feedManager,
-                FeedConnectionId connectionId, int nPartitions) {
-            this.mBuffer = mBuffer;
-            this.feedManager = feedManager;
-            this.nPartitions = nPartitions;
-            this.sMessage = new ScaleInReportMessage(connectionId, FeedRuntimeType.COMPUTE, 0, 0);
-            this.proposedChange = false;
-        }
-
-        public int getNumberOfPartitions() {
-            return nPartitions;
-        }
-
-        public void setNumberOfPartitions(int nPartitions) {
-            this.nPartitions = nPartitions;
-            proposedChange = false;
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Reset the number of partitions for " + mBuffer.getRuntimeId() + " to " + nPartitions);
-            }
-        }
-
-        @Override
-        public void run() {
-            if (!proposedChange) {
-                int inflowRate = mBuffer.getInflowRate();
-                int procRate = mBuffer.getProcessingRate();
-                if (inflowRate > 0 && procRate > 0) {
-                    if (inflowRate < procRate) {
-                        int possibleCardinality = (int) Math.ceil(nPartitions * inflowRate / (double) procRate);
-                        if (possibleCardinality < nPartitions
-                                && ((((nPartitions - possibleCardinality) * 1.0) / nPartitions) >= 0.25)) {
-                            sMessage.reset(nPartitions, possibleCardinality);
-                            feedManager.getFeedMessageService().sendMessage(sMessage);
-                            proposedChange = true;
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Proposed scale-in " + sMessage);
-                            }
-                        }
-                    } else {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Inflow Rate (" + inflowRate + ") exceeds Processing Rate" + " (" + procRate
-                                    + ")");
-                        }
-                    }
-                }
-            } else {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Waiting for earlier proposal to scale in to be applied");
-                }
-            }
-        }
-    }
-}
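
The scale-in heuristic in the timer task above proposes a smaller compute cardinality only when the inflow rate is below the processing rate and the reduction would save at least 25% of the current partitions. A standalone sketch of the calculation:

    class ScaleInSketch {
        // Returns the proposed number of partitions, or -1 when no change should be proposed.
        static int proposeCardinality(int nPartitions, int inflowRate, int processingRate) {
            if (inflowRate <= 0 || processingRate <= 0 || inflowRate >= processingRate) {
                return -1;
            }
            int possible = (int) Math.ceil(nPartitions * inflowRate / (double) processingRate);
            boolean bigEnoughSaving = ((nPartitions - possible) * 1.0) / nPartitions >= 0.25;
            return (possible < nPartitions && bigEnoughSaving) ? possible : -1;
        }
    }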

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoad.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoad.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoad.java
deleted file mode 100644
index b654563..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoad.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-
-public class NodeLoad implements Comparable<NodeLoad> {
-
-    private final String nodeId;
-
-    private int nRuntimes;
-
-    public NodeLoad(String nodeId) {
-        this.nodeId = nodeId;
-        this.nRuntimes = 0;
-    }
-
-    public void addLoad() {
-        nRuntimes++;
-    }
-
-    public void removeLoad(FeedRuntimeType runtimeType) {
-        nRuntimes--;
-    }
-
-    @Override
-    public int compareTo(NodeLoad o) {
-        if (this == o) {
-            return 0;
-        }
-        return nRuntimes - o.getnRuntimes();
-    }
-
-    public String getNodeId() {
-        return nodeId;
-    }
-
-    public int getnRuntimes() {
-        return nRuntimes;
-    }
-
-    public void setnRuntimes(int nRuntimes) {
-        this.nRuntimes = nRuntimes;
-    }
-
-}
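
Since the removed NodeLoad orders nodes by their runtime count, selecting the least-loaded node is just taking the minimum of a collection. A small usage sketch with a stand-in class:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    class LeastLoadedNodeSketch {
        static class Load implements Comparable<Load> {
            final String nodeId;
            final int nRuntimes;

            Load(String nodeId, int nRuntimes) {
                this.nodeId = nodeId;
                this.nRuntimes = nRuntimes;
            }

            @Override
            public int compareTo(Load o) {
                return Integer.compare(nRuntimes, o.nRuntimes);
            }
        }

        public static void main(String[] args) {
            List<Load> loads = new ArrayList<Load>();
            loads.add(new Load("nc1", 3));
            loads.add(new Load("nc2", 1));
            loads.add(new Load("nc3", 2));
            System.out.println("place the new runtime on " + Collections.min(loads).nodeId);    // nc2
        }
    }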

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReport.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReport.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReport.java
deleted file mode 100644
index a509341..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReport.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.json.JSONException;
-import org.json.JSONObject;
-
-public class NodeLoadReport implements Comparable<NodeLoadReport> {
-
-    private final String nodeId;
-    private float cpuLoad;
-    private double usedHeap;
-    private int nRuntimes;
-
-    public NodeLoadReport(String nodeId, float cpuLoad, float usedHeap, int nRuntimes) {
-        this.nodeId = nodeId;
-        this.cpuLoad = cpuLoad;
-        this.usedHeap = usedHeap;
-        this.nRuntimes = nRuntimes;
-    }
-
-    public static NodeLoadReport read(JSONObject obj) throws JSONException {
-        NodeLoadReport r = new NodeLoadReport(obj.getString(FeedConstants.MessageConstants.NODE_ID),
-                (float) obj.getDouble(FeedConstants.MessageConstants.CPU_LOAD),
-                (float) obj.getDouble(FeedConstants.MessageConstants.HEAP_USAGE),
-                obj.getInt(FeedConstants.MessageConstants.N_RUNTIMES));
-        return r;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (!(o instanceof NodeLoadReport)) {
-            return false;
-        }
-        return ((NodeLoadReport) o).nodeId.equals(nodeId);
-    }
-
-    @Override
-    public int hashCode() {
-        return nodeId.hashCode();
-    }
-
-    @Override
-    public int compareTo(NodeLoadReport o) {
-        if (nRuntimes != o.getnRuntimes()) {
-            return nRuntimes - o.getnRuntimes();
-        } else {
-            return Float.compare(this.cpuLoad, o.cpuLoad);
-        }
-    }
-
-    public float getCpuLoad() {
-        return cpuLoad;
-    }
-
-    public void setCpuLoad(float cpuLoad) {
-        this.cpuLoad = cpuLoad;
-    }
-
-    public double getUsedHeap() {
-        return usedHeap;
-    }
-
-    public void setUsedHeap(double usedHeap) {
-        this.usedHeap = usedHeap;
-    }
-
-    public int getnRuntimes() {
-        return nRuntimes;
-    }
-
-    public void setnRuntimes(int nRuntimes) {
-        this.nRuntimes = nRuntimes;
-    }
-
-    public String getNodeId() {
-        return nodeId;
-    }
-
-}
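
For illustration only (not part of this patch): NodeLoadReport.compareTo orders nodes by the number of active feed runtimes first and by CPU load second, so the least-loaded node sorts first. A standalone sketch of that ordering against a plain stand-in class; the Report class and node names below are hypothetical.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

class LoadOrderingSketch {
    static class Report {
        final String nodeId;
        final int nRuntimes;
        final float cpuLoad;

        Report(String nodeId, int nRuntimes, float cpuLoad) {
            this.nodeId = nodeId;
            this.nRuntimes = nRuntimes;
            this.cpuLoad = cpuLoad;
        }
    }

    public static void main(String[] args) {
        List<Report> reports = new ArrayList<Report>();
        reports.add(new Report("nc1", 4, 0.70f));
        reports.add(new Report("nc2", 2, 0.90f));
        reports.add(new Report("nc3", 2, 0.30f));
        // Primary key: number of feed runtimes; secondary key: CPU load.
        Collections.sort(reports, new Comparator<Report>() {
            @Override
            public int compare(Report a, Report b) {
                if (a.nRuntimes != b.nRuntimes) {
                    return Integer.compare(a.nRuntimes, b.nRuntimes);
                }
                return Float.compare(a.cpuLoad, b.cpuLoad);
            }
        });
        // nc3 (2 runtimes, lowest CPU load) sorts first and would be the preferred placement target.
        System.out.println(reports.get(0).nodeId);
    }
}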

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReportService.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReportService.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReportService.java
deleted file mode 100644
index 6be0211..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/NodeLoadReportService.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.MemoryMXBean;
-import java.lang.management.OperatingSystemMXBean;
-import java.util.List;
-import java.util.Timer;
-import java.util.TimerTask;
-
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedMessageService;
-import org.apache.asterix.common.feeds.api.IFeedService;
-import org.apache.asterix.common.feeds.message.NodeReportMessage;
-
-public class NodeLoadReportService implements IFeedService {
-
-    private static final int NODE_LOAD_REPORT_FREQUENCY = 2000;
-    private static final float CPU_CHANGE_THRESHOLD = 0.2f;
-    private static final float HEAP_CHANGE_THRESHOLD = 0.4f;
-
-    private final NodeLoadReportTask task;
-    private final Timer timer;
-
-    public NodeLoadReportService(String nodeId, IFeedManager feedManager) {
-        this.task = new NodeLoadReportTask(nodeId, feedManager);
-        this.timer = new Timer();
-    }
-
-    @Override
-    public void start() throws Exception {
-        timer.schedule(task, 0, NODE_LOAD_REPORT_FREQUENCY);
-    }
-
-    @Override
-    public void stop() {
-        timer.cancel();
-    }
-
-    private static class NodeLoadReportTask extends TimerTask {
-
-        private final IFeedManager feedManager;
-        private final NodeReportMessage message;
-        private final IFeedMessageService messageService;
-
-        private static OperatingSystemMXBean osBean = ManagementFactory.getOperatingSystemMXBean();
-        private static MemoryMXBean memBean = ManagementFactory.getMemoryMXBean();
-
-        public NodeLoadReportTask(String nodeId, IFeedManager feedManager) {
-            this.feedManager = feedManager;
-            this.message = new NodeReportMessage(0.0f, 0L, 0);
-            this.messageService = feedManager.getFeedMessageService();
-        }
-
-        @Override
-        public void run() {
-            List<FeedRuntimeId> runtimeIds = feedManager.getFeedConnectionManager().getRegisteredRuntimes();
-            int nRuntimes = runtimeIds.size();
-            double cpuLoad = getCpuLoad();
-            double usedHeap = getUsedHeap();
-            if (sendMessage(nRuntimes, cpuLoad, usedHeap)) {
-                message.reset(cpuLoad, usedHeap, nRuntimes);
-                messageService.sendMessage(message);
-            }
-        }
-
-        private boolean sendMessage(int nRuntimes, double cpuLoad, double usedHeap) {
-            if (message == null) {
-                return true;
-            }
-
-            boolean changeInCpu = (Math.abs(cpuLoad - message.getCpuLoad())
-                    / message.getCpuLoad()) > CPU_CHANGE_THRESHOLD;
-            boolean changeInUsedHeap = (Math.abs(usedHeap - message.getUsedHeap())
-                    / message.getUsedHeap()) > HEAP_CHANGE_THRESHOLD;
-            boolean changeInRuntimeSize = nRuntimes != message.getnRuntimes();
-            return changeInCpu || changeInUsedHeap || changeInRuntimeSize;
-        }
-
-        private double getCpuLoad() {
-            return osBean.getSystemLoadAverage();
-        }
-
-        private double getUsedHeap() {
-            return ((double) memBean.getHeapMemoryUsage().getUsed()) / memBean.getHeapMemoryUsage().getMax();
-        }
-    }
-
-}
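
For illustration only (not part of this patch): NodeLoadReportTask samples load every two seconds but sends a NodeReportMessage only when CPU or heap usage has drifted past a relative threshold, or the runtime count has changed. A self-contained sketch of that change-detection test; the class name and the sample values in main() are hypothetical, while the threshold values mirror the constants above.

class LoadChangeDetectorSketch {
    private static final double CPU_CHANGE_THRESHOLD = 0.2;  // 20% relative change
    private static final double HEAP_CHANGE_THRESHOLD = 0.4; // 40% relative change

    private double lastCpu = -1;
    private double lastHeap = -1;
    private int lastRuntimes = -1;

    /** Returns true when the new sample differs enough from the last reported one. */
    boolean shouldReport(double cpu, double heap, int nRuntimes) {
        if (lastCpu < 0) { // nothing reported yet: always report the first sample
            remember(cpu, heap, nRuntimes);
            return true;
        }
        boolean cpuChanged = Math.abs(cpu - lastCpu) / lastCpu > CPU_CHANGE_THRESHOLD;
        boolean heapChanged = Math.abs(heap - lastHeap) / lastHeap > HEAP_CHANGE_THRESHOLD;
        boolean runtimesChanged = nRuntimes != lastRuntimes;
        if (cpuChanged || heapChanged || runtimesChanged) {
            remember(cpu, heap, nRuntimes);
            return true;
        }
        return false;
    }

    private void remember(double cpu, double heap, int nRuntimes) {
        lastCpu = cpu;
        lastHeap = heap;
        lastRuntimes = nRuntimes;
    }

    public static void main(String[] args) {
        LoadChangeDetectorSketch d = new LoadChangeDetectorSketch();
        System.out.println(d.shouldReport(1.0, 0.50, 3)); // true: first sample
        System.out.println(d.shouldReport(1.1, 0.55, 3)); // false: within both thresholds
        System.out.println(d.shouldReport(1.5, 0.55, 3)); // true: CPU changed by 50%
    }
}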

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/Series.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/Series.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/Series.java
deleted file mode 100644
index 6f438ad..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/Series.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector.MetricType;
-
-public abstract class Series {
-
-    protected final MetricType type;
-    protected int runningSum;
-
-    public Series(MetricType type) {
-        this.type = type;
-    }
-
-    public abstract void addValue(int value);
-
-    public int getRunningSum() {
-        return runningSum;
-    }
-
-    public MetricType getType() {
-        return type;
-    }
-
-    public abstract void reset();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesAvg.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesAvg.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesAvg.java
deleted file mode 100644
index 6bfe925..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesAvg.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector.MetricType;
-
-public class SeriesAvg extends Series {
-
-    private int count;
-
-    public SeriesAvg() {
-        super(MetricType.AVG);
-    }
-
-    public int getAvg() {
-        return runningSum / count;
-    }
-
-    public synchronized void addValue(int value) {
-        if (value < 0) {
-            return;
-        }
-        runningSum += value;
-        count++;
-    }
-
-    public void reset() {
-        count = 0;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesRate.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesRate.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesRate.java
deleted file mode 100644
index 9d0b0ea..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SeriesRate.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.Timer;
-import java.util.TimerTask;
-
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector.MetricType;
-
-public class SeriesRate extends Series {
-
-    private static final long REFRESH_MEASUREMENT = 5000; // 5 seconds
-
-    private int rate;
-    private Timer timer;
-    private RateComputingTask task;
-
-    public SeriesRate() {
-        super(MetricType.RATE);
-        begin();
-    }
-
-    public int getRate() {
-        return rate;
-    }
-
-    public synchronized void addValue(int value) {
-        if (value < 0) {
-            return;
-        }
-        runningSum += value;
-    }
-
-    public void begin() {
-        if (timer == null) {
-            timer = new Timer();
-            task = new RateComputingTask(this);
-            timer.scheduleAtFixedRate(task, 0, REFRESH_MEASUREMENT);
-        }
-    }
-
-    public void end() {
-        if (timer != null) {
-            timer.cancel();
-        }
-    }
-
-    public void reset() {
-        rate = 0;
-        if (task != null) {
-            task.reset();
-        }
-    }
-
-    private class RateComputingTask extends TimerTask {
-
-        private int lastMeasured = 0;
-        private final SeriesRate series;
-
-        public RateComputingTask(SeriesRate series) {
-            this.series = series;
-        }
-
-        @Override
-        public void run() {
-            int currentValue = series.getRunningSum();
-            rate = (int) (((currentValue - lastMeasured) * 1000) / REFRESH_MEASUREMENT);
-            lastMeasured = currentValue;
-        }
-
-        public void reset() {
-            lastMeasured = 0;
-        }
-    }
-
-}
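
For illustration only (not part of this patch): RateComputingTask turns the delta of the running sum, sampled every REFRESH_MEASUREMENT milliseconds, into a per-second rate. A tiny standalone sketch of that arithmetic; the counter values are made up.

class RateSketch {
    public static void main(String[] args) {
        final long refreshMillis = 5000;   // sampling period, matching REFRESH_MEASUREMENT
        int lastMeasured = 1200;           // running sum at the previous tick (made-up value)
        int currentValue = 1450;           // running sum at the current tick (made-up value)
        // 250 new records observed over 5 seconds -> 50 records per second
        int ratePerSecond = (int) (((currentValue - lastMeasured) * 1000L) / refreshMillis);
        System.out.println(ratePerSecond); // prints 50
    }
}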

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageFrameHandler.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageFrameHandler.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageFrameHandler.java
deleted file mode 100644
index 5b99b8c..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageFrameHandler.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import org.apache.asterix.common.feeds.FeedConstants.StatisticsConstants;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class StorageFrameHandler {
-
-    private final Map<Integer, Map<Integer, IntakePartitionStatistics>> intakeStatistics;
-    private long avgDelayPersistence;
-
-    public StorageFrameHandler() {
-        intakeStatistics = new HashMap<Integer, Map<Integer, IntakePartitionStatistics>>();
-        avgDelayPersistence = 0L;
-    }
-
-    public synchronized void updateTrackingInformation(ByteBuffer frame, FrameTupleAccessor frameAccessor) {
-        int nTuples = frameAccessor.getTupleCount();
-        long delay = 0;
-        long intakeTimestamp;
-        long currentTime = System.currentTimeMillis();
-        int partition = 0;
-        int recordId = 0;
-        for (int i = 0; i < nTuples; i++) {
-            int recordStart = frameAccessor.getTupleStartOffset(i) + frameAccessor.getFieldSlotsLength();
-            int openPartOffsetOrig = frame.getInt(recordStart + 6);
-            int numOpenFields = frame.getInt(recordStart + openPartOffsetOrig);
-
-            int recordIdOffset = openPartOffsetOrig + 4 + 8 * numOpenFields
-                    + (StatisticsConstants.INTAKE_TUPLEID.length() + 2) + 1;
-            recordId = frame.getInt(recordStart + recordIdOffset);
-
-            int partitionOffset = recordIdOffset + 4 + (StatisticsConstants.INTAKE_PARTITION.length() + 2) + 1;
-            partition = frame.getInt(recordStart + partitionOffset);
-
-            ackRecordId(partition, recordId);
-            int intakeTimestampValueOffset = partitionOffset + 4 + (StatisticsConstants.INTAKE_TIMESTAMP.length() + 2)
-                    + 1;
-            intakeTimestamp = frame.getLong(recordStart + intakeTimestampValueOffset);
-
-            int storeTimestampValueOffset = intakeTimestampValueOffset + 8
-                    + (StatisticsConstants.STORE_TIMESTAMP.length() + 2) + 1;
-            frame.putLong(recordStart + storeTimestampValueOffset, System.currentTimeMillis());
-            delay += currentTime - intakeTimestamp;
-        }
-        avgDelayPersistence = delay / nTuples;
-    }
-
-    private void ackRecordId(int partition, int recordId) {
-        Map<Integer, IntakePartitionStatistics> map = intakeStatistics.get(partition);
-        if (map == null) {
-            map = new HashMap<Integer, IntakePartitionStatistics>();
-            intakeStatistics.put(partition, map);
-        }
-        int base = (int) Math.ceil(recordId * 1.0 / IntakePartitionStatistics.ACK_WINDOW_SIZE);
-        IntakePartitionStatistics intakeStatsForBaseOfPartition = map.get(base);
-        if (intakeStatsForBaseOfPartition == null) {
-            intakeStatsForBaseOfPartition = new IntakePartitionStatistics(partition, base);
-            map.put(base, intakeStatsForBaseOfPartition);
-        }
-        intakeStatsForBaseOfPartition.ackRecordId(recordId);
-    }
-
-    public byte[] getAckData(int partition, int base) {
-        Map<Integer, IntakePartitionStatistics> intakeStats = intakeStatistics.get(partition);
-        if (intakeStats != null) {
-            IntakePartitionStatistics intakePartitionStats = intakeStats.get(base);
-            if (intakePartitionStats != null) {
-                return intakePartitionStats.getAckInfo();
-            }
-        }
-        return null;
-    }
-
-    public synchronized Map<Integer, IntakePartitionStatistics> getBaseAcksForPartition(int partition) {
-        Map<Integer, IntakePartitionStatistics> intakeStatsForPartition = intakeStatistics.get(partition);
-        Map<Integer, IntakePartitionStatistics> clone = new HashMap<Integer, IntakePartitionStatistics>();
-        for (Entry<Integer, IntakePartitionStatistics> entry : intakeStatsForPartition.entrySet()) {
-            clone.put(entry.getKey(), entry.getValue());
-        }
-        return intakeStatsForPartition;
-    }
-
-    public long getAvgDelayPersistence() {
-        return avgDelayPersistence;
-    }
-
-    public void setAvgDelayPersistence(long avgDelayPersistence) {
-        this.avgDelayPersistence = avgDelayPersistence;
-    }
-
-    public Set<Integer> getPartitionsWithStats() {
-        return intakeStatistics.keySet();
-    }
-}
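
For illustration only (not part of this patch): ackRecordId buckets record ids into acknowledgement windows via base = ceil(recordId / ACK_WINDOW_SIZE). A standalone sketch of that mapping; the window width of 1000 is an assumed value, since the real constant lives in IntakePartitionStatistics and is not shown here.

class AckWindowSketch {
    public static void main(String[] args) {
        final int ackWindowSize = 1000; // assumed window width
        int[] recordIds = { 1, 999, 1000, 1001, 2500 };
        for (int recordId : recordIds) {
            int base = (int) Math.ceil(recordId * 1.0 / ackWindowSize);
            // With a width of 1000, ids 1..1000 map to window 1, 1001..2000 to window 2, and so on.
            System.out.println(recordId + " -> window " + base);
        }
    }
}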

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageSideMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageSideMonitoredBuffer.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageSideMonitoredBuffer.java
deleted file mode 100644
index 4027237..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/StorageSideMonitoredBuffer.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-
-import org.apache.asterix.common.feeds.FeedConstants.StatisticsConstants;
-import org.apache.asterix.common.feeds.api.IExceptionHandler;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback;
-import org.apache.asterix.common.feeds.api.IFramePostProcessor;
-import org.apache.asterix.common.feeds.api.IFramePreprocessor;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class StorageSideMonitoredBuffer extends MonitoredBuffer {
-
-    private static final long STORAGE_TIME_TRACKING_FREQUENCY = 5000; // 5 seconds
-
-    private boolean ackingEnabled;
-    private final boolean timeTrackingEnabled;
-
-    public StorageSideMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler,
-            IFrameWriter frameWriter, FrameTupleAccessor fta, RecordDescriptor recordDesc,
-            IFeedMetricCollector metricCollector, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            IExceptionHandler exceptionHandler, IFrameEventCallback callback, int nPartitions,
-            FeedPolicyAccessor policyAccessor) {
-        super(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector, connectionId, runtimeId,
-                exceptionHandler, callback, nPartitions, policyAccessor);
-        timeTrackingEnabled = policyAccessor.isTimeTrackingEnabled();
-        ackingEnabled = policyAccessor.atleastOnceSemantics();
-        if (ackingEnabled || timeTrackingEnabled) {
-            storageFromeHandler = new StorageFrameHandler();
-            this.storageTimeTrackingRateTask = new MonitoredBufferTimerTasks.MonitoredBufferStorageTimerTask(this,
-                    inputHandler.getFeedManager(), connectionId, runtimeId.getPartition(), policyAccessor,
-                    storageFromeHandler);
-            this.timer.scheduleAtFixedRate(storageTimeTrackingRateTask, 0, STORAGE_TIME_TRACKING_FREQUENCY);
-        }
-    }
-
-    @Override
-    protected boolean monitorProcessingRate() {
-        return false;
-    }
-
-    @Override
-    protected boolean logInflowOutflowRate() {
-        return true;
-    }
-
-    @Override
-    public IFramePreprocessor getFramePreProcessor() {
-        return new IFramePreprocessor() {
-
-            @Override
-            public void preProcess(ByteBuffer frame) {
-                try {
-                    if (ackingEnabled) {
-                        storageFromeHandler.updateTrackingInformation(frame, inflowFta);
-                    }
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-        };
-    }
-
-    @Override
-    protected IFramePostProcessor getFramePostProcessor() {
-        return new IFramePostProcessor() {
-
-            private static final long NORMAL_WINDOW_LIMIT = 400 * 1000;
-            private static final long HIGH_WINDOW_LIMIT = 800 * 1000;
-
-            private long delayNormalWindow = 0;
-            private long delayHighWindow = 0;
-            private long delayLowWindow = 0;
-
-            private int countNormalWindow;
-            private int countHighWindow;
-            private int countLowWindow;
-
-            private long beginIntakeTimestamp = 0;
-
-            @Override
-            public void postProcessFrame(ByteBuffer frame, FrameTupleAccessor frameAccessor) {
-                if (ackingEnabled || timeTrackingEnabled) {
-                    int nTuples = frameAccessor.getTupleCount();
-                    long intakeTimestamp;
-                    long currentTime = System.currentTimeMillis();
-                    for (int i = 0; i < nTuples; i++) {
-                        int recordStart = frameAccessor.getTupleStartOffset(i) + frameAccessor.getFieldSlotsLength();
-                        int openPartOffsetOrig = frame.getInt(recordStart + 6);
-                        int numOpenFields = frame.getInt(recordStart + openPartOffsetOrig);
-
-                        int recordIdOffset = openPartOffsetOrig + 4 + 8 * numOpenFields
-                                + (StatisticsConstants.INTAKE_TUPLEID.length() + 2) + 1;
-
-                        int partitionOffset = recordIdOffset + 4 + (StatisticsConstants.INTAKE_PARTITION.length() + 2)
-                                + 1;
-
-                        int intakeTimestampValueOffset = partitionOffset + 4
-                                + (StatisticsConstants.INTAKE_TIMESTAMP.length() + 2) + 1;
-                        intakeTimestamp = frame.getLong(recordStart + intakeTimestampValueOffset);
-                        if (beginIntakeTimestamp == 0) {
-                            beginIntakeTimestamp = intakeTimestamp;
-                            LOGGER.warning("Begin Timestamp: " + beginIntakeTimestamp);
-                        }
-
-                        updateRunningAvg(intakeTimestamp, currentTime);
-
-                        int storeTimestampValueOffset = intakeTimestampValueOffset + 8
-                                + (StatisticsConstants.STORE_TIMESTAMP.length() + 2) + 1;
-                        frame.putLong(recordStart + storeTimestampValueOffset, System.currentTimeMillis());
-                    }
-                    logRunningAvg();
-                    resetRunningAvg();
-                }
-            }
-
-            private void updateRunningAvg(long intakeTimestamp, long currentTime) {
-                long diffTimestamp = intakeTimestamp - beginIntakeTimestamp;
-                long delay = (currentTime - intakeTimestamp);
-                if (diffTimestamp < NORMAL_WINDOW_LIMIT) {
-                    delayNormalWindow += delay;
-                    countNormalWindow++;
-                } else if (diffTimestamp < HIGH_WINDOW_LIMIT) {
-                    delayHighWindow += delay;
-                    countHighWindow++;
-                } else {
-                    delayLowWindow += delay;
-                    countLowWindow++;
-                }
-            }
-
-            private void resetRunningAvg() {
-                delayNormalWindow = 0;
-                countNormalWindow = 0;
-                delayHighWindow = 0;
-                countHighWindow = 0;
-                delayLowWindow = 0;
-                countLowWindow = 0;
-            }
-
-            private void logRunningAvg() {
-                if (countNormalWindow != 0 && delayNormalWindow != 0) {
-                    LOGGER.warning("Window:" + 0 + ":" + "Avg Travel_Time:" + (delayNormalWindow / countNormalWindow));
-                }
-                if (countHighWindow != 0 && delayHighWindow != 0) {
-                    LOGGER.warning("Window:" + 1 + ":" + "Avg Travel_Time:" + (delayHighWindow / countHighWindow));
-                }
-                if (countLowWindow != 0 && delayLowWindow != 0) {
-                    LOGGER.warning("Window:" + 2 + ":" + "Avg Travel_Time:" + (delayLowWindow / countLowWindow));
-                }
-            }
-
-        };
-    }
-
-    public boolean isAckingEnabled() {
-        return ackingEnabled;
-    }
-
-    public void setAcking(boolean ackingEnabled) {
-        this.ackingEnabled = ackingEnabled;
-    }
-
-    public boolean isTimeTrackingEnabled() {
-        return timeTrackingEnabled;
-    }
-
-    @Override
-    protected boolean monitorInputQueueLength() {
-        return true;
-    }
-
-    @Override
-    protected boolean reportOutflowRate() {
-        return true;
-    }
-
-    @Override
-    protected boolean reportInflowRate() {
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableFeedRuntimeId.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableFeedRuntimeId.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableFeedRuntimeId.java
deleted file mode 100644
index 7eb5921..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableFeedRuntimeId.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-
-public class SubscribableFeedRuntimeId extends FeedRuntimeId {
-    private static final long serialVersionUID = 1L;
-    private final FeedId feedId;
-
-    public SubscribableFeedRuntimeId(FeedId feedId, FeedRuntimeType runtimeType, int partition) {
-        super(runtimeType, partition, FeedRuntimeId.DEFAULT_OPERAND_ID);
-        this.feedId = feedId;
-    }
-
-    public FeedId getFeedId() {
-        return feedId;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (!(o instanceof SubscribableFeedRuntimeId)) {
-            return false;
-        }
-
-        return (super.equals(o) && this.feedId.equals(((SubscribableFeedRuntimeId) o).getFeedId()));
-    }
-
-    @Override
-    public int hashCode() {
-        return super.hashCode() + feedId.hashCode();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableRuntime.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableRuntime.java
deleted file mode 100644
index b09b3ff..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/SubscribableRuntime.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-import org.apache.asterix.common.feeds.api.ISubscriberRuntime;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-
-public class SubscribableRuntime extends FeedRuntime implements ISubscribableRuntime {
-
-    protected static final Logger LOGGER = Logger.getLogger(SubscribableRuntime.class.getName());
-
-    protected final FeedId feedId;
-    protected final List<ISubscriberRuntime> subscribers;
-    protected final RecordDescriptor recordDescriptor;
-    protected final DistributeFeedFrameWriter dWriter;
-
-    public SubscribableRuntime(FeedId feedId, FeedRuntimeId runtimeId, FeedRuntimeInputHandler inputHandler,
-            DistributeFeedFrameWriter dWriter, RecordDescriptor recordDescriptor) {
-        super(runtimeId, inputHandler, dWriter);
-        this.feedId = feedId;
-        this.recordDescriptor = recordDescriptor;
-        this.dWriter = dWriter;
-        this.subscribers = new ArrayList<ISubscriberRuntime>();
-    }
-
-    public FeedId getFeedId() {
-        return feedId;
-    }
-
-    @Override
-    public String toString() {
-        return "SubscribableRuntime" + " [" + feedId + "]" + "(" + runtimeId + ")";
-    }
-
-    @Override
-    public synchronized void subscribeFeed(FeedPolicyAccessor fpa, CollectionRuntime collectionRuntime)
-            throws Exception {
-        FeedFrameCollector collector = dWriter.subscribeFeed(new FeedPolicyAccessor(collectionRuntime.getFeedPolicy()),
-                collectionRuntime.getInputHandler(), collectionRuntime.getConnectionId());
-        collectionRuntime.setFrameCollector(collector);
-        subscribers.add(collectionRuntime);
-    }
-
-    @Override
-    public synchronized void unsubscribeFeed(CollectionRuntime collectionRuntime) throws Exception {
-        dWriter.unsubscribeFeed(collectionRuntime.getFeedFrameWriter());
-        subscribers.remove(collectionRuntime);
-    }
-
-    @Override
-    public synchronized List<ISubscriberRuntime> getSubscribers() {
-        return subscribers;
-    }
-
-    @Override
-    public DistributeFeedFrameWriter getFeedFrameWriter() {
-        return dWriter;
-    }
-
-    public FeedRuntimeType getFeedRuntimeType() {
-        return runtimeId.getFeedRuntimeType();
-    }
-
-    @Override
-    public RecordDescriptor getRecordDescriptor() {
-        return recordDescriptor;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IAdapterRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IAdapterRuntimeManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IAdapterRuntimeManager.java
deleted file mode 100644
index 2eb6caa..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IAdapterRuntimeManager.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import org.apache.asterix.common.feeds.FeedId;
-
-public interface IAdapterRuntimeManager {
-
-    public enum State {
-        /**
-         * Indicates that AsterixDB is maintaining the flow of data from the external source into its storage.
-         */
-        ACTIVE_INGESTION,
-
-        /**
-         * Indicates that data from the external source is being buffered and not
-         * pushed downstream.
-         */
-        INACTIVE_INGESTION,
-
-        /**
-         * Indicates that feed ingestion activity has finished.
-         */
-        FINISHED_INGESTION,
-
-        /** Indicates the occurrence of a failure during the intake stage of a data ingestion pipeline. */
-        FAILED_INGESTION
-    }
-
-    /**
-     * Start feed ingestion
-     * @throws Exception
-     */
-    public void start() throws Exception;
-
-    /**
-     * Stop feed ingestion.
-     * @throws Exception
-     */
-    public void stop() throws Exception;
-
-    /**
-     * @return feedId associated with the feed that is being ingested
-     */
-    public FeedId getFeedId();
-
-    /**
-     * @return the instance of the feed adapter (an implementation of {@code IFeedAdapter}) in use.
-     */
-    public IDataSourceAdapter getFeedAdapter();
-
-    /**
-     * @return state associated with the AdapterRuntimeManager. See {@code State}.
-     */
-    public State getState();
-
-    /**
-     * @param state
-     */
-    public void setState(State state);
-
-    public IIntakeProgressTracker getProgressTracker();
-
-    public int getPartition();
-
-}
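
For illustration only (not part of this patch): the State javadoc above describes the adapter lifecycle; a typical run might move through the states as sketched below. The transitions shown are illustrative, not an API.

class AdapterLifecycleSketch {
    enum State { ACTIVE_INGESTION, INACTIVE_INGESTION, FINISHED_INGESTION, FAILED_INGESTION }

    public static void main(String[] args) {
        State state = State.ACTIVE_INGESTION; // start() succeeded, data is flowing into storage
        state = State.INACTIVE_INGESTION;     // downstream is congested, intake is being buffered
        state = State.ACTIVE_INGESTION;       // congestion cleared, flow resumes
        state = State.FINISHED_INGESTION;     // stop() completed normally
        System.out.println("final state: " + state);
    }
}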

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ICentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ICentralFeedManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ICentralFeedManager.java
deleted file mode 100644
index 0092e6b..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/ICentralFeedManager.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.io.IOException;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-
-public interface ICentralFeedManager {
-
-    public void start() throws AsterixException;
-
-    public void stop() throws AsterixException, IOException;
-
-    public IFeedTrackingManager getFeedTrackingManager();
-
-    public IFeedLoadManager getFeedLoadManager();
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IDataSourceAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IDataSourceAdapter.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IDataSourceAdapter.java
deleted file mode 100644
index 9dd4e76..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IDataSourceAdapter.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.io.Serializable;
-
-import org.apache.hyracks.api.comm.IFrameWriter;
-
-/**
- * A super interface implemented by a data source adapter. An adapter can be
- * pull based or push based. This interface provides the common APIs that must
- * be implemented by every adapter, irrespective of the kind of adapter
- * (pull or push).
- */
-public interface IDataSourceAdapter extends Serializable {
-
-    /**
-     * Triggers the adapter to begin ingesting data from the external source.
-     * 
-     * @param partition
-     *            The adapter could be running with a degree of parallelism.
-     *            partition corresponds to the i'th parallel instance.
-     * @param writer
-     *            The frame writer that the adapter writes to. The adapter packs the
-     *            bytes fetched from the external source into frames and forwards the
-     *            frames to an upstream receiving operator using this instance of
-     *            IFrameWriter.
-     * @throws Exception
-     */
-    public void start(int partition, IFrameWriter writer) throws Exception;
-
-    /**
-     * Discontinues the ingestion of data.
-     *
-     * @return true if ingestion was stopped successfully
-     * @throws Exception
-     */
-    public boolean stop() throws Exception;
-
-    /**
-     * @param e
-     * @return true if the ingestion should continue post the exception else false
-     */
-    public boolean handleException(Throwable e);
-}
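
For illustration only (not part of this patch): the start/stop contract described above, sketched against java.util.function.Consumer instead of the real Hyracks IFrameWriter so it compiles on its own. The class, the record values, and the partition tagging are hypothetical.

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;

class PushAdapterSketch {
    private final AtomicBoolean stopped = new AtomicBoolean(false);

    /** Forwards records downstream until stop() is requested (the push-style start contract). */
    void start(int partition, Consumer<String> writer) {
        List<String> externalSource = Arrays.asList("r1", "r2", "r3"); // stand-in for the external feed
        for (String record : externalSource) {
            if (stopped.get()) {
                break; // ingestion was discontinued
            }
            writer.accept("partition-" + partition + ":" + record);
        }
    }

    /** Discontinues ingestion; returns true when the request was accepted. */
    boolean stop() {
        stopped.set(true);
        return true;
    }

    public static void main(String[] args) {
        PushAdapterSketch adapter = new PushAdapterSketch();
        adapter.start(0, System.out::println);
        System.out.println("stopped: " + adapter.stop());
    }
}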

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IExceptionHandler.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IExceptionHandler.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IExceptionHandler.java
deleted file mode 100644
index db2c890..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IExceptionHandler.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.nio.ByteBuffer;
-
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-/**
- * Handles an exception encountered during processing of a data frame.
- * When the exception is of type {@code FrameDataException}, the causing tuple is
- * logged and a new frame containing the tuples that follow the exception-generating
- * tuple is returned. This functionality is used during feed ingestion to bypass an
- * exception-generating tuple and thus prevent the data flow from terminating.
- */
-public interface IExceptionHandler {
-
-    /**
-     * @param e
-     *            the exception that needs to be handled
-     * @param frame
-     *            the frame that was being processed when the exception occurred
-     * @return a new frame with the tuples that follow the exception-generating tuple
-     * @throws HyracksDataException
-     */
-    public ByteBuffer handleException(Exception e, ByteBuffer frame);
-}
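
For illustration only (not part of this patch): the skip-the-bad-tuple behaviour described above, applied to a plain list of tuples rather than a Hyracks frame/ByteBuffer. Class name and tuple values are hypothetical.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class SkipBadTupleSketch {
    /** Returns a new "frame" holding only the tuples after the one that failed. */
    static List<String> handleException(List<String> frame, int failedTupleIndex) {
        // Log the offending tuple, then continue with the remainder of the frame.
        System.err.println("dropping tuple: " + frame.get(failedTupleIndex));
        return new ArrayList<String>(frame.subList(failedTupleIndex + 1, frame.size()));
    }

    public static void main(String[] args) {
        List<String> frame = Arrays.asList("t0", "t1", "bad", "t3", "t4");
        System.out.println(handleException(frame, 2)); // prints [t3, t4]
    }
}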

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedConnectionManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedConnectionManager.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedConnectionManager.java
deleted file mode 100644
index 014a868..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedConnectionManager.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedRuntime;
-import org.apache.asterix.common.feeds.FeedRuntimeId;
-import org.apache.asterix.common.feeds.FeedRuntimeManager;
-
-/**
- * Handle (de)registration of feeds for delivery of control messages.
- */
-public interface IFeedConnectionManager {
-
-    /**
-     * Allows registration of a feedRuntime.
-     * 
-     * @param feedRuntime
-     * @throws Exception
-     */
-    public void registerFeedRuntime(FeedConnectionId connectionId, FeedRuntime feedRuntime) throws Exception;
-
-    /**
-     * Obtain feed runtime corresponding to a feedRuntimeId
-     * 
-     * @param feedRuntimeId
-     * @return
-     */
-    public FeedRuntime getFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId);
-
-    /**
-     * De-register a feed
-     * 
-     * @param feedConnection
-     * @throws IOException
-     */
-    void deregisterFeed(FeedConnectionId feedConnection);
-
-    /**
-     * Obtain the feed runtime manager associated with a feed.
-     * 
-     * @param feedConnection
-     * @return
-     */
-    public FeedRuntimeManager getFeedRuntimeManager(FeedConnectionId feedConnection);
-
-    /**
-     * Allows de-registration of a feed runtime.
-     * 
-     * @param feedRuntimeId
-     */
-    void deRegisterFeedRuntime(FeedConnectionId connectionId, FeedRuntimeId feedRuntimeId);
-
-    public List<FeedRuntimeId> getRegisteredRuntimes();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedFrameHandler.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedFrameHandler.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedFrameHandler.java
deleted file mode 100644
index 3a95e51..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/api/IFeedFrameHandler.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds.api;
-
-import java.nio.ByteBuffer;
-import java.util.Iterator;
-
-import org.apache.asterix.common.feeds.DataBucket;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public interface IFeedFrameHandler {
-
-    public void handleFrame(ByteBuffer frame) throws HyracksDataException;
-
-    public void handleDataBucket(DataBucket bucket);
-
-    public void close();
-
-    public Iterator<ByteBuffer> replayData() throws HyracksDataException;
-
-    public String getSummary();
-
-}


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feeds/FeedPolicyEnforcer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feeds/FeedPolicyEnforcer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feeds/FeedPolicyEnforcer.java
deleted file mode 100644
index ae5c050..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/feeds/FeedPolicyEnforcer.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.feeds;
-
-import java.rmi.RemoteException;
-import java.util.Map;
-
-import org.apache.asterix.common.exceptions.ACIDException;
-import org.apache.asterix.common.feeds.FeedConnectionId;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
-
-public class FeedPolicyEnforcer {
-
-    private final FeedConnectionId connectionId;
-    private final FeedPolicyAccessor policyAccessor;
-
-    public FeedPolicyEnforcer(FeedConnectionId feedConnectionId, Map<String, String> feedPolicy) {
-        this.connectionId = feedConnectionId;
-        this.policyAccessor = new FeedPolicyAccessor(feedPolicy);
-    }
-
-    public boolean continueIngestionPostSoftwareFailure(Exception e) throws RemoteException, ACIDException {
-        return policyAccessor.continueOnSoftFailure();
-    }
-
-    public FeedPolicyAccessor getFeedPolicyAccessor() {
-        return policyAccessor;
-    }
-
-    public FeedConnectionId getFeedId() {
-        return connectionId;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/AbstractStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/AbstractStreamRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/AbstractStreamRecordReader.java
index 3b59b98..93ba0a0 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/AbstractStreamRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/AbstractStreamRecordReader.java
@@ -37,6 +37,7 @@ public abstract class AbstractStreamRecordReader implements IRecordReader<char[]
     protected int bufferLength = 0;
     protected int bufferPosn = 0;
     protected IExternalIndexer indexer;
+    protected boolean done = false;
 
     @Override
     public IRawRecord<char[]> next() throws IOException {
@@ -45,7 +46,10 @@ public abstract class AbstractStreamRecordReader implements IRecordReader<char[]
 
     @Override
     public void close() throws IOException {
-        reader.close();
+        if (!done) {
+            reader.close();
+        }
+        done = true;
     }
 
     public void setInputStream(AInputStream inputStream) throws IOException {
@@ -72,4 +76,15 @@ public abstract class AbstractStreamRecordReader implements IRecordReader<char[]
     public void setIndexer(IExternalIndexer indexer) {
         this.indexer = indexer;
     }
+
+    @Override
+    public boolean stop() {
+        try {
+            reader.stop();
+            return true;
+        } catch (Exception e) {
+            e.printStackTrace();
+            return false;
+        }
+    }
 }
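
For illustration only (not part of this patch): the hunk above introduces a done flag so that close() releases the underlying reader exactly once, whether it is reached from hasNext() at end of stream or from the caller. The guard-flag idiom in isolation, with an illustrative class name:

import java.io.Closeable;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

class IdempotentCloseSketch implements Closeable {
    private final Reader reader = new StringReader("example");
    private boolean done = false;

    @Override
    public void close() throws IOException {
        if (!done) {
            reader.close(); // the resource is released exactly once
        }
        done = true;
    }

    public static void main(String[] args) throws IOException {
        IdempotentCloseSketch r = new IdempotentCloseSketch();
        r.close();
        r.close(); // second call is a harmless no-op
    }
}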

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/LineRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/LineRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/LineRecordReader.java
index 9b11df6..2b33d7a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/LineRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/LineRecordReader.java
@@ -32,6 +32,9 @@ public class LineRecordReader extends AbstractStreamRecordReader {
 
     @Override
     public boolean hasNext() throws IOException {
+        if (done) {
+            return false;
+        }
         /* We're reading data from in, but the head of the stream may be
          * already buffered in buffer, so we have several cases:
          * 1. No newline characters are in the buffer, so we need to copy
@@ -63,7 +66,7 @@ public class LineRecordReader extends AbstractStreamRecordReader {
                         recordNumber++;
                         return true;
                     }
-                    reader.close();
+                    close();
                     return false; //EOF
                 }
             }
@@ -92,11 +95,6 @@ public class LineRecordReader extends AbstractStreamRecordReader {
     }
 
     @Override
-    public boolean stop() {
-        return false;
-    }
-
-    @Override
     public void configure(Map<String, String> configuration) throws Exception {
         super.configure(configuration);
         if (ExternalDataUtils.hasHeader(configuration)) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/QuotedLineRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/QuotedLineRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/QuotedLineRecordReader.java
index 668876e..49e67e9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/QuotedLineRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/QuotedLineRecordReader.java
@@ -44,6 +44,9 @@ public class QuotedLineRecordReader extends LineRecordReader {
 
     @Override
     public boolean hasNext() throws IOException {
+        if (done) {
+            return false;
+        }
         newlineLength = 0;
         prevCharCR = false;
         prevCharEscape = false;
@@ -65,6 +68,7 @@ public class QuotedLineRecordReader extends LineRecordReader {
                             recordNumber++;
                             return true;
                         }
+                        close();
                         return false;
                     }
                 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/SemiStructuredRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/SemiStructuredRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/SemiStructuredRecordReader.java
index 9864805..84c96d0 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/SemiStructuredRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/SemiStructuredRecordReader.java
@@ -67,6 +67,9 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
 
     @Override
     public boolean hasNext() throws Exception {
+        if (done) {
+            return false;
+        }
         record.reset();
         boolean hasStarted = false;
         boolean hasFinished = false;
@@ -79,6 +82,7 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
                 startPosn = bufferPosn = 0;
                 bufferLength = reader.read(inputBuffer);
                 if (bufferLength <= 0) {
+                    close();
                     return false; // EOF
                 }
             }
@@ -142,6 +146,12 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
 
     @Override
     public boolean stop() {
-        return false;
+        try {
+            reader.stop();
+        } catch (Exception e) {
+            e.printStackTrace();
+            return false;
+        }
+        return true;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TwitterPushRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TwitterPushRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TwitterPushRecordReader.java
index e7c141d..3ce6a81 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TwitterPushRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TwitterPushRecordReader.java
@@ -38,11 +38,16 @@ public class TwitterPushRecordReader implements IRecordReader<Status> {
     private LinkedBlockingQueue<Status> inputQ;
     private TwitterStream twitterStream;
     private GenericRecord<Status> record;
+    private boolean closed = false;
 
     @Override
     public void close() throws IOException {
-        twitterStream.clearListeners();
-        twitterStream.cleanUp();
+        if (!closed) {
+            twitterStream.clearListeners();
+            twitterStream.cleanUp();
+            twitterStream = null;
+            closed = true;
+        }
     }
 
     @Override
@@ -61,7 +66,7 @@ public class TwitterPushRecordReader implements IRecordReader<Status> {
 
     @Override
     public boolean hasNext() throws Exception {
-        return true;
+        return !closed;
     }
 
     @Override
@@ -81,7 +86,12 @@ public class TwitterPushRecordReader implements IRecordReader<Status> {
 
     @Override
     public boolean stop() {
-        return false;
+        try {
+            close();
+        } catch (Exception e) {
+            return false;
+        }
+        return true;
     }
 
     private class TweetListener implements StatusListener {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/factory/TwitterRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/factory/TwitterRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/factory/TwitterRecordReaderFactory.java
index 72aaa37..6840c11 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/factory/TwitterRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/factory/TwitterRecordReaderFactory.java
@@ -97,7 +97,7 @@ public class TwitterRecordReaderFactory implements IRecordReaderFactory<Status>
             pull = false;
         } else {
             throw new AsterixException("One of boolean parameters " + ExternalDataConstants.KEY_PULL + " and "
-                    + ExternalDataConstants.KEY_PUSH + "must be specified as part of adaptor configuration");
+                    + ExternalDataConstants.KEY_PUSH + " must be specified as part of adaptor configuration");
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
index e573f74..7ba6032 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.input.stream;
 
+import java.io.IOException;
 import java.io.InputStreamReader;
 
 public class AInputStreamReader extends InputStreamReader {
@@ -31,4 +32,12 @@ public class AInputStreamReader extends InputStreamReader {
     public boolean skipError() throws Exception {
         return in.skipError();
     }
+
+    public void stop() throws IOException {
+        try {
+            in.stop();
+        } catch (Exception e) {
+            throw new IOException(e);
+        }
+    }
 }
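
As a side note, the stop() added above follows a common exception-translation idiom; a minimal sketch under assumed names (not the project's API):

    import java.io.IOException;
    import java.util.concurrent.Callable;

    class IOExceptionAdapter {
        // Runs an action that may throw any checked exception and re-throws it as IOException,
        // keeping the original failure as the cause.
        static <T> T callAsIO(Callable<T> action) throws IOException {
            try {
                return action.call();
            } catch (IOException e) {
                throw e;                  // already the expected type
            } catch (Exception e) {
                throw new IOException(e); // wrap, preserving the cause
            }
        }
    }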

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStreamProvider.java
index b3ad1c3..8f4c094 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStreamProvider.java
@@ -63,7 +63,7 @@ public class HDFSInputStreamProvider<K> extends HDFSRecordReader<K, Text> implem
                 }
             } else if (value.getLength() == pos) {
                 pos++;
-                return ExternalDataConstants.EOL;
+                return ExternalDataConstants.BYTE_LF;
             }
             return value.getBytes()[pos++];
         }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStreamProvider.java
index b511617..22d0a87 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStreamProvider.java
@@ -19,38 +19,44 @@
 package org.apache.asterix.external.input.stream;
 
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.InputStream;
+import java.nio.file.Path;
 import java.util.Map;
 
 import org.apache.asterix.external.api.IInputStreamProvider;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.asterix.external.util.FeedUtils;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
 
 public class LocalFSInputStreamProvider implements IInputStreamProvider {
 
-    private FileSplit[] fileSplits;
-    private int partition;
+    private String expression;
+    private boolean isFeed;
+    private Path path;
+    private File feedLogFile;
 
     public LocalFSInputStreamProvider(FileSplit[] fileSplits, IHyracksTaskContext ctx,
-            Map<String, String> configuration, int partition) {
-        this.partition = partition;
-        this.fileSplits = fileSplits;
+            Map<String, String> configuration, int partition, String expression, boolean isFeed,
+            FileSplit[] feedLogFileSplits) {
+        this.expression = expression;
+        this.isFeed = isFeed;
+        this.path = fileSplits[partition].getLocalFile().getFile().toPath();
+        if (feedLogFileSplits != null) {
+            this.feedLogFile = FeedUtils
+                    .getAbsoluteFileRef(feedLogFileSplits[partition].getLocalFile().getFile().getPath(),
+                            feedLogFileSplits[partition].getIODeviceId(), ctx.getIOManager())
+                    .getFile();
+
+        }
     }
 
     @Override
-    public AInputStream getInputStream() throws Exception {
-        FileSplit split = fileSplits[partition];
-        File inputFile = split.getLocalFile().getFile();
-        InputStream in;
-        try {
-            in = new FileInputStream(inputFile);
-            return new BasicInputStream(in);
-        } catch (FileNotFoundException e) {
-            throw new IOException(e);
+    public AInputStream getInputStream() throws IOException {
+        FeedLogManager feedLogManager = null;
+        if (isFeed && feedLogFile != null) {
+            feedLogManager = new FeedLogManager(feedLogFile);
         }
+        return new LocalFileSystemInputStream(path, expression, feedLogManager, isFeed);
     }
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
new file mode 100644
index 0000000..7eebe4c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.stream;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.file.Path;
+
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.asterix.external.util.FileSystemWatcher;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class LocalFileSystemInputStream extends AInputStream {
+    private final FileSystemWatcher watcher;
+    private FileInputStream in;
+    private byte lastByte;
+
+    public LocalFileSystemInputStream(Path inputResource, String expression, FeedLogManager logManager, boolean isFeed)
+            throws IOException {
+        this.watcher = new FileSystemWatcher(logManager, inputResource, expression, isFeed);
+        this.watcher.init();
+    }
+
+    @Override
+    public void close() throws IOException {
+        IOException ioe = null;
+        if (in != null) {
+            try {
+                closeFile();
+            } catch (Exception e) {
+                ioe = new IOException(e);
+            }
+        }
+        try {
+            watcher.close();
+        } catch (Exception e) {
+            if (ioe == null) {
+                throw e;
+            }
+            ioe.addSuppressed(e);
+            throw ioe;
+        }
+    }
+
+    private void closeFile() throws IOException {
+        if (in != null) {
+            try {
+                in.close();
+            } finally {
+                in = null;
+            }
+        }
+    }
+
+    /**
+     * Closes the current input stream and opens the next one, if any.
+     */
+    private boolean advance() throws IOException {
+        closeFile();
+        if (watcher.hasNext()) {
+            in = new FileInputStream(watcher.next());
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public int read() throws IOException {
+        throw new HyracksDataException(
+                "read() is not supported with this stream. use read(byte[] b, int off, int len)");
+    }
+
+    @Override
+    public int read(byte[] b, int off, int len) throws IOException {
+        if (in == null) {
+            if (!advance()) {
+                return -1;
+            }
+        }
+        int result = in.read(b, off, len);
+        while (result < 0 && advance()) {
+            // Return a newline at the end of every file. Depending on the parser implementation, this might cause problems in some cases.
+            if (lastByte != ExternalDataConstants.BYTE_LF) {
+                lastByte = ExternalDataConstants.BYTE_LF;
+                b[off] = ExternalDataConstants.BYTE_LF;
+                return 1;
+            }
+            // read again from the newly opened file
+            result = in.read(b, off, len);
+        }
+        if (result > 0) {
+            lastByte = b[off + result - 1];
+        }
+        return result;
+    }
+
+    @Override
+    public boolean skipError() throws Exception {
+        advance();
+        return true;
+    }
+
+    @Override
+    public boolean stop() throws Exception {
+        watcher.close();
+        return true;
+    }
+}
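
To illustrate the read(byte[], int, int) strategy above in isolation, here is a simplified, self-contained sketch (assumed names, not this patch's classes) that concatenates several streams and emits a single '\n' between files whose last byte was not already a newline:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Deque;

    class ConcatenatingStream extends InputStream {
        private final Deque<InputStream> sources; // remaining files to read, in order
        private InputStream current;
        private byte lastByte = '\n';             // pretend a newline precedes the first file

        ConcatenatingStream(Deque<InputStream> sources) {
            this.sources = sources;
        }

        // Close the current file (if any) and move to the next one; false when exhausted.
        private boolean advance() throws IOException {
            if (current != null) {
                current.close();
                current = null;
            }
            current = sources.poll();
            return current != null;
        }

        @Override
        public int read() throws IOException {
            byte[] one = new byte[1];
            int n = read(one, 0, 1);
            return n < 0 ? -1 : one[0] & 0xFF;
        }

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            if (current == null && !advance()) {
                return -1;
            }
            int result = current.read(b, off, len);
            while (result < 0 && advance()) {
                if (lastByte != '\n') {
                    lastByte = '\n';
                    b[off] = '\n';            // inject a record separator between files
                    return 1;
                }
                result = current.read(b, off, len);
            }
            if (result > 0) {
                lastByte = b[off + result - 1];
            }
            return result;
        }
    }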

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStreamProvider.java
index d32a94f..7c64aa3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStreamProvider.java
@@ -30,7 +30,7 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import org.apache.asterix.external.api.IInputStreamProvider;
-import org.apache.asterix.external.runtime.TweetGenerator;
+import org.apache.asterix.external.util.TweetGenerator;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 
 public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
index 14c712a..ab1f8a0 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
@@ -29,8 +29,10 @@ import org.apache.asterix.external.api.IInputStreamProviderFactory;
 import org.apache.asterix.external.api.INodeResolver;
 import org.apache.asterix.external.api.INodeResolverFactory;
 import org.apache.asterix.external.input.stream.LocalFSInputStreamProvider;
-import org.apache.asterix.external.util.DNSResolverFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.external.util.FeedUtils;
+import org.apache.asterix.external.util.NodeResolverFactory;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -41,15 +43,21 @@ public class LocalFSInputStreamProviderFactory implements IInputStreamProviderFa
 
     private static final long serialVersionUID = 1L;
 
-    protected static final INodeResolver DEFAULT_NODE_RESOLVER = new DNSResolverFactory().createNodeResolver();
+    protected static final INodeResolver DEFAULT_NODE_RESOLVER = new NodeResolverFactory().createNodeResolver();
     protected static final Logger LOGGER = Logger.getLogger(LocalFSInputStreamProviderFactory.class.getName());
     protected static INodeResolver nodeResolver;
     protected Map<String, String> configuration;
-    protected FileSplit[] fileSplits;
+    protected FileSplit[] inputFileSplits;
+    protected FileSplit[] feedLogFileSplits; // paths that instances of this feed can use as log storage
+    protected boolean isFeed;
+    protected String expression;
+    // transient fields (they don't need to be serialized and transferred)
+    private transient AlgebricksAbsolutePartitionConstraint constraints;
 
     @Override
     public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition) throws Exception {
-        return new LocalFSInputStreamProvider(fileSplits, ctx, configuration, partition);
+        return new LocalFSInputStreamProvider(inputFileSplits, ctx, configuration, partition, expression, isFeed,
+                feedLogFileSplits);
     }
 
     @Override
@@ -67,16 +75,23 @@ public class LocalFSInputStreamProviderFactory implements IInputStreamProviderFa
         this.configuration = configuration;
         String[] splits = configuration.get(ExternalDataConstants.KEY_PATH).split(",");
         configureFileSplits(splits);
+        configurePartitionConstraint();
+        this.isFeed = ExternalDataUtils.isFeed(configuration) && ExternalDataUtils.keepDataSourceOpen(configuration);
+        if (isFeed) {
+            feedLogFileSplits = FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
+                    ExternalDataUtils.getFeedName(configuration), constraints);
+        }
+        this.expression = configuration.get(ExternalDataConstants.KEY_EXPRESSION);
     }
 
     @Override
     public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
-        return configurePartitionConstraint();
+        return constraints;
     }
 
     private void configureFileSplits(String[] splits) throws AsterixException {
-        if (fileSplits == null) {
-            fileSplits = new FileSplit[splits.length];
+        if (inputFileSplits == null) {
+            inputFileSplits = new FileSplit[splits.length];
             String nodeName;
             String nodeLocalPath;
             int count = 0;
@@ -90,19 +105,19 @@ public class LocalFSInputStreamProviderFactory implements IInputStreamProviderFa
                 nodeName = trimmedValue.split(":")[0];
                 nodeLocalPath = trimmedValue.split("://")[1];
                 FileSplit fileSplit = new FileSplit(nodeName, new FileReference(new File(nodeLocalPath)));
-                fileSplits[count++] = fileSplit;
+                inputFileSplits[count++] = fileSplit;
             }
         }
     }
 
-    private AlgebricksPartitionConstraint configurePartitionConstraint() throws AsterixException {
-        String[] locs = new String[fileSplits.length];
+    private void configurePartitionConstraint() throws AsterixException {
+        String[] locs = new String[inputFileSplits.length];
         String location;
-        for (int i = 0; i < fileSplits.length; i++) {
-            location = getNodeResolver().resolveNode(fileSplits[i].getNodeName());
+        for (int i = 0; i < inputFileSplits.length; i++) {
+            location = getNodeResolver().resolveNode(inputFileSplits[i].getNodeName());
             locs[i] = location;
         }
-        return new AlgebricksAbsolutePartitionConstraint(locs);
+        constraints = new AlgebricksAbsolutePartitionConstraint(locs);
     }
 
     protected INodeResolver getNodeResolver() {
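
A brief illustrative sketch (hypothetical names, not this patch's API) of the compute-once-and-cache pattern the factory now follows: locations are resolved from the splits during configuration, and the resulting constraint is simply returned afterwards:

    class CachedConstraintFactory {
        private String[] splitNodeNames;       // set during configure()
        private transient String[] locations;  // derived once, not serialized

        void configure(String[] splitNodeNames) {
            this.splitNodeNames = splitNodeNames;
            this.locations = new String[splitNodeNames.length];
            for (int i = 0; i < splitNodeNames.length; i++) {
                locations[i] = resolveNode(splitNodeNames[i]); // e.g. map a node name to an NC id
            }
        }

        String[] getLocations() {
            return locations;                  // already computed; no work at request time
        }

        private String resolveNode(String nodeName) {
            return nodeName;                   // placeholder resolution for the sketch
        }
    }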

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
index e9c15cb..fd3d9e3 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
@@ -75,7 +75,7 @@ public abstract class ExternalFunction implements IExternalFunction {
         }
     }
 
-    public static ISerializerDeserializer getSerDe(Object typeInfo) {
+    public static ISerializerDeserializer<?> getSerDe(Object typeInfo) {
         return AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(typeInfo);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
index 192bd4e..b6795f6 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ResultCollector.java
@@ -117,6 +117,7 @@ public class ResultCollector implements IResultCollector {
         return reusableResultObjectHolder;
     }
 
+    @SuppressWarnings("unchecked")
     private void serializeResult(IAObject object) throws AsterixException {
         try {
             AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(finfo.getReturnType())

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
index 93b4bf1..e7c1ec1 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectUtil.java
@@ -23,7 +23,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.AStringSerializerDeserializer;
 import org.apache.asterix.dataflow.data.nontagged.serde.SerializerDeserializerUtil;
 import org.apache.asterix.external.api.IJObject;
@@ -65,7 +64,6 @@ public class JObjectUtil {
     /**
      * Normalize an input string by removing linebreaks, and replace them with space
      * Also remove non-readable special characters
-     *
      * @param originalString
      *            The input String
      * @return
@@ -314,54 +312,38 @@ public class JObjectUtil {
                 int numberOfSchemaFields = recordType.getFieldTypes().length;
                 byte[] recordBits = dis.getInputStream().getArray();
                 boolean isExpanded = false;
-                int s = dis.getInputStream().getPosition();
-                int recordOffset = s;
-                int openPartOffset = 0;
-                int offsetArrayOffset = 0;
+                dis.getInputStream();
                 int[] fieldOffsets = new int[numberOfSchemaFields];
                 IJObject[] closedFields = new IJObject[numberOfSchemaFields];
 
-                if (recordType == null) {
-                    openPartOffset = s + AInt32SerializerDeserializer.getInt(recordBits, s + 6);
-                    s += 8;
-                    isExpanded = true;
-                } else {
-                    dis.skip(4); // reading length is not required.
-                    if (recordType.isOpen()) {
-                        isExpanded = dis.readBoolean();
-                        if (isExpanded) {
-                            openPartOffset = s + dis.readInt(); // AInt32SerializerDeserializer.getInt(recordBits, s + 6);
-                        } else {
-                            // do nothing s += 6;
-                        }
+                dis.skip(4); // reading length is not required.
+                if (recordType.isOpen()) {
+                    isExpanded = dis.readBoolean();
+                    if (isExpanded) {
+                        dis.readInt();
                     } else {
-                        // do nothing s += 5;
                     }
+                } else {
                 }
 
                 if (numberOfSchemaFields > 0) {
-                    int numOfSchemaFields = dis.readInt(); //s += 4;
+                    dis.readInt();
                     int nullBitMapOffset = 0;
                     boolean hasNullableFields = NonTaggedFormatUtil.hasNullableField(recordType);
                     if (hasNullableFields) {
-                        nullBitMapOffset = dis.getInputStream().getPosition();//s
-                        offsetArrayOffset = dis.getInputStream().getPosition() //s
-                                + (numberOfSchemaFields % 8 == 0 ? numberOfSchemaFields / 8
-                                        : numberOfSchemaFields / 8 + 1);
+                        nullBitMapOffset = dis.getInputStream().getPosition();
+                        dis.getInputStream();
                     } else {
-                        offsetArrayOffset = dis.getInputStream().getPosition();
+                        dis.getInputStream();
                     }
                     for (int i = 0; i < numberOfSchemaFields; i++) {
-                        fieldOffsets[i] = dis.readInt(); // AInt32SerializerDeserializer.getInt(recordBits, offsetArrayOffset) + recordOffset;
-                        // offsetArrayOffset += 4;
+                        fieldOffsets[i] = dis.readInt();
                     }
                     for (int fieldNumber = 0; fieldNumber < numberOfSchemaFields; fieldNumber++) {
                         if (hasNullableFields) {
                             byte b1 = recordBits[nullBitMapOffset + fieldNumber / 8];
                             int p = 1 << (7 - (fieldNumber % 8));
                             if ((b1 & p) == 0) {
-                                // set null value (including type tag inside)
-                                //fieldValues.add(nullReference);
                                 continue;
                             }
                         }
@@ -373,8 +355,6 @@ public class JObjectUtil {
                             if (((AUnionType) fieldTypes[fieldNumber]).isNullableType()) {
                                 fieldType = ((AUnionType) fieldTypes[fieldNumber]).getNullableType();
                                 fieldValueTypeTag = fieldType.getTypeTag();
-                                //                      fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(recordBits,
-                                //                              fieldOffsets[fieldNumber], typeTag, false);
                             }
                         } else {
                             fieldValueTypeTag = fieldTypes[fieldNumber].getTypeTag();
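
For readers unfamiliar with the record layout, a small worked example (standalone, not part of this patch) of the null-bitmap test used above: field i's flag lives in byte i / 8 of the bitmap, at bit position 7 - (i % 8), and a cleared bit marks a null field:

    class NullBitmapExample {
        static boolean isNull(byte[] recordBits, int nullBitMapOffset, int fieldNumber) {
            byte b = recordBits[nullBitMapOffset + fieldNumber / 8];
            int mask = 1 << (7 - (fieldNumber % 8));
            return (b & mask) == 0;                       // cleared bit => null field
        }

        public static void main(String[] args) {
            byte[] bitmap = { (byte) 0b1011_0000 };       // fields 0, 2, 3 present; field 1 null
            System.out.println(isNull(bitmap, 0, 1));     // true
            System.out.println(isNull(bitmap, 0, 2));     // false
        }
    }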

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDataScanOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDataScanOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDataScanOperatorDescriptor.java
new file mode 100644
index 0000000..aed8bb9
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDataScanOperatorDescriptor.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/*
+ * A single activity operator that provides the functionality of scanning data using an
+ * instance of the configured adapter.
+ */
+public class ExternalDataScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private IAdapterFactory adapterFactory;
+
+    public ExternalDataScanOperatorDescriptor(JobSpecification spec, RecordDescriptor rDesc,
+            IAdapterFactory dataSourceAdapterFactory) {
+        super(spec, 0, 1);
+        recordDescriptors[0] = rDesc;
+        this.adapterFactory = dataSourceAdapterFactory;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+                    throws HyracksDataException {
+
+        return new AbstractUnaryOutputSourceOperatorNodePushable() {
+
+            @Override
+            public void initialize() throws HyracksDataException {
+                IDataSourceAdapter adapter = null;
+                try {
+                    writer.open();
+                    adapter = adapterFactory.createAdapter(ctx, partition);
+                    adapter.start(partition, writer);
+                } catch (Throwable th) {
+                    writer.fail();
+                    throw new HyracksDataException(th);
+                } finally {
+                    writer.close();
+                }
+            }
+        };
+
+    }
+
+}
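
The initialize() above follows the usual Hyracks writer lifecycle; as a hedged sketch of that contract (illustrative wrapper, not part of this patch): open() before producing frames, fail() on any error so downstream operators know the stream is invalid, and close() unconditionally:

    import org.apache.hyracks.api.comm.IFrameWriter;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    class SourceLifecycleSketch {
        void run(IFrameWriter writer, Runnable produceFrames) throws HyracksDataException {
            try {
                writer.open();
                produceFrames.run();           // e.g. adapter.start(partition, writer)
            } catch (Throwable th) {
                writer.fail();                 // tell downstream the stream is invalid
                throw new HyracksDataException(th);
            } finally {
                writer.close();                // always release the writer
            }
        }
    }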

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesRecoverOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesRecoverOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesRecoverOperatorDescriptor.java
index 59ad076..82ca715 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesRecoverOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesRecoverOperatorDescriptor.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.external.operators;
 
-import java.io.File;
 import java.util.List;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java
new file mode 100644
index 0000000..a929eec
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedSubscriptionManager;
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.runtime.IngestionRuntime;
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * FeedCollectOperatorDescriptor is responsible for collecting tuples from a source feed runtime
+ * (the intake or compute stage of the source feed, depending on the subscription location) and
+ * passing them on to the recipient feed's pipeline.
+ */
+public class FeedCollectOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+    private static final Logger LOGGER = Logger.getLogger(FeedCollectOperatorDescriptor.class.getName());
+
+    /** The type associated with the ADM data output from the feed adaptor */
+    private final IAType outputType;
+
+    /** unique identifier for a feed instance. */
+    private final FeedConnectionId connectionId;
+
+    /** Map representation of policy parameters */
+    private final Map<String, String> feedPolicyProperties;
+
+    /** The (singleton) instance of {@code IFeedSubscriptionManager} **/
+    private IFeedSubscriptionManager subscriptionManager;
+
+    /** The source feed from which this feed derives its data. **/
+    private final FeedId sourceFeedId;
+
+    /** The subscription location at which the recipient feed receives tuples from the source feed **/
+    private final ConnectionLocation subscriptionLocation;
+
+    public FeedCollectOperatorDescriptor(JobSpecification spec, FeedConnectionId feedConnectionId, FeedId sourceFeedId,
+            ARecordType atype, RecordDescriptor rDesc, Map<String, String> feedPolicyProperties,
+            ConnectionLocation subscriptionLocation) {
+        super(spec, 0, 1);
+        recordDescriptors[0] = rDesc;
+        this.outputType = atype;
+        this.connectionId = feedConnectionId;
+        this.feedPolicyProperties = feedPolicyProperties;
+        this.sourceFeedId = sourceFeedId;
+        this.subscriptionLocation = subscriptionLocation;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+                    throws HyracksDataException {
+        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+                .getApplicationContext().getApplicationObject();
+        this.subscriptionManager = ((IFeedManager) runtimeCtx.getFeedManager()).getFeedSubscriptionManager();
+        ISubscribableRuntime sourceRuntime = null;
+        IOperatorNodePushable nodePushable = null;
+        switch (subscriptionLocation) {
+            case SOURCE_FEED_INTAKE_STAGE:
+                try {
+                    SubscribableFeedRuntimeId feedSubscribableRuntimeId = new SubscribableFeedRuntimeId(sourceFeedId,
+                            FeedRuntimeType.INTAKE, partition);
+                    sourceRuntime = getIntakeRuntime(feedSubscribableRuntimeId);
+                    if (sourceRuntime == null) {
+                        throw new HyracksDataException(
+                                "Source intake task not found for source feed id " + sourceFeedId);
+                    }
+                    nodePushable = new FeedCollectOperatorNodePushable(ctx, sourceFeedId, connectionId,
+                            feedPolicyProperties, partition, nPartitions, sourceRuntime);
+
+                } catch (Exception exception) {
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Initialization of the feed adaptor failed with exception " + exception);
+                    }
+                    throw new HyracksDataException("Initialization of the feed adapter failed", exception);
+                }
+                break;
+            case SOURCE_FEED_COMPUTE_STAGE:
+                SubscribableFeedRuntimeId feedSubscribableRuntimeId = new SubscribableFeedRuntimeId(sourceFeedId,
+                        FeedRuntimeType.COMPUTE, partition);
+                sourceRuntime = subscriptionManager.getSubscribableRuntime(feedSubscribableRuntimeId);
+                if (sourceRuntime == null) {
+                    throw new HyracksDataException("Source compute task not found for source feed id " + sourceFeedId
+                            + " " + FeedRuntimeType.COMPUTE + "[" + partition + "]");
+                }
+                nodePushable = new FeedCollectOperatorNodePushable(ctx, sourceFeedId, connectionId,
+                        feedPolicyProperties, partition, nPartitions, sourceRuntime);
+                break;
+        }
+        return nodePushable;
+    }
+
+    public FeedConnectionId getFeedConnectionId() {
+        return connectionId;
+    }
+
+    public Map<String, String> getFeedPolicyProperties() {
+        return feedPolicyProperties;
+    }
+
+    public IAType getOutputType() {
+        return outputType;
+    }
+
+    public RecordDescriptor getRecordDescriptor() {
+        return recordDescriptors[0];
+    }
+
+    public FeedId getSourceFeedId() {
+        return sourceFeedId;
+    }
+
+    private IngestionRuntime getIntakeRuntime(SubscribableFeedRuntimeId subscribableRuntimeId) {
+        int waitCycleCount = 0;
+        ISubscribableRuntime ingestionRuntime = subscriptionManager.getSubscribableRuntime(subscribableRuntimeId);
+        while (ingestionRuntime == null && waitCycleCount < 10) {
+            try {
+                Thread.sleep(2000);
+                waitCycleCount++;
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("waiting to obtain ingestion runtime for subscription " + subscribableRuntimeId);
+                }
+            } catch (InterruptedException e) {
+                e.printStackTrace();
+                break;
+            }
+            ingestionRuntime = subscriptionManager.getSubscribableRuntime(subscribableRuntimeId);
+        }
+        return (IngestionRuntime) ingestionRuntime;
+    }
+
+    public ConnectionLocation getSubscriptionLocation() {
+        return subscriptionLocation;
+    }
+}
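
getIntakeRuntime() above retries the lookup a bounded number of times before giving up; a small generic sketch of that polling idiom (assumed names, not this patch's API):

    import java.util.function.Supplier;

    class BoundedPoll {
        static <T> T poll(Supplier<T> lookup, int maxAttempts, long sleepMillis) {
            T value = lookup.get();
            int attempts = 1;
            while (value == null && attempts < maxAttempts) {
                try {
                    Thread.sleep(sleepMillis);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt(); // preserve the interrupt and stop waiting
                    break;
                }
                value = lookup.get();
                attempts++;
            }
            return value; // may still be null if the runtime never showed up
        }
    }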

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
new file mode 100644
index 0000000..8916af6
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedOperatorOutputSideHandler;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.dataflow.CollectTransformFeedFrameWriter;
+import org.apache.asterix.external.feed.dataflow.FeedCollectRuntimeInputHandler;
+import org.apache.asterix.external.feed.dataflow.FeedFrameCollector.State;
+import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.CollectionRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * The runtime for {@link FeedCollectOperatorDescriptor}
+ */
+public class FeedCollectOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+    private static Logger LOGGER = Logger.getLogger(FeedCollectOperatorNodePushable.class.getName());
+
+    private final int partition;
+    private final FeedConnectionId connectionId;
+    private final Map<String, String> feedPolicy;
+    private final FeedPolicyAccessor policyAccessor;
+    private final IFeedManager feedManager;
+    private final ISubscribableRuntime sourceRuntime;
+    private final IHyracksTaskContext ctx;
+    private final int nPartitions;
+
+    private RecordDescriptor outputRecordDescriptor;
+    private FeedRuntimeInputHandler inputSideHandler;
+    private CollectionRuntime collectRuntime;
+
+    public FeedCollectOperatorNodePushable(IHyracksTaskContext ctx, FeedId sourceFeedId,
+            FeedConnectionId feedConnectionId, Map<String, String> feedPolicy, int partition, int nPartitions,
+            ISubscribableRuntime sourceRuntime) {
+        this.ctx = ctx;
+        this.partition = partition;
+        this.nPartitions = nPartitions;
+        this.connectionId = feedConnectionId;
+        this.sourceRuntime = sourceRuntime;
+        this.feedPolicy = feedPolicy;
+        policyAccessor = new FeedPolicyAccessor(feedPolicy);
+        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+                .getApplicationContext().getApplicationObject();
+        this.feedManager = (IFeedManager) runtimeCtx.getFeedManager();
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        try {
+            outputRecordDescriptor = recordDesc;
+            FeedRuntimeType sourceRuntimeType = ((SubscribableFeedRuntimeId) sourceRuntime.getRuntimeId())
+                    .getFeedRuntimeType();
+            switch (sourceRuntimeType) {
+                case INTAKE:
+                    handleCompleteConnection();
+                    break;
+                case COMPUTE:
+                    handlePartialConnection();
+                    break;
+                default:
+                    throw new IllegalStateException("Invalid source type " + sourceRuntimeType);
+            }
+
+            State state = collectRuntime.waitTillCollectionOver();
+            if (state.equals(State.FINISHED)) {
+                feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId,
+                        collectRuntime.getRuntimeId());
+                writer.close();
+                inputSideHandler.close();
+            } else if (state.equals(State.HANDOVER)) {
+                inputSideHandler.setMode(Mode.STALL);
+                writer.close();
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Ending Collect Operator, the input side handler is now in " + Mode.STALL
+                            + " and the output writer " + writer + " has been closed ");
+                }
+            }
+        } catch (InterruptedException ie) {
+            handleInterruptedException(ie);
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        }
+    }
+
+    private void handleCompleteConnection() throws Exception {
+        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.COLLECT, partition,
+                FeedRuntimeId.DEFAULT_OPERAND_ID);
+        collectRuntime = (CollectionRuntime) feedManager.getFeedConnectionManager().getFeedRuntime(connectionId,
+                runtimeId);
+        if (collectRuntime == null) {
+            beginNewFeed(runtimeId);
+        } else {
+            reviveOldFeed();
+        }
+    }
+
+    private void beginNewFeed(FeedRuntimeId runtimeId) throws Exception {
+        writer.open();
+        IFrameWriter outputSideWriter = writer;
+        if (((SubscribableFeedRuntimeId) sourceRuntime.getRuntimeId()).getFeedRuntimeType()
+                .equals(FeedRuntimeType.COMPUTE)) {
+            outputSideWriter = new CollectTransformFeedFrameWriter(ctx, writer, sourceRuntime, outputRecordDescriptor,
+                    connectionId);
+            this.recordDesc = sourceRuntime.getRecordDescriptor();
+        }
+
+        FrameTupleAccessor tupleAccessor = new FrameTupleAccessor(recordDesc);
+        inputSideHandler = new FeedCollectRuntimeInputHandler(ctx, connectionId, runtimeId, outputSideWriter,
+                policyAccessor, false, tupleAccessor, recordDesc, feedManager, nPartitions);
+
+        collectRuntime = new CollectionRuntime(connectionId, runtimeId, inputSideHandler, outputSideWriter,
+                sourceRuntime, feedPolicy);
+        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, collectRuntime);
+        sourceRuntime.subscribeFeed(policyAccessor, collectRuntime);
+    }
+
+    private void reviveOldFeed() throws HyracksDataException {
+        writer.open();
+        collectRuntime.getFrameCollector().setState(State.ACTIVE);
+        inputSideHandler = collectRuntime.getInputHandler();
+
+        IFrameWriter innerWriter = inputSideHandler.getCoreOperator();
+        if (innerWriter instanceof CollectTransformFeedFrameWriter) {
+            ((CollectTransformFeedFrameWriter) innerWriter).reset(this.writer);
+        } else {
+            inputSideHandler.setCoreOperator(writer);
+        }
+
+        inputSideHandler.setMode(Mode.PROCESS);
+    }
+
+    private void handlePartialConnection() throws Exception {
+        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.COMPUTE_COLLECT, partition,
+                FeedRuntimeId.DEFAULT_OPERAND_ID);
+        writer.open();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Beginning new feed (from existing partial connection):" + connectionId);
+        }
+        IFeedOperatorOutputSideHandler wrapper = new CollectTransformFeedFrameWriter(ctx, writer, sourceRuntime,
+                outputRecordDescriptor, connectionId);
+
+        inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, wrapper, policyAccessor, false,
+                new FrameTupleAccessor(recordDesc), recordDesc, feedManager, nPartitions);
+
+        collectRuntime = new CollectionRuntime(connectionId, runtimeId, inputSideHandler, wrapper, sourceRuntime,
+                feedPolicy);
+        feedManager.getFeedConnectionManager().registerFeedRuntime(connectionId, collectRuntime);
+        recordDesc = sourceRuntime.getRecordDescriptor();
+        sourceRuntime.subscribeFeed(policyAccessor, collectRuntime);
+    }
+
+    private void handleInterruptedException(InterruptedException ie) throws HyracksDataException {
+        if (policyAccessor.continueOnHardwareFailure()) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Continuing on failure as per feed policy, switching to " + Mode.STALL
+                        + " until failure is resolved");
+            }
+            inputSideHandler.setMode(Mode.STALL);
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Failure during feed ingestion. Deregistering feed runtime " + collectRuntime
+                        + " as feed is not configured to handle failures");
+            }
+            feedManager.getFeedConnectionManager().deRegisterFeedRuntime(connectionId, collectRuntime.getRuntimeId());
+            writer.close();
+            throw new HyracksDataException(ie);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
new file mode 100644
index 0000000..a18ebcd
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.util.Map;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.feed.api.IFeed;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedSubscriptionManager;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.IngestionRuntime;
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+import org.apache.asterix.external.library.ExternalLibraryManager;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * An operator responsible for establishing a connection with an external data source and for parsing
+ * and translating the received content. It uses an instance of a feed adaptor to perform these functions.
+ */
+public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final Logger LOGGER = Logger.getLogger(FeedIntakeOperatorDescriptor.class.getName());
+
+    /** The unique identifier of the feed that is being ingested. **/
+    private final FeedId feedId;
+
+    private final FeedPolicyAccessor policyAccessor;
+
+    /** The adaptor factory that is used to create an instance of the feed adaptor **/
+    private IAdapterFactory adaptorFactory;
+
+    /** The library that contains the adapter in use. **/
+    private String adaptorLibraryName;
+
+    /**
+     * The adapter factory class that is used to create an instance of the feed adapter.
+     * This value is used only in the case of external adapters.
+     **/
+    private String adaptorFactoryClassName;
+
+    /** The configuration parameters associated with the adapter. **/
+    private Map<String, String> adaptorConfiguration;
+
+    private ARecordType adapterOutputType;
+
+    public FeedIntakeOperatorDescriptor(JobSpecification spec, IFeed primaryFeed, IAdapterFactory adapterFactory,
+            ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor) {
+        super(spec, 0, 1);
+        this.feedId = new FeedId(primaryFeed.getDataverseName(), primaryFeed.getFeedName());
+        this.adaptorFactory = adapterFactory;
+        this.adapterOutputType = adapterOutputType;
+        this.policyAccessor = policyAccessor;
+    }
+
+    public FeedIntakeOperatorDescriptor(JobSpecification spec, IFeed primaryFeed, String adapterLibraryName,
+            String adapterFactoryClassName, ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor) {
+        super(spec, 0, 1);
+        this.feedId = new FeedId(primaryFeed.getDataverseName(), primaryFeed.getFeedName());
+        this.adaptorFactoryClassName = adapterFactoryClassName;
+        this.adaptorLibraryName = adapterLibraryName;
+        this.adaptorConfiguration = primaryFeed.getAdapterConfiguration();
+        this.adapterOutputType = adapterOutputType;
+        this.policyAccessor = policyAccessor;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+                .getApplicationContext().getApplicationObject();
+        IFeedSubscriptionManager feedSubscriptionManager = ((IFeedManager) runtimeCtx.getFeedManager())
+                .getFeedSubscriptionManager();
+        SubscribableFeedRuntimeId feedIngestionId = new SubscribableFeedRuntimeId(feedId, FeedRuntimeType.INTAKE,
+                partition);
+        IngestionRuntime ingestionRuntime = (IngestionRuntime) feedSubscriptionManager
+                .getSubscribableRuntime(feedIngestionId);
+        if (adaptorFactory == null) {
+            try {
+                adaptorFactory = createExternalAdapterFactory(ctx, partition);
+            } catch (Exception exception) {
+                throw new HyracksDataException(exception);
+            }
+
+        }
+        return new FeedIntakeOperatorNodePushable(ctx, feedId, adaptorFactory, partition, ingestionRuntime,
+                policyAccessor);
+    }
+
+    private IAdapterFactory createExternalAdapterFactory(IHyracksTaskContext ctx, int partition) throws Exception {
+        IAdapterFactory adapterFactory = null;
+        ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(feedId.getDataverse(),
+                adaptorLibraryName);
+        if (classLoader != null) {
+            adapterFactory = ((IAdapterFactory) (classLoader.loadClass(adaptorFactoryClassName).newInstance()));
+            adapterFactory.configure(adaptorConfiguration, adapterOutputType);
+        } else {
+            String message = "Unable to create adapter as class loader not configured for library " + adaptorLibraryName
+                    + " in dataverse " + feedId.getDataverse();
+            LOGGER.severe(message);
+            throw new IllegalArgumentException(message);
+        }
+        return adapterFactory;
+    }
+
+    public FeedId getFeedId() {
+        return feedId;
+    }
+
+}
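
A minimal usage sketch (not part of this patch) of how the intake descriptor might be wired into a Hyracks job. The helper name buildIntakeJob is hypothetical, and the IFeed, configured IAdapterFactory, ARecordType, and FeedPolicyAccessor are assumed to be available from the caller; downstream collect operators, connectors, and partition constraints are set up by the AsterixDB compiler and are omitted here.

    import org.apache.asterix.external.api.IAdapterFactory;
    import org.apache.asterix.external.feed.api.IFeed;
    import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
    import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
    import org.apache.asterix.om.types.ARecordType;
    import org.apache.hyracks.api.job.JobSpecification;

    public class IntakeJobSketch {
        // Hypothetical helper: builds a job spec containing only the intake operator.
        public static JobSpecification buildIntakeJob(IFeed primaryFeed, IAdapterFactory adapterFactory,
                ARecordType outputType, FeedPolicyAccessor policyAccessor) {
            JobSpecification spec = new JobSpecification();
            FeedIntakeOperatorDescriptor intakeOp = new FeedIntakeOperatorDescriptor(spec, primaryFeed,
                    adapterFactory, outputType, policyAccessor);
            // Only the intake operator is added in this sketch; real jobs attach consumers.
            spec.addRoot(intakeOp);
            return spec;
        }
    }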

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
new file mode 100644
index 0000000..b31f2bf
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
@@ -0,0 +1,216 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.IAdapterRuntimeManager;
+import org.apache.asterix.external.api.IAdapterRuntimeManager.State;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedSubscriptionManager;
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.api.ISubscriberRuntime;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.runtime.AdapterRuntimeManager;
+import org.apache.asterix.external.feed.runtime.CollectionRuntime;
+import org.apache.asterix.external.feed.runtime.IngestionRuntime;
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * The runtime for {@link FeedIntakeOperatorDescriptor}.
+ * Provides the core functionality to set up the artifacts for ingestion of a feed.
+ * The artifacts are lazily activated when a feed receives a subscription request.
+ */
+public class FeedIntakeOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedIntakeOperatorNodePushable.class.getName());
+
+    private final FeedId feedId;
+    private final int partition;
+    private final IFeedSubscriptionManager feedSubscriptionManager;
+    private final IFeedManager feedManager;
+    private final IHyracksTaskContext ctx;
+    private final IAdapterFactory adapterFactory;
+
+    private IngestionRuntime ingestionRuntime;
+    private IFeedAdapter adapter;
+    private IIntakeProgressTracker tracker;
+    private DistributeFeedFrameWriter feedFrameWriter;
+
+    public FeedIntakeOperatorNodePushable(IHyracksTaskContext ctx, FeedId feedId, IAdapterFactory adapterFactory,
+            int partition, IngestionRuntime ingestionRuntime, FeedPolicyAccessor policyAccessor) {
+        this.ctx = ctx;
+        this.feedId = feedId;
+        this.partition = partition;
+        this.ingestionRuntime = ingestionRuntime;
+        this.adapterFactory = adapterFactory;
+        IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
+                .getApplicationContext().getApplicationObject();
+        this.feedManager = (IFeedManager) runtimeCtx.getFeedManager();
+        this.feedSubscriptionManager = feedManager.getFeedSubscriptionManager();
+    }
+
+    @Override
+    public void initialize() throws HyracksDataException {
+        IAdapterRuntimeManager adapterRuntimeManager = null;
+        try {
+            if (ingestionRuntime == null) {
+                try {
+                    adapter = (IFeedAdapter) adapterFactory.createAdapter(ctx, partition);
+                    //TODO: Fix record tracking
+                    //                    if (adapterFactory.isRecordTrackingEnabled()) {
+                    //                        tracker = adapterFactory.createIntakeProgressTracker();
+                    //                    }
+                } catch (Exception e) {
+                    LOGGER.severe("Unable to create adapter : " + adapterFactory.getAlias() + "[" + partition + "]"
+                            + " Exception " + e);
+                    throw new HyracksDataException(e);
+                }
+                FrameTupleAccessor fta = new FrameTupleAccessor(recordDesc);
+                feedFrameWriter = new DistributeFeedFrameWriter(ctx, feedId, writer, FeedRuntimeType.INTAKE, partition,
+                        fta, feedManager);
+                adapterRuntimeManager = new AdapterRuntimeManager(feedId, adapter, tracker, feedFrameWriter, partition);
+                SubscribableFeedRuntimeId runtimeId = new SubscribableFeedRuntimeId(feedId, FeedRuntimeType.INTAKE,
+                        partition);
+                ingestionRuntime = new IngestionRuntime(feedId, runtimeId, feedFrameWriter, recordDesc,
+                        adapterRuntimeManager);
+                feedSubscriptionManager.registerFeedSubscribableRuntime(ingestionRuntime);
+                feedFrameWriter.open();
+            } else {
+                if (ingestionRuntime.getAdapterRuntimeManager().getState().equals(State.INACTIVE_INGESTION)) {
+                    adapterRuntimeManager = ingestionRuntime.getAdapterRuntimeManager();
+                    adapterRuntimeManager.setState(State.ACTIVE_INGESTION);
+                    adapter = adapterRuntimeManager.getFeedAdapter();
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info(" Switching to " + State.ACTIVE_INGESTION + " for ingestion runtime "
+                                + ingestionRuntime);
+                        LOGGER.info(" Adaptor " + adapter.getClass().getName() + "[" + partition + "]"
+                                + " connected to backend for feed " + feedId);
+                    }
+                    feedFrameWriter = ingestionRuntime.getFeedFrameWriter();
+                } else {
+                    String message = "Feed Ingestion Runtime for feed " + feedId
+                            + " is already registered and active.";
+                    LOGGER.severe(message);
+                    throw new IllegalStateException(message);
+                }
+            }
+
+            waitTillIngestionIsOver(adapterRuntimeManager);
+            feedSubscriptionManager
+                    .deregisterFeedSubscribableRuntime((SubscribableFeedRuntimeId) ingestionRuntime.getRuntimeId());
+            if (adapterRuntimeManager.getState().equals(IAdapterRuntimeManager.State.FAILED_INGESTION)) {
+                throw new HyracksDataException("Unable to ingest data");
+            }
+
+        } catch (InterruptedException ie) {
+            /*
+             * An InterruptedException is thrown when the intake job cannot progress further due to the failure of another node involved in the Hyracks job.
+             * As the intake job involves only intake operators, the exception indicates a failure at a sibling intake operator location.
+             * The surviving intake partitions must continue to live and receive data from the external source.
+             */
+            List<ISubscriberRuntime> subscribers = ingestionRuntime.getSubscribers();
+            FeedPolicyAccessor policyAccessor = new FeedPolicyAccessor(new HashMap<String, String>());
+            boolean needToHandleFailure = false;
+            List<ISubscriberRuntime> failingSubscribers = new ArrayList<ISubscriberRuntime>();
+            for (ISubscriberRuntime subscriber : subscribers) {
+                policyAccessor.reset(subscriber.getFeedPolicy());
+                if (!policyAccessor.continueOnHardwareFailure()) {
+                    failingSubscribers.add(subscriber);
+                } else {
+                    needToHandleFailure = true;
+                }
+            }
+
+            for (ISubscriberRuntime failingSubscriber : failingSubscribers) {
+                try {
+                    ingestionRuntime.unsubscribeFeed((CollectionRuntime) failingSubscriber);
+                } catch (Exception e) {
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning(
+                                "Exception in unsubscribing " + failingSubscriber + " message " + e.getMessage());
+                    }
+                }
+            }
+
+            if (needToHandleFailure) {
+                ingestionRuntime.getAdapterRuntimeManager().setState(State.INACTIVE_INGESTION);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Switching to " + State.INACTIVE_INGESTION + " on occurrence of failure.");
+                }
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(
+                            "InterruptedException received. No subscriber is configured to continue on failure. Shutting down feed ingestion");
+                }
+                feedSubscriptionManager
+                        .deregisterFeedSubscribableRuntime((SubscribableFeedRuntimeId) ingestionRuntime.getRuntimeId());
+                throw new HyracksDataException(ie);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new HyracksDataException(e);
+        } finally {
+            if (ingestionRuntime != null
+                    && !ingestionRuntime.getAdapterRuntimeManager().getState().equals(State.INACTIVE_INGESTION)) {
+                feedFrameWriter.close();
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Closed Frame Writer " + feedFrameWriter + " adapter state "
+                            + ingestionRuntime.getAdapterRuntimeManager().getState());
+                }
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Ending intake operator node pushable in state " + State.INACTIVE_INGESTION
+                            + ". Will resume after the failure is corrected");
+                }
+            }
+
+        }
+    }
+
+    private void waitTillIngestionIsOver(IAdapterRuntimeManager adapterRuntimeManager) throws InterruptedException {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Waiting for adaptor [" + partition + "] to be done with ingestion of feed " + feedId);
+        }
+        synchronized (adapterRuntimeManager) {
+            while (!(adapterRuntimeManager.getState().equals(IAdapterRuntimeManager.State.FINISHED_INGESTION)
+                    || (adapterRuntimeManager.getState().equals(IAdapterRuntimeManager.State.FAILED_INGESTION)))) {
+                adapterRuntimeManager.wait();
+            }
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(" Adaptor " + adapter.getClass().getName() + "[" + partition + "]"
+                    + " done with ingestion of feed " + feedId);
+        }
+    }
+
+}
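
The waitTillIngestionIsOver() method above relies on the classic guarded-wait idiom: the waiting thread loops on the adapter state while holding the manager's monitor, and the state owner is expected to change the state and call notifyAll() under the same monitor. A minimal, self-contained sketch of that contract follows; the class and enum names are illustrative and not taken from the patch.

    final class IngestionStateHolder {
        enum State { ACTIVE, FINISHED, FAILED }

        private State state = State.ACTIVE;

        // Called by the adapter side when ingestion ends or fails.
        synchronized void setState(State newState) {
            state = newState;
            notifyAll(); // wake any thread blocked in awaitCompletion()
        }

        // Called by the operator; blocks until a terminal state is reached.
        synchronized void awaitCompletion() throws InterruptedException {
            while (state != State.FINISHED && state != State.FAILED) {
                wait(); // releases the monitor while waiting
            }
        }
    }

If the setter did not synchronize or did not call notifyAll(), the waiter in waitTillIngestionIsOver() would block indefinitely, which is why both sides must use the same monitor.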

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorDescriptor.java
new file mode 100644
index 0000000..219110f
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMessageOperatorDescriptor.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.operators;
+
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Sends a control message to the registered message queue of the feed identified by its connection id.
+ */
+public class FeedMessageOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+    private final IFeedMessage feedMessage;
+
+    public FeedMessageOperatorDescriptor(JobSpecification spec, FeedConnectionId connectionId,
+            IFeedMessage feedMessage) {
+        super(spec, 0, 1);
+        this.connectionId = connectionId;
+        this.feedMessage = feedMessage;
+    }
+
+    @Override
+    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+        return new FeedMessageOperatorNodePushable(ctx, connectionId, feedMessage, partition, nPartitions);
+    }
+
+}
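
As with the intake descriptor, a hedged sketch of how this descriptor might be used: a single-operator job that delivers one control message to a connected feed. The helper name buildMessageJob is hypothetical, and the IFeedMessage instance is assumed to be constructed elsewhere.

    import org.apache.asterix.external.feed.api.IFeedMessage;
    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.operators.FeedMessageOperatorDescriptor;
    import org.apache.hyracks.api.job.JobSpecification;

    public class FeedMessageJobSketch {
        // Hypothetical helper: a job spec whose only operator sends the given control message.
        public static JobSpecification buildMessageJob(FeedConnectionId connectionId, IFeedMessage message) {
            JobSpecification spec = new JobSpecification();
            FeedMessageOperatorDescriptor messageOp =
                    new FeedMessageOperatorDescriptor(spec, connectionId, message);
            spec.addRoot(messageOp);
            return spec;
        }
    }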


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/DataGenerator.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/DataGenerator.java b/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/DataGenerator.java
deleted file mode 100644
index cbf488c..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/DataGenerator.java
+++ /dev/null
@@ -1,1188 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.runtime;
-
-import java.nio.CharBuffer;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Random;
-
-import org.apache.asterix.external.util.Datatypes;
-
-public class DataGenerator {
-
-    private RandomDateGenerator randDateGen;
-    private RandomNameGenerator randNameGen;
-    private RandomMessageGenerator randMessageGen;
-    private RandomLocationGenerator randLocationGen;
-    private Random random = new Random();
-    private TwitterUser twUser = new TwitterUser();
-    private TweetMessage twMessage = new TweetMessage();
-    private static final String DEFAULT_COUNTRY = "US";
-
-    public DataGenerator(InitializationInfo info) {
-        initialize(info);
-    }
-
-    public class TweetMessageIterator implements Iterator<TweetMessage> {
-
-        private final int duration;
-        private long startTime = 0;
-        private int tweetId;
-
-        public TweetMessageIterator(int duration) {
-            this.duration = duration;
-            this.startTime = System.currentTimeMillis();
-        }
-
-        @Override
-        public boolean hasNext() {
-            if (duration == TweetGenerator.INFINITY) {
-                return true;
-            }
-            return System.currentTimeMillis() - startTime <= duration * 1000;
-        }
-
-        @Override
-        public TweetMessage next() {
-            tweetId++;
-            TweetMessage msg = null;
-            getTwitterUser(null);
-            Message message = randMessageGen.getNextRandomMessage();
-            Point location = randLocationGen.getRandomPoint();
-            DateTime sendTime = randDateGen.getNextRandomDatetime();
-            twMessage.reset(tweetId, twUser, location.getLatitude(), location.getLongitude(), sendTime.toString(),
-                    message, DEFAULT_COUNTRY);
-            msg = twMessage;
-            return msg;
-        }
-
-        @Override
-        public void remove() {
-            // TODO Auto-generated method stub
-
-        }
-
-    }
-
-    public static class InitializationInfo {
-        public Date startDate = new Date(1, 1, 2005);
-        public Date endDate = new Date(8, 20, 2012);
-        public String[] lastNames = DataGenerator.lastNames;
-        public String[] firstNames = DataGenerator.firstNames;
-        public String[] vendors = DataGenerator.vendors;
-        public String[] jargon = DataGenerator.jargon;
-        public String[] org_list = DataGenerator.org_list;
-    }
-
-    public void initialize(InitializationInfo info) {
-        randDateGen = new RandomDateGenerator(info.startDate, info.endDate);
-        randNameGen = new RandomNameGenerator(info.firstNames, info.lastNames);
-        randLocationGen = new RandomLocationGenerator(24, 49, 66, 98);
-        randMessageGen = new RandomMessageGenerator(info.vendors, info.jargon);
-    }
-
-    public void getTwitterUser(String usernameSuffix) {
-        String suggestedName = randNameGen.getRandomName();
-        String[] nameComponents = suggestedName.split(" ");
-        String screenName = nameComponents[0] + nameComponents[1] + randNameGen.getRandomNameSuffix();
-        String name = suggestedName;
-        if (usernameSuffix != null) {
-            name = name + usernameSuffix;
-        }
-        int numFriends = random.nextInt((int) (100)); // draw from Zipfian
-        int statusesCount = random.nextInt(500); // draw from Zipfian
-        int followersCount = random.nextInt((int) (200));
-        twUser.reset(screenName, numFriends, statusesCount, name, followersCount);
-    }
-
-    public static class RandomDateGenerator {
-
-        private final Date startDate;
-        private final Date endDate;
-        private final Random random = new Random();
-        private final int yearDifference;
-        private Date workingDate;
-        private Date recentDate;
-        private DateTime dateTime;
-
-        public RandomDateGenerator(Date startDate, Date endDate) {
-            this.startDate = startDate;
-            this.endDate = endDate;
-            this.yearDifference = endDate.getYear() - startDate.getYear() + 1;
-            this.workingDate = new Date();
-            this.recentDate = new Date();
-            this.dateTime = new DateTime();
-        }
-
-        public Date getStartDate() {
-            return startDate;
-        }
-
-        public Date getEndDate() {
-            return endDate;
-        }
-
-        public Date getNextRandomDate() {
-            int year = random.nextInt(yearDifference) + startDate.getYear();
-            int month;
-            int day;
-            if (year == endDate.getYear()) {
-                month = random.nextInt(endDate.getMonth()) + 1;
-                if (month == endDate.getMonth()) {
-                    day = random.nextInt(endDate.getDay()) + 1;
-                } else {
-                    day = random.nextInt(28) + 1;
-                }
-            } else {
-                month = random.nextInt(12) + 1;
-                day = random.nextInt(28) + 1;
-            }
-            workingDate.reset(month, day, year);
-            return workingDate;
-        }
-
-        public DateTime getNextRandomDatetime() {
-            Date randomDate = getNextRandomDate();
-            dateTime.reset(randomDate);
-            return dateTime;
-        }
-
-        public Date getNextRecentDate(Date date) {
-            int year = date.getYear()
-                    + (date.getYear() == endDate.getYear() ? 0 : random.nextInt(endDate.getYear() - date.getYear()));
-            int month = (year == endDate.getYear()) ? date.getMonth() == endDate.getMonth() ? (endDate.getMonth())
-                    : (date.getMonth() + random.nextInt(endDate.getMonth() - date.getMonth())) : random.nextInt(12) + 1;
-
-            int day = (year == endDate.getYear()) ? month == endDate.getMonth() ? date.getDay() == endDate.getDay() ? endDate
-                    .getDay() : date.getDay() + random.nextInt(endDate.getDay() - date.getDay())
-                    : random.nextInt(28) + 1
-                    : random.nextInt(28) + 1;
-            recentDate.reset(month, day, year);
-            return recentDate;
-        }
-
-    }
-
-    public static class DateTime extends Date {
-
-        private String hour = "10";
-        private String min = "10";
-        private String sec = "00";
-
-        public DateTime(int month, int day, int year, String hour, String min, String sec) {
-            super(month, day, year);
-            this.hour = hour;
-            this.min = min;
-            this.sec = sec;
-        }
-
-        public DateTime() {
-        }
-
-        public void reset(int month, int day, int year, String hour, String min, String sec) {
-            super.setDay(month);
-            super.setDay(day);
-            super.setYear(year);
-            this.hour = hour;
-            this.min = min;
-            this.sec = sec;
-        }
-
-        public DateTime(Date date) {
-            super(date.getMonth(), date.getDay(), date.getYear());
-        }
-
-        public void reset(Date date) {
-            reset(date.getMonth(), date.getDay(), date.getYear());
-        }
-
-        public DateTime(Date date, int hour, int min, int sec) {
-            super(date.getMonth(), date.getDay(), date.getYear());
-            this.hour = (hour < 10) ? "0" : "" + hour;
-            this.min = (min < 10) ? "0" : "" + min;
-            this.sec = (sec < 10) ? "0" : "" + sec;
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append("\"");
-            builder.append(super.getYear());
-            builder.append("-");
-            builder.append(super.getMonth() < 10 ? "0" + super.getMonth() : super.getMonth());
-            builder.append("-");
-            builder.append(super.getDay() < 10 ? "0" + super.getDay() : super.getDay());
-            builder.append("T");
-            builder.append(hour + ":" + min + ":" + sec);
-            builder.append("\"");
-            return builder.toString();
-        }
-    }
-
-    public static class Message {
-
-        private char[] message = new char[500];
-        private List<String> referredTopics;
-        private int length;
-
-        public Message(char[] m, List<String> referredTopics) {
-            System.arraycopy(m, 0, message, 0, m.length);
-            length = m.length;
-            this.referredTopics = referredTopics;
-        }
-
-        public Message() {
-            referredTopics = new ArrayList<String>();
-            length = 0;
-        }
-
-        public List<String> getReferredTopics() {
-            return referredTopics;
-        }
-
-        public void reset(char[] m, int offset, int length, List<String> referredTopics) {
-            System.arraycopy(m, offset, message, 0, length);
-            this.length = length;
-            this.referredTopics = referredTopics;
-        }
-
-        public int getLength() {
-            return length;
-        }
-
-        public char charAt(int index) {
-            return message[index];
-        }
-
-    }
-
-    public static class Point {
-
-        private float latitude;
-        private float longitude;
-
-        public float getLatitude() {
-            return latitude;
-        }
-
-        public float getLongitude() {
-            return longitude;
-        }
-
-        public Point(float latitude, float longitude) {
-            this.latitude = latitude;
-            this.longitude = longitude;
-        }
-
-        public void reset(float latitude, float longitude) {
-            this.latitude = latitude;
-            this.longitude = longitude;
-        }
-
-        public Point() {
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append("point(\"" + latitude + "," + longitude + "\")");
-            return builder.toString();
-        }
-    }
-
-    public static class RandomNameGenerator {
-
-        private String[] firstNames;
-        private String[] lastNames;
-
-        private final Random random = new Random();
-
-        private final String[] connectors = new String[] { "_", "#", "$", "@" };
-
-        public RandomNameGenerator(String[] firstNames, String[] lastNames) {
-            this.firstNames = firstNames;
-            this.lastNames = lastNames;
-        }
-
-        public String getRandomName() {
-            String name;
-            name = getSuggestedName();
-            return name;
-
-        }
-
-        private String getSuggestedName() {
-            int firstNameIndex = random.nextInt(firstNames.length);
-            int lastNameIndex = random.nextInt(lastNames.length);
-            String suggestedName = firstNames[firstNameIndex] + " " + lastNames[lastNameIndex];
-            return suggestedName;
-        }
-
-        public String getRandomNameSuffix() {
-            return connectors[random.nextInt(connectors.length)] + random.nextInt(1000);
-        }
-    }
-
-    public static class RandomMessageGenerator {
-
-        private final MessageTemplate messageTemplate;
-
-        public RandomMessageGenerator(String[] vendors, String[] jargon) {
-            List<String> vendorList = new ArrayList<String>();
-            for (String v : vendors) {
-                vendorList.add(v);
-            }
-            List<String> jargonList = new ArrayList<String>();
-            for (String j : jargon) {
-                jargonList.add(j);
-            }
-            this.messageTemplate = new MessageTemplate(vendorList, jargonList);
-        }
-
-        public Message getNextRandomMessage() {
-            return messageTemplate.getNextMessage();
-        }
-    }
-
-    public static class AbstractMessageTemplate {
-
-        protected final Random random = new Random();
-
-        protected String[] positiveVerbs = new String[] { "like", "love" };
-        protected String[] negativeVerbs = new String[] { "dislike", "hate", "can't stand" };
-
-        protected String[] negativeAdjectives = new String[] { "horrible", "bad", "terrible", "OMG" };
-        protected String[] postiveAdjectives = new String[] { "good", "awesome", "amazing", "mind-blowing" };
-
-        protected String[] otherWords = new String[] { "the", "its" };
-    }
-
-    public static class MessageTemplate extends AbstractMessageTemplate {
-
-        private List<String> vendors;
-        private List<String> jargon;
-        private CharBuffer buffer;
-        private List<String> referredTopics;
-        private Message message = new Message();
-
-        public MessageTemplate(List<String> vendors, List<String> jargon) {
-            this.vendors = vendors;
-            this.jargon = jargon;
-            buffer = CharBuffer.allocate(2500);
-            referredTopics = new ArrayList<String>();
-        }
-
-        public Message getNextMessage() {
-            buffer.position(0);
-            buffer.limit(2500);
-            referredTopics.clear();
-            boolean isPositive = random.nextBoolean();
-            String[] verbArray = isPositive ? positiveVerbs : negativeVerbs;
-            String[] adjectiveArray = isPositive ? postiveAdjectives : negativeAdjectives;
-            String verb = verbArray[random.nextInt(verbArray.length)];
-            String adjective = adjectiveArray[random.nextInt(adjectiveArray.length)];
-
-            buffer.put(" ");
-            buffer.put(verb);
-            buffer.put(" ");
-            String vendor = vendors.get(random.nextInt(vendors.size()));
-            referredTopics.add(vendor);
-            buffer.append(vendor);
-            buffer.append(" ");
-            buffer.append(otherWords[random.nextInt(otherWords.length)]);
-            buffer.append(" ");
-            String jargonTerm = jargon.get(random.nextInt(jargon.size()));
-            referredTopics.add(jargonTerm);
-            buffer.append(jargonTerm);
-            buffer.append(" is ");
-            buffer.append(adjective);
-            if (random.nextBoolean()) {
-                buffer.append(isPositive ? ":)" : ":(");
-            }
-
-            buffer.flip();
-            message.reset(buffer.array(), 0, buffer.limit(), referredTopics);
-            return message;
-        }
-    }
-
-    public static class RandomUtil {
-
-        public static Random random = new Random();
-
-        public static int[] getKFromN(int k, int n) {
-            int[] result = new int[k];
-            int cnt = 0;
-            HashSet<Integer> values = new HashSet<Integer>();
-            while (cnt < k) {
-                int val = random.nextInt(n + 1);
-                if (values.contains(val)) {
-                    continue;
-                }
-
-                result[cnt++] = val;
-                values.add(val);
-            }
-            return result;
-        }
-    }
-
-    public static class RandomLocationGenerator {
-
-        private Random random = new Random();
-
-        private final int beginLat;
-        private final int endLat;
-        private final int beginLong;
-        private final int endLong;
-
-        private Point point;
-
-        public RandomLocationGenerator(int beginLat, int endLat, int beginLong, int endLong) {
-            this.beginLat = beginLat;
-            this.endLat = endLat;
-            this.beginLong = beginLong;
-            this.endLong = endLong;
-            this.point = new Point();
-        }
-
-        public Point getRandomPoint() {
-            int latMajor = beginLat + random.nextInt(endLat - beginLat);
-            int latMinor = random.nextInt(100);
-            float latitude = latMajor + ((float) latMinor) / 100;
-
-            int longMajor = beginLong + random.nextInt(endLong - beginLong);
-            int longMinor = random.nextInt(100);
-            float longitude = longMajor + ((float) longMinor) / 100;
-
-            point.reset(latitude, longitude);
-            return point;
-        }
-
-    }
-
-    public static class TweetMessage {
-
-        private static final String[] DEFAULT_FIELDS = new String[] { TweetFields.TWEETID, TweetFields.USER,
-                TweetFields.LATITUDE, TweetFields.LONGITUDE, TweetFields.MESSAGE_TEXT, TweetFields.CREATED_AT,
-                TweetFields.COUNTRY };
-
-        private int id;
-        private TwitterUser user;
-        private double latitude;
-        private double longitude;
-        private String created_at;
-        private Message messageText;
-        private String country;
-
-        public static final class TweetFields {
-            public static final String TWEETID = "id";
-            public static final String USER = "user";
-            public static final String LATITUDE = "latitude";
-            public static final String LONGITUDE = "longitude";
-            public static final String MESSAGE_TEXT = "message_text";
-            public static final String CREATED_AT = "created_at";
-            public static final String COUNTRY = "country";
-
-        }
-
-        public TweetMessage() {
-        }
-
-        public TweetMessage(int tweetid, TwitterUser user, double latitude, double longitude, String created_at,
-                Message messageText, String country) {
-            this.id = tweetid;
-            this.user = user;
-            this.latitude = latitude;
-            this.longitude = longitude;
-            this.created_at = created_at;
-            this.messageText = messageText;
-            this.country = country;
-        }
-
-        public void reset(int tweetid, TwitterUser user, double latitude, double longitude, String created_at,
-                Message messageText, String country) {
-            this.id = tweetid;
-            this.user = user;
-            this.latitude = latitude;
-            this.longitude = longitude;
-            this.created_at = created_at;
-            this.messageText = messageText;
-            this.country = country;
-        }
-
-        public String getAdmEquivalent(String[] fields) {
-            if (fields == null) {
-                fields = DEFAULT_FIELDS;
-            }
-            StringBuilder builder = new StringBuilder();
-            builder.append("{");
-            for (String field : fields) {
-                switch (field) {
-                    case Datatypes.Tweet.ID:
-                        appendFieldName(builder, Datatypes.Tweet.ID);
-                        builder.append("int64(\"" + id + "\")");
-                        break;
-                    case Datatypes.Tweet.USER:
-                        appendFieldName(builder, Datatypes.Tweet.USER);
-                        builder.append(user);
-                        break;
-                    case Datatypes.Tweet.LATITUDE:
-                        appendFieldName(builder, Datatypes.Tweet.LATITUDE);
-                        builder.append(latitude);
-                        break;
-                    case Datatypes.Tweet.LONGITUDE:
-                        appendFieldName(builder, Datatypes.Tweet.LONGITUDE);
-                        builder.append(longitude);
-                        break;
-                    case Datatypes.Tweet.MESSAGE:
-                        appendFieldName(builder, Datatypes.Tweet.MESSAGE);
-                        builder.append("\"");
-                        for (int i = 0; i < messageText.getLength(); i++) {
-                            builder.append(messageText.charAt(i));
-                        }
-                        builder.append("\"");
-                        break;
-                    case Datatypes.Tweet.CREATED_AT:
-                        appendFieldName(builder, Datatypes.Tweet.CREATED_AT);
-                        builder.append(created_at);
-                        break;
-                    case Datatypes.Tweet.COUNTRY:
-                        appendFieldName(builder, Datatypes.Tweet.COUNTRY);
-                        builder.append("\"" + country + "\"");
-                        break;
-                }
-                builder.append(",");
-            }
-            builder.deleteCharAt(builder.length() - 1);
-            builder.append("}");
-            return builder.toString();
-        }
-
-        private void appendFieldName(StringBuilder builder, String fieldName) {
-            builder.append("\"" + fieldName + "\":");
-        }
-
-        public int getTweetid() {
-            return id;
-        }
-
-        public void setTweetid(int tweetid) {
-            this.id = tweetid;
-        }
-
-        public TwitterUser getUser() {
-            return user;
-        }
-
-        public void setUser(TwitterUser user) {
-            this.user = user;
-        }
-
-        public double getLatitude() {
-            return latitude;
-        }
-
-        public String getSendTime() {
-            return created_at;
-        }
-
-        public Message getMessageText() {
-            return messageText;
-        }
-
-        public void setMessageText(Message messageText) {
-            this.messageText = messageText;
-        }
-
-        public String getCountry() {
-            return country;
-        }
-
-    }
-
-    public static class TwitterUser {
-
-        private String screenName;
-        private String lang = "en";
-        private int friendsCount;
-        private int statusesCount;
-        private String name;
-        private int followersCount;
-
-        public TwitterUser() {
-
-        }
-
-        public TwitterUser(String screenName, int friendsCount, int statusesCount, String name, int followersCount) {
-            this.screenName = screenName;
-            this.friendsCount = friendsCount;
-            this.statusesCount = statusesCount;
-            this.name = name;
-            this.followersCount = followersCount;
-        }
-
-        public void reset(String screenName, int friendsCount, int statusesCount, String name, int followersCount) {
-            this.screenName = screenName;
-            this.friendsCount = friendsCount;
-            this.statusesCount = statusesCount;
-            this.name = name;
-            this.followersCount = followersCount;
-        }
-
-        public String getScreenName() {
-            return screenName;
-        }
-
-        public int getFriendsCount() {
-            return friendsCount;
-        }
-
-        public int getStatusesCount() {
-            return statusesCount;
-        }
-
-        public String getName() {
-            return name;
-        }
-
-        public int getFollowersCount() {
-            return followersCount;
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append("{");
-            builder.append("\"screen_name\":" + "\"" + screenName + "\"");
-            builder.append(",");
-            builder.append("\"language\":" + "\"" + lang + "\"");
-            builder.append(",");
-            builder.append("\"friends_count\":" + friendsCount);
-            builder.append(",");
-            builder.append("\"status_count\":" + statusesCount);
-            builder.append(",");
-            builder.append("\"name\":" + "\"" + name + "\"");
-            builder.append(",");
-            builder.append("\"followers_count\":" + followersCount);
-            builder.append("}");
-            return builder.toString();
-        }
-
-    }
-
-    public static class Date {
-
-        private int day;
-        private int month;
-        private int year;
-
-        public Date(int month, int day, int year) {
-            this.month = month;
-            this.day = day;
-            this.year = year;
-        }
-
-        public void reset(int month, int day, int year) {
-            this.month = month;
-            this.day = day;
-            this.year = year;
-        }
-
-        public int getDay() {
-            return day;
-        }
-
-        public int getMonth() {
-            return month;
-        }
-
-        public int getYear() {
-            return year;
-        }
-
-        public Date() {
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append("date");
-            builder.append("(\"");
-            builder.append(year);
-            builder.append("-");
-            builder.append(month < 10 ? "0" + month : "" + month);
-            builder.append("-");
-            builder.append(day < 10 ? "0" + day : "" + day);
-            builder.append("\")");
-            return builder.toString();
-        }
-
-        public void setDay(int day) {
-            this.day = day;
-        }
-
-        public void setMonth(int month) {
-            this.month = month;
-        }
-
-        public void setYear(int year) {
-            this.year = year;
-        }
-    }
-
-    public static String[] lastNames = { "Hoopengarner", "Harrow", "Gardner", "Blyant", "Best", "Buttermore", "Gronko",
-            "Mayers", "Countryman", "Neely", "Ruhl", "Taggart", "Bash", "Cason", "Hil", "Zalack", "Mingle", "Carr",
-            "Rohtin", "Wardle", "Pullman", "Wire", "Kellogg", "Hiles", "Keppel", "Bratton", "Sutton", "Wickes",
-            "Muller", "Friedline", "Llora", "Elizabeth", "Anderson", "Gaskins", "Rifler", "Vinsant", "Stanfield",
-            "Black", "Guest", "Hujsak", "Carter", "Weidemann", "Hays", "Patton", "Hayhurst", "Paynter", "Cressman",
-            "Fiddler", "Evans", "Sherlock", "Woodworth", "Jackson", "Bloise", "Schneider", "Ring", "Kepplinger",
-            "James", "Moon", "Bennett", "Bashline", "Ryals", "Zeal", "Christman", "Milliron", "Nash", "Ewing", "Camp",
-            "Mason", "Richardson", "Bowchiew", "Hahn", "Wilson", "Wood", "Toyley", "Williamson", "Lafortune", "Errett",
-            "Saltser", "Hirleman", "Brindle", "Newbiggin", "Ulery", "Lambert", "Shick", "Kuster", "Moore", "Finck",
-            "Powell", "Jolce", "Townsend", "Sauter", "Cowher", "Wolfe", "Cavalet", "Porter", "Laborde", "Ballou",
-            "Murray", "Stoddard", "Pycroft", "Milne", "King", "Todd", "Staymates", "Hall", "Romanoff", "Keilbach",
-            "Sandford", "Hamilton", "Fye", "Kline", "Weeks", "Mcelroy", "Mccullough", "Bryant", "Hill", "Moore",
-            "Ledgerwood", "Prevatt", "Eckert", "Read", "Hastings", "Doverspike", "Allshouse", "Bryan", "Mccallum",
-            "Lombardi", "Mckendrick", "Cattley", "Barkley", "Steiner", "Finlay", "Priebe", "Armitage", "Hall", "Elder",
-            "Erskine", "Hatcher", "Walker", "Pearsall", "Dunkle", "Haile", "Adams", "Miller", "Newbern", "Basinger",
-            "Fuhrer", "Brinigh", "Mench", "Blackburn", "Bastion", "Mccune", "Bridger", "Hynes", "Quinn", "Courtney",
-            "Geddinge", "Field", "Seelig", "Cable", "Earhart", "Harshman", "Roby", "Beals", "Berry", "Reed", "Hector",
-            "Pittman", "Haverrman", "Kalp", "Briner", "Joghs", "Cowart", "Close", "Wynne", "Harden", "Weldy",
-            "Stephenson", "Hildyard", "Moberly", "Wells", "Mackendoerfer", "Fisher", "Oppie", "Oneal", "Churchill",
-            "Keister", "Alice", "Tavoularis", "Fisher", "Hair", "Burns", "Veith", "Wile", "Fuller", "Fields", "Clark",
-            "Randolph", "Stone", "Mcclymonds", "Holtzer", "Donkin", "Wilkinson", "Rosensteel", "Albright", "Stahl",
-            "Fox", "Kadel", "Houser", "Hanseu", "Henderson", "Davis", "Bicknell", "Swain", "Mercer", "Holdeman",
-            "Enderly", "Caesar", "Margaret", "Munshower", "Elless", "Lucy", "Feufer", "Schofield", "Graham",
-            "Blatenberger", "Benford", "Akers", "Campbell", "Ann", "Sadley", "Ling", "Gongaware", "Schmidt", "Endsley",
-            "Groah", "Flanders", "Reichard", "Lowstetter", "Sandblom", "Griffis", "Basmanoff", "Coveney", "Hawker",
-            "Archibald", "Hutton", "Barnes", "Diegel", "Raybould", "Focell", "Breitenstein", "Murray", "Chauvin",
-            "Busk", "Pheleps", "Teagarden", "Northey", "Baumgartner", "Fleming", "Harris", "Parkinson", "Carpenter",
-            "Whirlow", "Bonner", "Wortman", "Rogers", "Scott", "Lowe", "Mckee", "Huston", "Bullard", "Throckmorton",
-            "Rummel", "Mathews", "Dull", "Saline", "Tue", "Woolery", "Lalty", "Schrader", "Ramsey", "Eisenmann",
-            "Philbrick", "Sybilla", "Wallace", "Fonblanque", "Paul", "Orbell", "Higgens", "Casteel", "Franks",
-            "Demuth", "Eisenman", "Hay", "Robinson", "Fischer", "Hincken", "Wylie", "Leichter", "Bousum",
-            "Littlefield", "Mcdonald", "Greif", "Rhodes", "Wall", "Steele", "Baldwin", "Smith", "Stewart", "Schere",
-            "Mary", "Aultman", "Emrick", "Guess", "Mitchell", "Painter", "Aft", "Hasely", "Weldi", "Loewentsein",
-            "Poorbaugh", "Kepple", "Noton", "Judge", "Jackson", "Style", "Adcock", "Diller", "Marriman", "Johnston",
-            "Children", "Monahan", "Ehret", "Shaw", "Congdon", "Pinney", "Millard", "Crissman", "Tanner", "Rose",
-            "Knisely", "Cypret", "Sommer", "Poehl", "Hardie", "Bender", "Overholt", "Gottwine", "Beach", "Leslie",
-            "Trevithick", "Langston", "Magor", "Shotts", "Howe", "Hunter", "Cross", "Kistler", "Dealtry", "Christner",
-            "Pennington", "Thorley", "Eckhardstein", "Van", "Stroh", "Stough", "Stall", "Beedell", "Shea", "Garland",
-            "Mays", "Pritchard", "Frankenberger", "Rowley", "Lane", "Baum", "Alliman", "Park", "Jardine", "Butler",
-            "Cherry", "Kooser", "Baxter", "Billimek", "Downing", "Hurst", "Wood", "Baird", "Watkins", "Edwards",
-            "Kemerer", "Harding", "Owens", "Eiford", "Keener", "Garneis", "Fiscina", "Mang", "Draudy", "Mills",
-            "Gibson", "Reese", "Todd", "Ramos", "Levett", "Wilks", "Ward", "Mosser", "Dunlap", "Kifer", "Christopher",
-            "Ashbaugh", "Wynter", "Rawls", "Cribbs", "Haynes", "Thigpen", "Schreckengost", "Bishop", "Linton",
-            "Chapman", "James", "Jerome", "Hook", "Omara", "Houston", "Maclagan", "Sandys", "Pickering", "Blois",
-            "Dickson", "Kemble", "Duncan", "Woodward", "Southern", "Henley", "Treeby", "Cram", "Elsas", "Driggers",
-            "Warrick", "Overstreet", "Hindman", "Buck", "Sulyard", "Wentzel", "Swink", "Butt", "Schaeffer",
-            "Hoffhants", "Bould", "Willcox", "Lotherington", "Bagley", "Graff", "White", "Wheeler", "Sloan",
-            "Rodacker", "Hanford", "Jowers", "Kunkle", "Cass", "Powers", "Gilman", "Mcmichaels", "Hobbs", "Herndon",
-            "Prescott", "Smail", "Mcdonald", "Biery", "Orner", "Richards", "Mueller", "Isaman", "Bruxner", "Goodman",
-            "Barth", "Turzanski", "Vorrasi", "Stainforth", "Nehling", "Rahl", "Erschoff", "Greene", "Mckinnon",
-            "Reade", "Smith", "Pery", "Roose", "Greenwood", "Weisgarber", "Curry", "Holts", "Zadovsky", "Parrish",
-            "Putnam", "Munson", "Mcindoe", "Nickolson", "Brooks", "Bollinger", "Stroble", "Siegrist", "Fulton",
-            "Tomey", "Zoucks", "Roberts", "Otis", "Clarke", "Easter", "Johnson", "Fylbrigg", "Taylor", "Swartzbaugh",
-            "Weinstein", "Gadow", "Sayre", "Marcotte", "Wise", "Atweeke", "Mcfall", "Napier", "Eisenhart", "Canham",
-            "Sealis", "Baughman", "Gertraht", "Losey", "Laurence", "Eva", "Pershing", "Kern", "Pirl", "Rega",
-            "Sanborn", "Kanaga", "Sanders", "Anderson", "Dickinson", "Osteen", "Gettemy", "Crom", "Snyder", "Reed",
-            "Laurenzi", "Riggle", "Tillson", "Fowler", "Raub", "Jenner", "Koepple", "Soames", "Goldvogel", "Dimsdale",
-            "Zimmer", "Giesen", "Baker", "Beail", "Mortland", "Bard", "Sanner", "Knopsnider", "Jenkins", "Bailey",
-            "Werner", "Barrett", "Faust", "Agg", "Tomlinson", "Williams", "Little", "Greenawalt", "Wells", "Wilkins",
-            "Gisiko", "Bauerle", "Harrold", "Prechtl", "Polson", "Faast", "Winton", "Garneys", "Peters", "Potter",
-            "Porter", "Tennant", "Eve", "Dugger", "Jones", "Burch", "Cowper", "Whittier" };
-
-    public static String[] firstNames = { "Albert", "Jacquelin", "Dona", "Alia", "Mayme", "Genoveva", "Emma", "Lena",
-            "Melody", "Vilma", "Katelyn", "Jeremy", "Coral", "Leann", "Lita", "Gilda", "Kayla", "Alvina", "Maranda",
-            "Verlie", "Khadijah", "Karey", "Patrice", "Kallie", "Corey", "Mollie", "Daisy", "Melanie", "Sarita",
-            "Nichole", "Pricilla", "Terresa", "Berneice", "Arianne", "Brianne", "Lavinia", "Ulrike", "Lesha", "Adell",
-            "Ardelle", "Marisha", "Laquita", "Karyl", "Maryjane", "Kendall", "Isobel", "Raeann", "Heike", "Barbera",
-            "Norman", "Yasmine", "Nevada", "Mariam", "Edith", "Eugena", "Lovie", "Maren", "Bennie", "Lennie", "Tamera",
-            "Crystal", "Randi", "Anamaria", "Chantal", "Jesenia", "Avis", "Shela", "Randy", "Laurena", "Sharron",
-            "Christiane", "Lorie", "Mario", "Elizabeth", "Reina", "Adria", "Lakisha", "Brittni", "Azzie", "Dori",
-            "Shaneka", "Asuncion", "Katheryn", "Laurice", "Sharita", "Krystal", "Reva", "Inger", "Alpha", "Makeda",
-            "Anabel", "Loni", "Tiara", "Meda", "Latashia", "Leola", "Chin", "Daisey", "Ivory", "Amalia", "Logan",
-            "Tyler", "Kyong", "Carolann", "Maryetta", "Eufemia", "Anya", "Doreatha", "Lorna", "Rutha", "Ehtel",
-            "Debbie", "Chassidy", "Sang", "Christa", "Lottie", "Chun", "Karine", "Peggie", "Amina", "Melany", "Alayna",
-            "Scott", "Romana", "Naomi", "Christiana", "Salena", "Taunya", "Mitsue", "Regina", "Chelsie", "Charity",
-            "Dacia", "Aletha", "Latosha", "Lia", "Tamica", "Chery", "Bianca", "Shu", "Georgianne", "Myriam", "Austin",
-            "Wan", "Mallory", "Jana", "Georgie", "Jenell", "Kori", "Vicki", "Delfina", "June", "Mellisa", "Catherina",
-            "Claudie", "Tynisha", "Dayle", "Enriqueta", "Belen", "Pia", "Sarai", "Rosy", "Renay", "Kacie", "Frieda",
-            "Cayla", "Elissa", "Claribel", "Sabina", "Mackenzie", "Raina", "Cira", "Mitzie", "Aubrey", "Serafina",
-            "Maria", "Katharine", "Esperanza", "Sung", "Daria", "Billye", "Stefanie", "Kasha", "Holly", "Suzanne",
-            "Inga", "Flora", "Andria", "Genevie", "Eladia", "Janet", "Erline", "Renna", "Georgeanna", "Delorse",
-            "Elnora", "Rudy", "Rima", "Leanora", "Letisha", "Love", "Alverta", "Pinkie", "Domonique", "Jeannie",
-            "Jose", "Jacqueline", "Tara", "Lily", "Erna", "Tennille", "Galina", "Tamala", "Kirby", "Nichelle",
-            "Myesha", "Farah", "Santa", "Ludie", "Kenia", "Yee", "Micheline", "Maryann", "Elaina", "Ethelyn",
-            "Emmaline", "Shanell", "Marina", "Nila", "Alane", "Shakira", "Dorris", "Belinda", "Elois", "Barbie",
-            "Carita", "Gisela", "Lura", "Fransisca", "Helga", "Peg", "Leonarda", "Earlie", "Deetta", "Jacquetta",
-            "Blossom", "Kayleigh", "Deloras", "Keshia", "Christinia", "Dulce", "Bernie", "Sheba", "Lashanda", "Tula",
-            "Claretta", "Kary", "Jeanette", "Lupita", "Lenora", "Hisako", "Sherise", "Glynda", "Adela", "Chia",
-            "Sudie", "Mindy", "Caroyln", "Lindsey", "Xiomara", "Mercedes", "Onie", "Loan", "Alexis", "Tommie",
-            "Donette", "Monica", "Soo", "Camellia", "Lavera", "Valery", "Ariana", "Sophia", "Loris", "Ginette",
-            "Marielle", "Tari", "Julissa", "Alesia", "Suzanna", "Emelda", "Erin", "Ladawn", "Sherilyn", "Candice",
-            "Nereida", "Fairy", "Carl", "Joel", "Marilee", "Gracia", "Cordie", "So", "Shanita", "Drew", "Cassie",
-            "Sherie", "Marget", "Norma", "Delois", "Debera", "Chanelle", "Catarina", "Aracely", "Carlene", "Tricia",
-            "Aleen", "Katharina", "Marguerita", "Guadalupe", "Margorie", "Mandie", "Kathe", "Chong", "Sage", "Faith",
-            "Maryrose", "Stephany", "Ivy", "Pauline", "Susie", "Cristen", "Jenifer", "Annette", "Debi", "Karmen",
-            "Luci", "Shayla", "Hope", "Ocie", "Sharie", "Tami", "Breana", "Kerry", "Rubye", "Lashay", "Sondra",
-            "Katrice", "Brunilda", "Cortney", "Yan", "Zenobia", "Penni", "Addie", "Lavona", "Noel", "Anika",
-            "Herlinda", "Valencia", "Bunny", "Tory", "Victoria", "Carrie", "Mikaela", "Wilhelmina", "Chung",
-            "Hortencia", "Gerda", "Wen", "Ilana", "Sibyl", "Candida", "Victorina", "Chantell", "Casie", "Emeline",
-            "Dominica", "Cecila", "Delora", "Miesha", "Nova", "Sally", "Ronald", "Charlette", "Francisca", "Mina",
-            "Jenna", "Loraine", "Felisa", "Lulu", "Page", "Lyda", "Babara", "Flor", "Walter", "Chan", "Sherika",
-            "Kala", "Luna", "Vada", "Syreeta", "Slyvia", "Karin", "Renata", "Robbi", "Glenda", "Delsie", "Lizzie",
-            "Genia", "Caitlin", "Bebe", "Cory", "Sam", "Leslee", "Elva", "Caren", "Kasie", "Leticia", "Shannan",
-            "Vickey", "Sandie", "Kyle", "Chang", "Terrilyn", "Sandra", "Elida", "Marketta", "Elsy", "Tu", "Carman",
-            "Ashlie", "Vernia", "Albertine", "Vivian", "Elba", "Bong", "Margy", "Janetta", "Xiao", "Teofila", "Danyel",
-            "Nickole", "Aleisha", "Tera", "Cleotilde", "Dara", "Paulita", "Isela", "Maricela", "Rozella", "Marivel",
-            "Aurora", "Melissa", "Carylon", "Delinda", "Marvella", "Candelaria", "Deidre", "Tawanna", "Myrtie",
-            "Milagro", "Emilie", "Coretta", "Ivette", "Suzann", "Ammie", "Lucina", "Lory", "Tena", "Eleanor",
-            "Cherlyn", "Tiana", "Brianna", "Myra", "Flo", "Carisa", "Kandi", "Erlinda", "Jacqulyn", "Fermina", "Riva",
-            "Palmira", "Lindsay", "Annmarie", "Tamiko", "Carline", "Amelia", "Quiana", "Lashawna", "Veola", "Belva",
-            "Marsha", "Verlene", "Alex", "Leisha", "Camila", "Mirtha", "Melva", "Lina", "Arla", "Cythia", "Towanda",
-            "Aracelis", "Tasia", "Aurore", "Trinity", "Bernadine", "Farrah", "Deneen", "Ines", "Betty", "Lorretta",
-            "Dorethea", "Hertha", "Rochelle", "Juli", "Shenika", "Yung", "Lavon", "Deeanna", "Nakia", "Lynnette",
-            "Dinorah", "Nery", "Elene", "Carolee", "Mira", "Franchesca", "Lavonda", "Leida", "Paulette", "Dorine",
-            "Allegra", "Keva", "Jeffrey", "Bernardina", "Maryln", "Yoko", "Faviola", "Jayne", "Lucilla", "Charita",
-            "Ewa", "Ella", "Maggie", "Ivey", "Bettie", "Jerri", "Marni", "Bibi", "Sabrina", "Sarah", "Marleen",
-            "Katherin", "Remona", "Jamika", "Antonina", "Oliva", "Lajuana", "Fonda", "Sigrid", "Yael", "Billi",
-            "Verona", "Arminda", "Mirna", "Tesha", "Katheleen", "Bonita", "Kamilah", "Patrica", "Julio", "Shaina",
-            "Mellie", "Denyse", "Deandrea", "Alena", "Meg", "Kizzie", "Krissy", "Karly", "Alleen", "Yahaira", "Lucie",
-            "Karena", "Elaine", "Eloise", "Buena", "Marianela", "Renee", "Nan", "Carolynn", "Windy", "Avril", "Jane",
-            "Vida", "Thea", "Marvel", "Rosaline", "Tifany", "Robena", "Azucena", "Carlota", "Mindi", "Andera", "Jenny",
-            "Courtney", "Lyndsey", "Willette", "Kristie", "Shaniqua", "Tabatha", "Ngoc", "Una", "Marlena", "Louetta",
-            "Vernie", "Brandy", "Jacquelyne", "Jenelle", "Elna", "Erminia", "Ida", "Audie", "Louis", "Marisol",
-            "Shawana", "Harriette", "Karol", "Kitty", "Esmeralda", "Vivienne", "Eloisa", "Iris", "Jeanice", "Cammie",
-            "Jacinda", "Shena", "Floy", "Theda", "Lourdes", "Jayna", "Marg", "Kati", "Tanna", "Rosalyn", "Maxima",
-            "Soon", "Angelika", "Shonna", "Merle", "Kassandra", "Deedee", "Heidi", "Marti", "Renae", "Arleen",
-            "Alfredia", "Jewell", "Carley", "Pennie", "Corina", "Tonisha", "Natividad", "Lilliana", "Darcie", "Shawna",
-            "Angel", "Piedad", "Josefa", "Rebbeca", "Natacha", "Nenita", "Petrina", "Carmon", "Chasidy", "Temika",
-            "Dennise", "Renetta", "Augusta", "Shirlee", "Valeri", "Casimira", "Janay", "Berniece", "Deborah", "Yaeko",
-            "Mimi", "Digna", "Irish", "Cher", "Yong", "Lucila", "Jimmie", "Junko", "Lezlie", "Waneta", "Sandee",
-            "Marquita", "Eura", "Freeda", "Annabell", "Laree", "Jaye", "Wendy", "Toshia", "Kylee", "Aleta", "Emiko",
-            "Clorinda", "Sixta", "Audrea", "Juanita", "Birdie", "Reita", "Latanya", "Nia", "Leora", "Laurine",
-            "Krysten", "Jerrie", "Chantel", "Ira", "Sena", "Andre", "Jann", "Marla", "Precious", "Katy", "Gabrielle",
-            "Yvette", "Brook", "Shirlene", "Eldora", "Laura", "Milda", "Euna", "Jettie", "Debora", "Lise", "Edythe",
-            "Leandra", "Shandi", "Araceli", "Johanne", "Nieves", "Denese", "Carmelita", "Nohemi", "Annice", "Natalie",
-            "Yolande", "Jeffie", "Vashti", "Vickie", "Obdulia", "Youlanda", "Lupe", "Tomoko", "Monserrate", "Domitila",
-            "Etsuko", "Adrienne", "Lakesha", "Melissia", "Odessa", "Meagan", "Veronika", "Jolyn", "Isabelle", "Leah",
-            "Rhiannon", "Gianna", "Audra", "Sommer", "Renate", "Perla", "Thao", "Myong", "Lavette", "Mark", "Emilia",
-            "Ariane", "Karl", "Dorie", "Jacquie", "Mia", "Malka", "Shenita", "Tashina", "Christine", "Cherri", "Roni",
-            "Fran", "Mildred", "Sara", "Clarissa", "Fredia", "Elease", "Samuel", "Earlene", "Vernita", "Mae", "Concha",
-            "Renea", "Tamekia", "Hye", "Ingeborg", "Tessa", "Kelly", "Kristin", "Tam", "Sacha", "Kanisha", "Jillian",
-            "Tiffanie", "Ashlee", "Madelyn", "Donya", "Clementine", "Mickie", "My", "Zena", "Terrie", "Samatha",
-            "Gertie", "Tarra", "Natalia", "Sharlene", "Evie", "Shalon", "Rosalee", "Numbers", "Jodi", "Hattie",
-            "Naoma", "Valene", "Whitley", "Claude", "Alline", "Jeanne", "Camie", "Maragret", "Viola", "Kris", "Marlo",
-            "Arcelia", "Shari", "Jalisa", "Corrie", "Eleonor", "Angelyn", "Merry", "Lauren", "Melita", "Gita",
-            "Elenor", "Aurelia", "Janae", "Lyndia", "Margeret", "Shawanda", "Rolande", "Shirl", "Madeleine", "Celinda",
-            "Jaleesa", "Shemika", "Joye", "Tisa", "Trudie", "Kathrine", "Clarita", "Dinah", "Georgia", "Antoinette",
-            "Janis", "Suzette", "Sherri", "Herta", "Arie", "Hedy", "Cassi", "Audrie", "Caryl", "Jazmine", "Jessica",
-            "Beverly", "Elizbeth", "Marylee", "Londa", "Fredericka", "Argelia", "Nana", "Donnette", "Damaris",
-            "Hailey", "Jamee", "Kathlene", "Glayds", "Lydia", "Apryl", "Verla", "Adam", "Concepcion", "Zelda",
-            "Shonta", "Vernice", "Detra", "Meghann", "Sherley", "Sheri", "Kiyoko", "Margarita", "Adaline", "Mariela",
-            "Velda", "Ailene", "Juliane", "Aiko", "Edyth", "Cecelia", "Shavon", "Florance", "Madeline", "Rheba",
-            "Deann", "Ignacia", "Odelia", "Heide", "Mica", "Jennette", "Maricruz", "Ouida", "Darcy", "Laure",
-            "Justina", "Amada", "Laine", "Cruz", "Sunny", "Francene", "Roxanna", "Nam", "Nancie", "Deanna", "Letty",
-            "Britni", "Kazuko", "Lacresha", "Simon", "Caleb", "Milton", "Colton", "Travis", "Miles", "Jonathan",
-            "Logan", "Rolf", "Emilio", "Roberto", "Marcus", "Tim", "Delmar", "Devon", "Kurt", "Edward", "Jeffrey",
-            "Elvis", "Alfonso", "Blair", "Wm", "Sheldon", "Leonel", "Michal", "Federico", "Jacques", "Leslie",
-            "Augustine", "Hugh", "Brant", "Hong", "Sal", "Modesto", "Curtis", "Jefferey", "Adam", "John", "Glenn",
-            "Vance", "Alejandro", "Refugio", "Lucio", "Demarcus", "Chang", "Huey", "Neville", "Preston", "Bert",
-            "Abram", "Foster", "Jamison", "Kirby", "Erich", "Manual", "Dustin", "Derrick", "Donnie", "Jospeh", "Chris",
-            "Josue", "Stevie", "Russ", "Stanley", "Nicolas", "Samuel", "Waldo", "Jake", "Max", "Ernest", "Reinaldo",
-            "Rene", "Gale", "Morris", "Nathan", "Maximo", "Courtney", "Theodore", "Octavio", "Otha", "Delmer",
-            "Graham", "Dean", "Lowell", "Myles", "Colby", "Boyd", "Adolph", "Jarrod", "Nick", "Mark", "Clinton", "Kim",
-            "Sonny", "Dalton", "Tyler", "Jody", "Orville", "Luther", "Rubin", "Hollis", "Rashad", "Barton", "Vicente",
-            "Ted", "Rick", "Carmine", "Clifton", "Gayle", "Christopher", "Jessie", "Bradley", "Clay", "Theo", "Josh",
-            "Mitchell", "Boyce", "Chung", "Eugenio", "August", "Norbert", "Sammie", "Jerry", "Adan", "Edmundo",
-            "Homer", "Hilton", "Tod", "Kirk", "Emmett", "Milan", "Quincy", "Jewell", "Herb", "Steve", "Carmen",
-            "Bobby", "Odis", "Daron", "Jeremy", "Carl", "Hunter", "Tuan", "Thurman", "Asa", "Brenton", "Shane",
-            "Donny", "Andreas", "Teddy", "Dario", "Cyril", "Hoyt", "Teodoro", "Vincenzo", "Hilario", "Daren",
-            "Agustin", "Marquis", "Ezekiel", "Brendan", "Johnson", "Alden", "Richie", "Granville", "Chad", "Joseph",
-            "Lamont", "Jordon", "Gilberto", "Chong", "Rosendo", "Eddy", "Rob", "Dewitt", "Andre", "Titus", "Russell",
-            "Rigoberto", "Dick", "Garland", "Gabriel", "Hank", "Darius", "Ignacio", "Lazaro", "Johnie", "Mauro",
-            "Edmund", "Trent", "Harris", "Osvaldo", "Marvin", "Judson", "Rodney", "Randall", "Renato", "Richard",
-            "Denny", "Jon", "Doyle", "Cristopher", "Wilson", "Christian", "Jamie", "Roland", "Ken", "Tad", "Romeo",
-            "Seth", "Quinton", "Byron", "Ruben", "Darrel", "Deandre", "Broderick", "Harold", "Ty", "Monroe", "Landon",
-            "Mohammed", "Angel", "Arlen", "Elias", "Andres", "Carlton", "Numbers", "Tony", "Thaddeus", "Issac",
-            "Elmer", "Antoine", "Ned", "Fermin", "Grover", "Benito", "Abdul", "Cortez", "Eric", "Maxwell", "Coy",
-            "Gavin", "Rich", "Andy", "Del", "Giovanni", "Major", "Efren", "Horacio", "Joaquin", "Charles", "Noah",
-            "Deon", "Pasquale", "Reed", "Fausto", "Jermaine", "Irvin", "Ray", "Tobias", "Carter", "Yong", "Jorge",
-            "Brent", "Daniel", "Zane", "Walker", "Thad", "Shaun", "Jaime", "Mckinley", "Bradford", "Nathanial",
-            "Jerald", "Aubrey", "Virgil", "Abel", "Philip", "Chester", "Chadwick", "Dominick", "Britt", "Emmitt",
-            "Ferdinand", "Julian", "Reid", "Santos", "Dwain", "Morgan", "James", "Marion", "Micheal", "Eddie", "Brett",
-            "Stacy", "Kerry", "Dale", "Nicholas", "Darrick", "Freeman", "Scott", "Newton", "Sherman", "Felton",
-            "Cedrick", "Winfred", "Brad", "Fredric", "Dewayne", "Virgilio", "Reggie", "Edgar", "Heriberto", "Shad",
-            "Timmy", "Javier", "Nestor", "Royal", "Lynn", "Irwin", "Ismael", "Jonas", "Wiley", "Austin", "Kieth",
-            "Gonzalo", "Paris", "Earnest", "Arron", "Jarred", "Todd", "Erik", "Maria", "Chauncey", "Neil", "Conrad",
-            "Maurice", "Roosevelt", "Jacob", "Sydney", "Lee", "Basil", "Louis", "Rodolfo", "Rodger", "Roman", "Corey",
-            "Ambrose", "Cristobal", "Sylvester", "Benton", "Franklin", "Marcelo", "Guillermo", "Toby", "Jeramy",
-            "Donn", "Danny", "Dwight", "Clifford", "Valentine", "Matt", "Jules", "Kareem", "Ronny", "Lonny", "Son",
-            "Leopoldo", "Dannie", "Gregg", "Dillon", "Orlando", "Weston", "Kermit", "Damian", "Abraham", "Walton",
-            "Adrian", "Rudolf", "Will", "Les", "Norberto", "Fred", "Tyrone", "Ariel", "Terry", "Emmanuel", "Anderson",
-            "Elton", "Otis", "Derek", "Frankie", "Gino", "Lavern", "Jarod", "Kenny", "Dane", "Keenan", "Bryant",
-            "Eusebio", "Dorian", "Ali", "Lucas", "Wilford", "Jeremiah", "Warner", "Woodrow", "Galen", "Bob",
-            "Johnathon", "Amado", "Michel", "Harry", "Zachery", "Taylor", "Booker", "Hershel", "Mohammad", "Darrell",
-            "Kyle", "Stuart", "Marlin", "Hyman", "Jeffery", "Sidney", "Merrill", "Roy", "Garrett", "Porter", "Kenton",
-            "Giuseppe", "Terrance", "Trey", "Felix", "Buster", "Von", "Jackie", "Linwood", "Darron", "Francisco",
-            "Bernie", "Diego", "Brendon", "Cody", "Marco", "Ahmed", "Antonio", "Vince", "Brooks", "Kendrick", "Ross",
-            "Mohamed", "Jim", "Benny", "Gerald", "Pablo", "Charlie", "Antony", "Werner", "Hipolito", "Minh", "Mel",
-            "Derick", "Armand", "Fidel", "Lewis", "Donnell", "Desmond", "Vaughn", "Guadalupe", "Keneth", "Rodrick",
-            "Spencer", "Chas", "Gus", "Harlan", "Wes", "Carmelo", "Jefferson", "Gerard", "Jarvis", "Haywood", "Hayden",
-            "Sergio", "Gene", "Edgardo", "Colin", "Horace", "Dominic", "Aldo", "Adolfo", "Juan", "Man", "Lenard",
-            "Clement", "Everett", "Hal", "Bryon", "Mason", "Emerson", "Earle", "Laurence", "Columbus", "Lamar",
-            "Douglas", "Ian", "Fredrick", "Marc", "Loren", "Wallace", "Randell", "Noble", "Ricardo", "Rory", "Lindsey",
-            "Boris", "Bill", "Carlos", "Domingo", "Grant", "Craig", "Ezra", "Matthew", "Van", "Rudy", "Danial",
-            "Brock", "Maynard", "Vincent", "Cole", "Damion", "Ellsworth", "Marcel", "Markus", "Rueben", "Tanner",
-            "Reyes", "Hung", "Kennith", "Lindsay", "Howard", "Ralph", "Jed", "Monte", "Garfield", "Avery", "Bernardo",
-            "Malcolm", "Sterling", "Ezequiel", "Kristofer", "Luciano", "Casey", "Rosario", "Ellis", "Quintin",
-            "Trevor", "Miquel", "Jordan", "Arthur", "Carson", "Tyron", "Grady", "Walter", "Jonathon", "Ricky",
-            "Bennie", "Terrence", "Dion", "Dusty", "Roderick", "Isaac", "Rodrigo", "Harrison", "Zack", "Dee", "Devin",
-            "Rey", "Ulysses", "Clint", "Greg", "Dino", "Frances", "Wade", "Franklyn", "Jude", "Bradly", "Salvador",
-            "Rocky", "Weldon", "Lloyd", "Milford", "Clarence", "Alec", "Allan", "Bobbie", "Oswaldo", "Wilfred",
-            "Raleigh", "Shelby", "Willy", "Alphonso", "Arnoldo", "Robbie", "Truman", "Nicky", "Quinn", "Damien",
-            "Lacy", "Marcos", "Parker", "Burt", "Carroll", "Denver", "Buck", "Dong", "Normand", "Billie", "Edwin",
-            "Troy", "Arden", "Rusty", "Tommy", "Kenneth", "Leo", "Claud", "Joel", "Kendall", "Dante", "Milo", "Cruz",
-            "Lucien", "Ramon", "Jarrett", "Scottie", "Deshawn", "Ronnie", "Pete", "Alonzo", "Whitney", "Stefan",
-            "Sebastian", "Edmond", "Enrique", "Branden", "Leonard", "Loyd", "Olin", "Ron", "Rhett", "Frederic",
-            "Orval", "Tyrell", "Gail", "Eli", "Antonia", "Malcom", "Sandy", "Stacey", "Nickolas", "Hosea", "Santo",
-            "Oscar", "Fletcher", "Dave", "Patrick", "Dewey", "Bo", "Vito", "Blaine", "Randy", "Robin", "Winston",
-            "Sammy", "Edwardo", "Manuel", "Valentin", "Stanford", "Filiberto", "Buddy", "Zachariah", "Johnnie",
-            "Elbert", "Paul", "Isreal", "Jerrold", "Leif", "Owen", "Sung", "Junior", "Raphael", "Josef", "Donte",
-            "Allen", "Florencio", "Raymond", "Lauren", "Collin", "Eliseo", "Bruno", "Martin", "Lyndon", "Kurtis",
-            "Salvatore", "Erwin", "Michael", "Sean", "Davis", "Alberto", "King", "Rolland", "Joe", "Tory", "Chase",
-            "Dallas", "Vernon", "Beau", "Terrell", "Reynaldo", "Monty", "Jame", "Dirk", "Florentino", "Reuben", "Saul",
-            "Emory", "Esteban", "Michale", "Claudio", "Jacinto", "Kelley", "Levi", "Andrea", "Lanny", "Wendell",
-            "Elwood", "Joan", "Felipe", "Palmer", "Elmo", "Lawrence", "Hubert", "Rudolph", "Duane", "Cordell",
-            "Everette", "Mack", "Alan", "Efrain", "Trenton", "Bryan", "Tom", "Wilmer", "Clyde", "Chance", "Lou",
-            "Brain", "Justin", "Phil", "Jerrod", "George", "Kris", "Cyrus", "Emery", "Rickey", "Lincoln", "Renaldo",
-            "Mathew", "Luke", "Dwayne", "Alexis", "Jackson", "Gil", "Marty", "Burton", "Emil", "Glen", "Willian",
-            "Clemente", "Keven", "Barney", "Odell", "Reginald", "Aurelio", "Damon", "Ward", "Gustavo", "Harley",
-            "Peter", "Anibal", "Arlie", "Nigel", "Oren", "Zachary", "Scot", "Bud", "Wilbert", "Bart", "Josiah",
-            "Marlon", "Eldon", "Darryl", "Roger", "Anthony", "Omer", "Francis", "Patricia", "Moises", "Chuck",
-            "Waylon", "Hector", "Jamaal", "Cesar", "Julius", "Rex", "Norris", "Ollie", "Isaias", "Quentin", "Graig",
-            "Lyle", "Jeffry", "Karl", "Lester", "Danilo", "Mike", "Dylan", "Carlo", "Ryan", "Leon", "Percy", "Lucius",
-            "Jamel", "Lesley", "Joey", "Cornelius", "Rico", "Arnulfo", "Chet", "Margarito", "Ernie", "Nathanael",
-            "Amos", "Cleveland", "Luigi", "Alfonzo", "Phillip", "Clair", "Elroy", "Alva", "Hans", "Shon", "Gary",
-            "Jesus", "Cary", "Silas", "Keith", "Israel", "Willard", "Randolph", "Dan", "Adalberto", "Claude",
-            "Delbert", "Garry", "Mary", "Larry", "Riley", "Robt", "Darwin", "Barrett", "Steven", "Kelly", "Herschel",
-            "Darnell", "Scotty", "Armando", "Miguel", "Lawerence", "Wesley", "Garth", "Carol", "Micah", "Alvin",
-            "Billy", "Earl", "Pat", "Brady", "Cory", "Carey", "Bernard", "Jayson", "Nathaniel", "Gaylord", "Archie",
-            "Dorsey", "Erasmo", "Angelo", "Elisha", "Long", "Augustus", "Hobert", "Drew", "Stan", "Sherwood",
-            "Lorenzo", "Forrest", "Shawn", "Leigh", "Hiram", "Leonardo", "Gerry", "Myron", "Hugo", "Alvaro", "Leland",
-            "Genaro", "Jamey", "Stewart", "Elden", "Irving", "Olen", "Antone", "Freddy", "Lupe", "Joshua", "Gregory",
-            "Andrew", "Sang", "Wilbur", "Gerardo", "Merlin", "Williams", "Johnny", "Alex", "Tommie", "Jimmy",
-            "Donovan", "Dexter", "Gaston", "Tracy", "Jeff", "Stephen", "Berry", "Anton", "Darell", "Fritz", "Willis",
-            "Noel", "Mariano", "Crawford", "Zoey", "Alex", "Brianna", "Carlie", "Lloyd", "Cal", "Astor", "Randolf",
-            "Magdalene", "Trevelyan", "Terance", "Roy", "Kermit", "Harriett", "Crystal", "Laurinda", "Kiersten",
-            "Phyllida", "Liz", "Bettie", "Rena", "Colten", "Berenice", "Sindy", "Wilma", "Amos", "Candi", "Ritchie",
-            "Dirk", "Kathlyn", "Callista", "Anona", "Flossie", "Sterling", "Calista", "Regan", "Erica", "Jeana",
-            "Keaton", "York", "Nolan", "Daniel", "Benton", "Tommie", "Serenity", "Deanna", "Chas", "Heron", "Marlyn",
-            "Xylia", "Tristin", "Lyndon", "Andriana", "Madelaine", "Maddison", "Leila", "Chantelle", "Audrey",
-            "Connor", "Daley", "Tracee", "Tilda", "Eliot", "Merle", "Linwood", "Kathryn", "Silas", "Alvina",
-            "Phinehas", "Janis", "Alvena", "Zubin", "Gwendolen", "Caitlyn", "Bertram", "Hailee", "Idelle", "Homer",
-            "Jannah", "Delbert", "Rhianna", "Cy", "Jefferson", "Wayland", "Nona", "Tempest", "Reed", "Jenifer",
-            "Ellery", "Nicolina", "Aldous", "Prince", "Lexia", "Vinnie", "Doug", "Alberic", "Kayleen", "Woody",
-            "Rosanne", "Ysabel", "Skyler", "Twyla", "Geordie", "Leta", "Clive", "Aaron", "Scottie", "Celeste", "Chuck",
-            "Erle", "Lallie", "Jaycob", "Ray", "Carrie", "Laurita", "Noreen", "Meaghan", "Ulysses", "Andy", "Drogo",
-            "Dina", "Yasmin", "Mya", "Luvenia", "Urban", "Jacob", "Laetitia", "Sherry", "Love", "Michaela", "Deonne",
-            "Summer", "Brendon", "Sheena", "Mason", "Jayson", "Linden", "Salal", "Darrell", "Diana", "Hudson",
-            "Lennon", "Isador", "Charley", "April", "Ralph", "James", "Mina", "Jolyon", "Laurine", "Monna", "Carita",
-            "Munro", "Elsdon", "Everette", "Radclyffe", "Darrin", "Herbert", "Gawain", "Sheree", "Trudy", "Emmaline",
-            "Kassandra", "Rebecca", "Basil", "Jen", "Don", "Osborne", "Lilith", "Hannah", "Fox", "Rupert", "Paulene",
-            "Darius", "Wally", "Baptist", "Sapphire", "Tia", "Sondra", "Kylee", "Ashton", "Jepson", "Joetta", "Val",
-            "Adela", "Zacharias", "Zola", "Marmaduke", "Shannah", "Posie", "Oralie", "Brittany", "Ernesta", "Raymund",
-            "Denzil", "Daren", "Roosevelt", "Nelson", "Fortune", "Mariel", "Nick", "Jaden", "Upton", "Oz", "Margaux",
-            "Precious", "Albert", "Bridger", "Jimmy", "Nicola", "Rosalynne", "Keith", "Walt", "Della", "Joanna",
-            "Xenia", "Esmeralda", "Major", "Simon", "Rexana", "Stacy", "Calanthe", "Sherley", "Kaitlyn", "Graham",
-            "Ramsey", "Abbey", "Madlyn", "Kelvin", "Bill", "Rue", "Monica", "Caileigh", "Laraine", "Booker", "Jayna",
-            "Greta", "Jervis", "Sherman", "Kendrick", "Tommy", "Iris", "Geffrey", "Kaelea", "Kerr", "Garrick", "Jep",
-            "Audley", "Nic", "Bronte", "Beulah", "Patricia", "Jewell", "Deidra", "Cory", "Everett", "Harper",
-            "Charity", "Godfrey", "Jaime", "Sinclair", "Talbot", "Dayna", "Cooper", "Rosaline", "Jennie", "Eileen",
-            "Latanya", "Corinna", "Roxie", "Caesar", "Charles", "Pollie", "Lindsey", "Sorrel", "Dwight", "Jocelyn",
-            "Weston", "Shyla", "Valorie", "Bessie", "Josh", "Lessie", "Dayton", "Kathi", "Chasity", "Wilton", "Adam",
-            "William", "Ash", "Angela", "Ivor", "Ria", "Jazmine", "Hailey", "Jo", "Silvestra", "Ernie", "Clifford",
-            "Levi", "Matilda", "Quincey", "Camilla", "Delicia", "Phemie", "Laurena", "Bambi", "Lourdes", "Royston",
-            "Chastity", "Lynwood", "Elle", "Brenda", "Phoebe", "Timothy", "Raschelle", "Lilly", "Burt", "Rina",
-            "Rodney", "Maris", "Jaron", "Wilf", "Harlan", "Audra", "Vincent", "Elwyn", "Drew", "Wynter", "Ora",
-            "Lissa", "Virgil", "Xavier", "Chad", "Ollie", "Leyton", "Karolyn", "Skye", "Roni", "Gladys", "Dinah",
-            "Penny", "August", "Osmund", "Whitaker", "Brande", "Cornell", "Phil", "Zara", "Kilie", "Gavin", "Coty",
-            "Randy", "Teri", "Keira", "Pru", "Clemency", "Kelcey", "Nevil", "Poppy", "Gareth", "Christabel", "Bastian",
-            "Wynonna", "Roselyn", "Goddard", "Collin", "Trace", "Neal", "Effie", "Denys", "Virginia", "Richard",
-            "Isiah", "Harrietta", "Gaylord", "Diamond", "Trudi", "Elaine", "Jemmy", "Gage", "Annabel", "Quincy", "Syd",
-            "Marianna", "Philomena", "Aubree", "Kathie", "Jacki", "Kelley", "Bess", "Cecil", "Maryvonne", "Kassidy",
-            "Anselm", "Dona", "Darby", "Jamison", "Daryl", "Darell", "Teal", "Lennie", "Bartholomew", "Katie",
-            "Maybelline", "Kimball", "Elvis", "Les", "Flick", "Harley", "Beth", "Bidelia", "Montague", "Helen", "Ozzy",
-            "Stef", "Debra", "Maxene", "Stefanie", "Russ", "Avril", "Johnathan", "Orson", "Chelsey", "Josephine",
-            "Deshaun", "Wendell", "Lula", "Ferdinanda", "Greg", "Brad", "Kynaston", "Dena", "Russel", "Robertina",
-            "Misti", "Leon", "Anjelica", "Bryana", "Myles", "Judi", "Curtis", "Davin", "Kristia", "Chrysanta",
-            "Hayleigh", "Hector", "Osbert", "Eustace", "Cary", "Tansy", "Cayley", "Maryann", "Alissa", "Ike",
-            "Tranter", "Reina", "Alwilda", "Sidony", "Columbine", "Astra", "Jillie", "Stephania", "Jonah", "Kennedy",
-            "Ferdinand", "Allegria", "Donella", "Kelleigh", "Darian", "Eldreda", "Jayden", "Herbie", "Jake", "Winston",
-            "Vi", "Annie", "Cherice", "Hugo", "Tricia", "Haydee", "Cassarah", "Darden", "Mallory", "Alton", "Hadley",
-            "Romayne", "Lacey", "Ern", "Alayna", "Cecilia", "Seward", "Tilly", "Edgar", "Concordia", "Ibbie", "Dahlia",
-            "Oswin", "Stu", "Brett", "Maralyn", "Kristeen", "Dotty", "Robyn", "Nessa", "Tresha", "Guinevere",
-            "Emerson", "Haze", "Lyn", "Henderson", "Lexa", "Jaylen", "Gail", "Lizette", "Tiara", "Robbie", "Destiny",
-            "Alice", "Livia", "Rosy", "Leah", "Jan", "Zach", "Vita", "Gia", "Micheal", "Rowina", "Alysha", "Bobbi",
-            "Delores", "Osmond", "Karaugh", "Wilbur", "Kasandra", "Renae", "Kaety", "Dora", "Gaye", "Amaryllis",
-            "Katelyn", "Dacre", "Prudence", "Ebony", "Camron", "Jerrold", "Vivyan", "Randall", "Donna", "Misty",
-            "Damon", "Selby", "Esmund", "Rian", "Garry", "Julius", "Raelene", "Clement", "Dom", "Tibby", "Moss",
-            "Millicent", "Gwendoline", "Berry", "Ashleigh", "Lilac", "Quin", "Vere", "Creighton", "Harriet", "Malvina",
-            "Lianne", "Pearle", "Kizzie", "Kara", "Petula", "Jeanie", "Maria", "Pacey", "Victoria", "Huey", "Toni",
-            "Rose", "Wallis", "Diggory", "Josiah", "Delma", "Keysha", "Channing", "Prue", "Lee", "Ryan", "Sidney",
-            "Valerie", "Clancy", "Ezra", "Gilbert", "Clare", "Laz", "Crofton", "Mike", "Annabella", "Tara", "Eldred",
-            "Arthur", "Jaylon", "Peronel", "Paden", "Dot", "Marian", "Amyas", "Alexus", "Esmond", "Abbie", "Stanley",
-            "Brittani", "Vickie", "Errol", "Kimberlee", "Uland", "Ebenezer", "Howie", "Eveline", "Andrea", "Trish",
-            "Hopkin", "Bryanna", "Temperance", "Valarie", "Femie", "Alix", "Terrell", "Lewin", "Lorrin", "Happy",
-            "Micah", "Rachyl", "Sloan", "Gertrude", "Elizabeth", "Dorris", "Andra", "Bram", "Gary", "Jeannine",
-            "Maurene", "Irene", "Yolonda", "Jonty", "Coleen", "Cecelia", "Chantal", "Stuart", "Caris", "Ros",
-            "Kaleigh", "Mirabelle", "Kolby", "Primrose", "Susannah", "Ginny", "Jinny", "Dolly", "Lettice", "Sonny",
-            "Melva", "Ernest", "Garret", "Reagan", "Trenton", "Gallagher", "Edwin", "Nikolas", "Corrie", "Lynette",
-            "Ettie", "Sly", "Debbi", "Eudora", "Brittney", "Tacey", "Marius", "Anima", "Gordon", "Olivia", "Kortney",
-            "Shantel", "Kolleen", "Nevaeh", "Buck", "Sera", "Liliana", "Aric", "Kalyn", "Mick", "Libby", "Ingram",
-            "Alexandria", "Darleen", "Jacklyn", "Hughie", "Tyler", "Aida", "Ronda", "Deemer", "Taryn", "Laureen",
-            "Samantha", "Dave", "Hardy", "Baldric", "Montgomery", "Gus", "Ellis", "Titania", "Luke", "Chase", "Haidee",
-            "Mayra", "Isabell", "Trinity", "Milo", "Abigail", "Tacita", "Meg", "Hervey", "Natasha", "Sadie", "Holden",
-            "Dee", "Mansel", "Perry", "Randi", "Frederica", "Georgina", "Kolour", "Debbie", "Seraphina", "Elspet",
-            "Julyan", "Raven", "Zavia", "Jarvis", "Jaymes", "Grover", "Cairo", "Alea", "Jordon", "Braxton", "Donny",
-            "Rhoda", "Tonya", "Bee", "Alyssia", "Ashlyn", "Reanna", "Lonny", "Arlene", "Deb", "Jane", "Nikole",
-            "Bettina", "Harrison", "Tamzen", "Arielle", "Adelaide", "Faith", "Bridie", "Wilburn", "Fern", "Nan",
-            "Shaw", "Zeke", "Alan", "Dene", "Gina", "Alexa", "Bailey", "Sal", "Tammy", "Maximillian", "America",
-            "Sylvana", "Fitz", "Mo", "Marissa", "Cass", "Eldon", "Wilfrid", "Tel", "Joann", "Kendra", "Tolly",
-            "Leanne", "Ferdie", "Haven", "Lucas", "Marlee", "Cyrilla", "Red", "Phoenix", "Jazmin", "Carin", "Gena",
-            "Lashonda", "Tucker", "Genette", "Kizzy", "Winifred", "Melody", "Keely", "Kaylyn", "Radcliff", "Lettie",
-            "Foster", "Lyndsey", "Nicholas", "Farley", "Louisa", "Dana", "Dortha", "Francine", "Doran", "Bonita",
-            "Hal", "Sawyer", "Reginald", "Aislin", "Nathan", "Baylee", "Abilene", "Ladonna", "Maurine", "Shelly",
-            "Deandre", "Jasmin", "Roderic", "Tiffany", "Amanda", "Verity", "Wilford", "Gayelord", "Whitney", "Demelza",
-            "Kenton", "Alberta", "Kyra", "Tabitha", "Sampson", "Korey", "Lillian", "Edison", "Clayton", "Steph",
-            "Maya", "Dusty", "Jim", "Ronny", "Adrianne", "Bernard", "Harris", "Kiley", "Alexander", "Kisha", "Ethalyn",
-            "Patience", "Briony", "Indigo", "Aureole", "Makenzie", "Molly", "Sherilyn", "Barry", "Laverne", "Hunter",
-            "Rocky", "Tyreek", "Madalyn", "Phyliss", "Chet", "Beatrice", "Faye", "Lavina", "Madelyn", "Tracey",
-            "Gyles", "Patti", "Carlyn", "Stephanie", "Jackalyn", "Larrie", "Kimmy", "Isolda", "Emelina", "Lis",
-            "Zillah", "Cody", "Sheard", "Rufus", "Paget", "Mae", "Rexanne", "Luvinia", "Tamsen", "Rosanna", "Greig",
-            "Stacia", "Mabelle", "Quianna", "Lotus", "Delice", "Bradford", "Angus", "Cosmo", "Earlene", "Adrian",
-            "Arlie", "Noelle", "Sabella", "Isa", "Adelle", "Innocent", "Kirby", "Trixie", "Kenelm", "Nelda", "Melia",
-            "Kendal", "Dorinda", "Placid", "Linette", "Kam", "Sherisse", "Evan", "Ewart", "Janice", "Linton",
-            "Jacaline", "Charissa", "Douglas", "Aileen", "Kemp", "Oli", "Amethyst", "Rosie", "Nigella", "Sherill",
-            "Anderson", "Alanna", "Eric", "Claudia", "Jennifer", "Boniface", "Harriet", "Vernon", "Lucy", "Shawnee",
-            "Gerard", "Cecily", "Romey", "Randall", "Wade", "Lux", "Dawson", "Gregg", "Kade", "Roxanne", "Melinda",
-            "Rolland", "Rowanne", "Fannie", "Isidore", "Melia", "Harvie", "Salal", "Eleonor", "Jacquette", "Lavone",
-            "Shanika", "Tarquin", "Janet", "Josslyn", "Maegan", "Augusta", "Aubree", "Francene", "Martie", "Marisa",
-            "Tyreek", "Tatianna", "Caleb", "Sheridan", "Nellie", "Barbara", "Wat", "Jayla", "Esmaralda", "Graeme",
-            "Lavena", "Jemima", "Nikolas", "Triston", "Portia", "Kyla", "Marcus", "Raeburn", "Jamison", "Earl", "Wren",
-            "Leighton", "Lagina", "Lucasta", "Dina", "Amaranta", "Jessika", "Claud", "Bernard", "Winifred", "Ebba",
-            "Sammi", "Gall", "Chloe", "Ottoline", "Herbert", "Janice", "Gareth", "Channing", "Caleigh", "Kailee",
-            "Ralphie", "Tamzen", "Quincy", "Beaumont", "Albert", "Jadyn", "Violet", "Luanna", "Moriah", "Humbert",
-            "Jed", "Leona", "Hale", "Mitch", "Marlin", "Nivek", "Darwin", "Dirk", "Liliana", "Meadow", "Bernadine",
-            "Jorie", "Peyton", "Astra", "Roscoe", "Gina", "Lovell", "Jewel", "Romayne", "Rosy", "Imogene",
-            "Margaretta", "Lorinda", "Hopkin", "Bobby", "Flossie", "Bennie", "Horatio", "Jonah", "Lyn", "Deana",
-            "Juliana", "Blanch", "Wright", "Kendal", "Woodrow", "Tania", "Austyn", "Val", "Mona", "Charla", "Rudyard",
-            "Pamela", "Raven", "Zena", "Nicola", "Kaelea", "Conor", "Virgil", "Sonnie", "Goodwin", "Christianne",
-            "Linford", "Myron", "Denton", "Charita", "Brody", "Ginnie", "Harrison", "Jeanine", "Quin", "Isolda",
-            "Zoie", "Pearce", "Margie", "Larrie", "Angelina", "Marcia", "Jessamine", "Delilah", "Dick", "Luana",
-            "Delicia", "Lake", "Luvenia", "Vaughan", "Concordia", "Gayelord", "Cheyenne", "Felix", "Dorris", "Pen",
-            "Kristeen", "Parris", "Everitt", "Josephina", "Amy", "Tommie", "Adrian", "April", "Rosaline", "Zachery",
-            "Trace", "Phoebe", "Jenelle", "Kameron", "Katharine", "Media", "Colton", "Tad", "Quianna", "Kerenza",
-            "Greta", "Luvinia", "Pete", "Tonya", "Beckah", "Barbra", "Jon", "Tetty", "Corey", "Sylvana", "Kizzy",
-            "Korey", "Trey", "Haydee", "Penny", "Mandy", "Panda", "Coline", "Ramsey", "Sukie", "Annabel", "Sarina",
-            "Corbin", "Suzanna", "Rob", "Duana", "Shell", "Jason", "Eddy", "Rube", "Roseann", "Celia", "Brianne",
-            "Nerissa", "Jera", "Humphry", "Ashlynn", "Terrence", "Philippina", "Coreen", "Kolour", "Indiana", "Paget",
-            "Marlyn", "Hester", "Isbel", "Ocean", "Harris", "Leslie", "Vere", "Monroe", "Isabelle", "Bertie", "Clitus",
-            "Dave", "Alethea", "Lessie", "Louiza", "Madlyn", "Garland", "Wolf", "Lalo", "Donny", "Amabel", "Tianna",
-            "Louie", "Susie", "Mackenzie", "Renie", "Tess", "Marmaduke", "Gwendolen", "Bettina", "Beatrix", "Esmund",
-            "Minnie", "Carlie", "Barnabas", "Ruthie", "Honour", "Haylie", "Xavior", "Freddie", "Ericka", "Aretha",
-            "Edie", "Madelina", "Anson", "Tabby", "Derrick", "Jocosa", "Deirdre", "Aislin", "Chastity", "Abigail",
-            "Wynonna", "Zo", "Eldon", "Krystine", "Ghislaine", "Zavia", "Nolene", "Marigold", "Kelley", "Sylvester",
-            "Odell", "George", "Laurene", "Franklyn", "Clarice", "Mo", "Dustin", "Debbi", "Lina", "Tony", "Acacia",
-            "Hettie", "Natalee", "Marcie", "Brittany", "Elnora", "Rachel", "Dawn", "Basil", "Christal", "Anjelica",
-            "Fran", "Tawny", "Delroy", "Tameka", "Lillie", "Ceara", "Deanna", "Deshaun", "Ken", "Bradford", "Justina",
-            "Merle", "Draven", "Gretta", "Harriette", "Webster", "Nathaniel", "Anemone", "Coleen", "Ruth", "Chryssa",
-            "Hortensia", "Saffie", "Deonne", "Leopold", "Harlan", "Lea", "Eppie", "Lucinda", "Tilda", "Fanny", "Titty",
-            "Lockie", "Jepson", "Sherisse", "Maralyn", "Ethel", "Sly", "Ebenezer", "Canute", "Ella", "Freeman",
-            "Reuben", "Olivette", "Nona", "Rik", "Amice", "Kristine", "Kathie", "Jayne", "Jeri", "Mckenna", "Bertram",
-            "Kaylee", "Livia", "Gil", "Wallace", "Maryann", "Keeleigh", "Laurinda", "Doran", "Khloe", "Dakota",
-            "Yaron", "Kimberleigh", "Gytha", "Doris", "Marylyn", "Benton", "Linnette", "Esther", "Jakki", "Rowina",
-            "Marian", "Roselyn", "Norbert", "Maggie", "Caesar", "Phinehas", "Jerry", "Jasmine", "Antonette", "Miriam",
-            "Monna", "Maryvonne", "Jacquetta", "Bernetta", "Napier", "Annie", "Gladwin", "Sheldon", "Aric", "Elouise",
-            "Gawain", "Kristia", "Gabe", "Kyra", "Red", "Tod", "Dudley", "Lorraine", "Ryley", "Sabina", "Poppy",
-            "Leland", "Aileen", "Eglantine", "Alicia", "Jeni", "Addy", "Tiffany", "Geffrey", "Lavina", "Collin",
-            "Clover", "Vin", "Jerome", "Doug", "Vincent", "Florence", "Scarlet", "Celeste", "Desdemona", "Tiphanie",
-            "Kassandra", "Ashton", "Madison", "Art", "Magdalene", "Iona", "Josepha", "Anise", "Ferne", "Derek",
-            "Huffie", "Qiana", "Ysabel", "Tami", "Shannah", "Xavier", "Willard", "Winthrop", "Vickie", "Maura",
-            "Placid", "Tiara", "Reggie", "Elissa", "Isa", "Chrysanta", "Jeff", "Bessie", "Terri", "Amilia", "Brett",
-            "Daniella", "Damion", "Carolina", "Maximillian", "Travers", "Benjamin", "Oprah", "Darcy", "Yolanda",
-            "Nicolina", "Crofton", "Jarrett", "Kaitlin", "Shauna", "Keren", "Bevis", "Kalysta", "Sharron", "Alyssa",
-            "Blythe", "Zelma", "Caelie", "Norwood", "Billie", "Patrick", "Gary", "Cambria", "Tylar", "Mason", "Helen",
-            "Melyssa", "Gene", "Gilberta", "Carter", "Herbie", "Harmonie", "Leola", "Eugenia", "Clint", "Pauletta",
-            "Edwyna", "Georgina", "Teal", "Harper", "Izzy", "Dillon", "Kezia", "Evangeline", "Colene", "Madelaine",
-            "Zilla", "Rudy", "Dottie", "Caris", "Morton", "Marge", "Tacey", "Parker", "Troy", "Liza", "Lewin",
-            "Tracie", "Justine", "Dallas", "Linden", "Ray", "Loretta", "Teri", "Elvis", "Diane", "Julianna", "Manfred",
-            "Denise", "Eireen", "Ann", "Kenith", "Linwood", "Kathlyn", "Bernice", "Shelley", "Oswald", "Amedeus",
-            "Homer", "Tanzi", "Ted", "Ralphina", "Hyacinth", "Lotus", "Matthias", "Arlette", "Clark", "Cecil",
-            "Elspeth", "Alvena", "Noah", "Millard", "Brenden", "Cole", "Philipa", "Nina", "Thelma", "Iantha", "Reid",
-            "Jefferson", "Meg", "Elsie", "Shirlee", "Nathan", "Nancy", "Simona", "Racheal", "Carin", "Emory", "Delice",
-            "Kristi", "Karaugh", "Kaety", "Tilly", "Em", "Alanis", "Darrin", "Jerrie", "Hollis", "Cary", "Marly",
-            "Carita", "Jody", "Farley", "Hervey", "Rosalin", "Cuthbert", "Stewart", "Jodene", "Caileigh", "Briscoe",
-            "Dolores", "Sheree", "Eustace", "Nigel", "Detta", "Barret", "Rowland", "Kenny", "Githa", "Zoey", "Adela",
-            "Petronella", "Opal", "Coleman", "Niles", "Cyril", "Dona", "Alberic", "Allannah", "Jules", "Avalon",
-            "Hadley", "Thomas", "Renita", "Calanthe", "Heron", "Shawnda", "Chet", "Malina", "Manny", "Rina", "Frieda",
-            "Eveleen", "Deshawn", "Amos", "Raelene", "Paige", "Molly", "Nannie", "Ileen", "Brendon", "Milford",
-            "Unice", "Rebeccah", "Caedmon", "Gae", "Doreen", "Vivian", "Louis", "Raphael", "Vergil", "Lise", "Glenn",
-            "Karyn", "Terance", "Reina", "Jake", "Gordon", "Wisdom", "Isiah", "Gervase", "Fern", "Marylou", "Roddy",
-            "Justy", "Derick", "Shantelle", "Adam", "Chantel", "Madoline", "Emmerson", "Lexie", "Mickey", "Stephen",
-            "Dane", "Stacee", "Elwin", "Tracey", "Alexandra", "Ricky", "Ian", "Kasey", "Rita", "Alanna", "Georgene",
-            "Deon", "Zavier", "Ophelia", "Deforest", "Lowell", "Zubin", "Hardy", "Osmund", "Tabatha", "Debby",
-            "Katlyn", "Tallulah", "Priscilla", "Braden", "Wil", "Keziah", "Jen", "Aggie", "Korbin", "Lemoine",
-            "Barnaby", "Tranter", "Goldie", "Roderick", "Trina", "Emery", "Pris", "Sidony", "Adelle", "Tate", "Wilf",
-            "Zola", "Brande", "Chris", "Calanthia", "Lilly", "Kaycee", "Lashonda", "Jasmin", "Elijah", "Shantel",
-            "Simon", "Rosalind", "Jarod", "Kaylie", "Corrine", "Joselyn", "Archibald", "Mariabella", "Winton",
-            "Merlin", "Chad", "Ursula", "Kristopher", "Hewie", "Adrianna", "Lyndsay", "Jasmyn", "Tim", "Evette",
-            "Margaret", "Samson", "Bronte", "Terence", "Leila", "Candice", "Tori", "Jamey", "Coriander", "Conrad",
-            "Floyd", "Karen", "Lorin", "Maximilian", "Cairo", "Emily", "Yasmin", "Karolyn", "Bryan", "Lanny",
-            "Kimberly", "Rick", "Chaz", "Krystle", "Lyric", "Laura", "Garrick", "Flip", "Monty", "Brendan",
-            "Ermintrude", "Rayner", "Merla", "Titus", "Marva", "Patricia", "Leone", "Tracy", "Jaqueline", "Hallam",
-            "Delores", "Cressida", "Carlyle", "Leann", "Kelcey", "Laurence", "Ryan", "Reynold", "Mark", "Collyn",
-            "Audie", "Sammy", "Ellery", "Sallie", "Pamelia", "Adolph", "Lydia", "Titania", "Ron", "Bridger", "Aline",
-            "Read", "Kelleigh", "Weldon", "Irving", "Garey", "Diggory", "Evander", "Kylee", "Deidre", "Ormond",
-            "Laurine", "Reannon", "Arline", "Pat"
-
-    };
-
-    public static String[] jargon = { "wireless", "signal", "network", "3G", "plan", "touch-screen",
-            "customer-service", "reachability", "voice-command", "shortcut-menu", "customization", "platform", "speed",
-            "voice-clarity", "voicemail-service" };
-
-    public static String[] vendors = { "at&t", "verizon", "t-mobile", "sprint", "motorola", "samsung", "iphone" };
-
-    public static String[] org_list = { "Latsonity", "ganjalax", "Zuncan", "Lexitechno", "Hot-tech", "subtam",
-            "Coneflex", "Ganjatax", "physcane", "Tranzap", "Qvohouse", "Zununoing", "jaydax", "Keytech", "goldendexon",
-            "Villa-tech", "Trustbam", "Newcom", "Voltlane", "Ontohothex", "Ranhotfan", "Alphadax", "Transhigh",
-            "kin-ron", "Doublezone", "Solophase", "Vivaace", "silfind", "Basecone", "sonstreet", "Freshfix",
-            "Techitechi", "Kanelectrics", "linedexon", "Goldcity", "Newfase", "Technohow", "Zimcone", "Salthex",
-            "U-ron", "Solfix", "whitestreet", "Xx-technology", "Hexviafind", "over-it", "Strongtone", "Tripplelane",
-            "geomedia", "Scotcity", "Inchex", "Vaiatech", "Striptaxon", "Hatcom", "tresline", "Sanjodax", "freshdox",
-            "Sumlane", "Quadlane", "Newphase", "overtech", "Voltbam", "Icerunin", "Fixdintex", "Hexsanhex", "Statcode",
-            "Greencare", "U-electrics", "Zamcorporation", "Ontotanin", "Tanzimcare", "Groovetex", "Ganjastrip",
-            "Redelectronics", "Dandamace", "Whitemedia", "strongex", "Streettax", "highfax", "Mathtech", "Xx-drill",
-            "Sublamdox", "Unijobam", "Rungozoom", "Fixelectrics", "Villa-dox", "Ransaofan", "Plexlane", "itlab",
-            "Lexicone", "Fax-fax", "Viatechi", "Inchdox", "Kongreen", "Doncare", "Y-geohex", "Opeelectronics",
-            "Medflex", "Dancode", "Roundhex", "Labzatron", "Newhotplus", "Sancone", "Ronholdings", "Quoline",
-            "zoomplus", "Fix-touch", "Codetechno", "Tanzumbam", "Indiex", "Canline" };
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapter.java
deleted file mode 100644
index dcf3b51..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapter.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.runtime;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.util.logging.Level;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.dataset.adapter.StreamBasedAdapter;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
-
-public class GenericSocketFeedAdapter extends StreamBasedAdapter {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int port;
-    private SocketFeedServer socketFeedServer;
-
-    public GenericSocketFeedAdapter(ITupleParserFactory parserFactory, ARecordType outputType, int port,
-            IHyracksTaskContext ctx, int partition) throws AsterixException, IOException {
-        super(parserFactory, outputType, ctx, partition);
-        this.port = port;
-        this.socketFeedServer = new SocketFeedServer(outputType, port);
-    }
-
-    @Override
-    public void start(int partition, IFrameWriter writer) throws Exception {
-        super.start(partition, writer);
-    }
-
-    @Override
-    public InputStream getInputStream(int partition) throws IOException {
-        return socketFeedServer.getInputStream();
-    }
-
-    private static class SocketFeedServer {
-        private ServerSocket serverSocket;
-        private InputStream inputStream;
-
-        public SocketFeedServer(ARecordType outputtype, int port) throws IOException, AsterixException {
-            try {
-                serverSocket = new ServerSocket(port);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("port: " + port + " unusable ");
-                }
-            }
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Feed server configured to use port: " + port);
-            }
-        }
-
-        public InputStream getInputStream() {
-            Socket socket;
-            try {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("waiting for client at " + serverSocket.getLocalPort());
-                }
-                socket = serverSocket.accept();
-                inputStream = socket.getInputStream();
-            } catch (IOException e) {
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("Unable to create input stream required for feed ingestion");
-                }
-            }
-            return inputStream;
-        }
-
-        public void stop() throws IOException {
-            try {
-                serverSocket.close();
-            } catch (IOException ioe) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unable to close socket at " + serverSocket.getLocalPort());
-                }
-            }
-        }
-
-    }
-
-    @Override
-    public boolean stop() throws Exception {
-        socketFeedServer.stop();
-        return true;
-    }
-
-    @Override
-    public boolean handleException(Throwable e) {
-        try {
-            this.socketFeedServer = new SocketFeedServer((ARecordType) sourceDatatype, port);
-            return true;
-        } catch (Exception re) {
-            return false;
-        }
-    }
-}
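
For context, the GenericSocketFeedAdapter removed above wrapped a small single-client socket server (SocketFeedServer): it bound a ServerSocket on the configured port, blocked in accept() until one sender connected, and handed that socket's InputStream to the tuple parser. Below is a minimal, standalone sketch of that pattern; the class and its main method are hypothetical demo code, not part of this commit.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.ServerSocket;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    // Minimal illustration of the removed SocketFeedServer pattern:
    // bind a port, wait for a single client, read its stream until EOF.
    public class SingleClientSocketIngestSketch {
        public static void main(String[] args) throws Exception {
            int port = args.length > 0 ? Integer.parseInt(args[0]) : 10001;
            try (ServerSocket server = new ServerSocket(port)) {
                System.out.println("waiting for client at " + server.getLocalPort());
                try (Socket client = server.accept();
                        BufferedReader in = new BufferedReader(
                                new InputStreamReader(client.getInputStream(), StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = in.readLine()) != null) {
                        // The real adapter fed the raw stream to a tuple parser;
                        // here each received record is simply echoed.
                        System.out.println("record: " + line);
                    }
                }
            }
        }
    }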

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapterFactory.java
deleted file mode 100644
index e19d757..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/runtime/GenericSocketFeedAdapterFactory.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.runtime;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.util.AsterixRuntimeUtil;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
-
-/**
- * Factory class for creating @see{GenericSocketFeedAdapter} The
- * adapter listens at a port for receiving data (from external world).
- * Data received is transformed into Asterix Data Format (ADM).
- */
-public class GenericSocketFeedAdapterFactory implements IAdapterFactory {
-
-    private static final long serialVersionUID = 1L;
-
-    private ARecordType outputType;
-
-    private List<Pair<String, Integer>> sockets;
-
-    private Mode mode = Mode.IP;
-
-    private Map<String, String> configuration;
-
-    private ITupleParserFactory parserFactory;
-
-    public static final String KEY_SOCKETS = "sockets";
-
-    public static final String KEY_MODE = "address-type";
-
-    public static enum Mode {
-        NC,
-        IP
-    }
-
-    @Override
-    public String getAlias() {
-        return "socket_adapter";
-    }
-
-    public List<Pair<String, Integer>> getSockets() {
-        return sockets;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
-        this.configuration = configuration;
-        this.configureSockets(configuration);
-        this.configureFormat(outputType);
-        this.outputType = outputType;
-    }
-
-    private void configureFormat(ARecordType outputType2) {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
-        List<String> locations = new ArrayList<String>();
-        for (Pair<String, Integer> socket : sockets) {
-            locations.add(socket.first);
-        }
-        return new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
-    }
-
-    @Override
-    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        Pair<String, Integer> socket = sockets.get(partition);
-        return new GenericSocketFeedAdapter(parserFactory, outputType, socket.second, ctx, partition);
-    }
-
-    private void configureSockets(Map<String, String> configuration) throws Exception {
-        sockets = new ArrayList<Pair<String, Integer>>();
-        String modeValue = configuration.get(KEY_MODE);
-        if (modeValue != null) {
-            mode = Mode.valueOf(modeValue.trim().toUpperCase());
-        }
-        String socketsValue = configuration.get(KEY_SOCKETS);
-        if (socketsValue == null) {
-            throw new IllegalArgumentException("\'sockets\' parameter not specified as part of adapter configuration");
-        }
-        Map<InetAddress, Set<String>> ncMap = AsterixRuntimeUtil.getNodeControllerMap();
-        List<String> ncs = AsterixRuntimeUtil.getAllNodeControllers();
-        String[] socketsArray = socketsValue.split(",");
-        Random random = new Random();
-        for (String socket : socketsArray) {
-            String[] socketTokens = socket.split(":");
-            String host = socketTokens[0].trim();
-            int port = Integer.parseInt(socketTokens[1].trim());
-            Pair<String, Integer> p = null;
-            switch (mode) {
-                case IP:
-                    Set<String> ncsOnIp = ncMap.get(InetAddress.getByName(host));
-                    if (ncsOnIp == null || ncsOnIp.isEmpty()) {
-                        throw new IllegalArgumentException("Invalid host " + host
-                                + " as it is not part of the AsterixDB cluster. Valid choices are "
-                                + StringUtils.join(ncMap.keySet(), ", "));
-                    }
-                    String[] ncArray = ncsOnIp.toArray(new String[] {});
-                    String nc = ncArray[random.nextInt(ncArray.length)];
-                    p = new Pair<String, Integer>(nc, port);
-                    break;
-
-                case NC:
-                    p = new Pair<String, Integer>(host, port);
-                    if (!ncs.contains(host)) {
-                        throw new IllegalArgumentException(
-                                "Invalid NC " + host + " as it is not part of the AsterixDB cluster. Valid choices are "
-                                        + StringUtils.join(ncs, ", "));
-
-                    }
-                    break;
-            }
-            sockets.add(p);
-        }
-    }
-
-    @Override
-    public ARecordType getAdapterOutputType() {
-        return outputType;
-    }
-}
\ No newline at end of file
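
The factory removed above (GenericSocketFeedAdapterFactory) accepted a "sockets" parameter of the form host:port[,host:port...] plus an optional "address-type" of IP or NC, and created one adapter per socket. The following is a hedged restatement of that parsing for readers who only need the configuration format at a glance; the class and parseSockets helper are illustrative names, not AsterixDB API.

    import java.util.AbstractMap.SimpleImmutableEntry;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    // Illustrative restatement of the "sockets" parsing in the removed factory.
    public class SocketConfigSketch {

        static List<Map.Entry<String, Integer>> parseSockets(String socketsValue) {
            if (socketsValue == null) {
                throw new IllegalArgumentException("'sockets' parameter not specified");
            }
            List<Map.Entry<String, Integer>> result = new ArrayList<>();
            for (String socket : socketsValue.split(",")) {
                String[] tokens = socket.split(":"); // each entry is "host:port" or "nc-name:port"
                result.add(new SimpleImmutableEntry<>(tokens[0].trim(), Integer.parseInt(tokens[1].trim())));
            }
            return result;
        }

        public static void main(String[] args) {
            // With address-type=IP the first token must be the IP of a machine hosting
            // node controllers (one NC on that machine is picked at random); with
            // address-type=NC it is taken directly as a node controller name.
            System.out.println(parseSockets("127.0.0.1:10001,127.0.0.1:10002"));
        }
    }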


[06/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataExceptionUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataExceptionUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataExceptionUtils.java
index 9dcaef4..f16e24b 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataExceptionUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataExceptionUtils.java
@@ -18,6 +18,10 @@
  */
 package org.apache.asterix.external.util;
 
+import java.util.Arrays;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
 public class ExternalDataExceptionUtils {
     public static final String INCORRECT_PARAMETER = "Incorrect parameter.\n";
     public static final String MISSING_PARAMETER = "Missing parameter.\n";
@@ -29,4 +33,22 @@ public class ExternalDataExceptionUtils {
         return INCORRECT_PARAMETER + PARAMETER_NAME + parameterName + ExternalDataConstants.LF + EXPECTED_VALUE
                 + expectedValue + ExternalDataConstants.LF + PASSED_VALUE + passedValue;
     }
+
+    public static String concat(String... vals) {
+        return Arrays.toString(vals);
+    }
+
+    // For now, we are accepting all exceptions as resolvable by adapter.
+    public static boolean isResolvable(Exception e) {
+        return true;
+    }
+
+    public static HyracksDataException suppress(HyracksDataException hde, Throwable th) {
+        if (hde == null) {
+            return new HyracksDataException(th);
+        } else {
+            hde.addSuppressed(th);
+            return hde;
+        }
+    }
 }
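
Two notes on the helpers added above: concat delegates to Arrays.toString, so concat("a", "b") yields "[a, b]" rather than a plain concatenation, and suppress implements the usual collect-all-failures-then-throw-once idiom for cleanup paths. A hedged usage sketch of suppress follows; the closeAll method and its argument are hypothetical.

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.List;

    import org.apache.asterix.external.util.ExternalDataExceptionUtils;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class SuppressUsageSketch {
        // Close every resource; keep the first HyracksDataException and attach later
        // failures to it as suppressed exceptions, then fail once at the end.
        public static void closeAll(List<Closeable> resources) throws HyracksDataException {
            HyracksDataException hde = null;
            for (Closeable c : resources) {
                try {
                    c.close();
                } catch (IOException e) {
                    hde = ExternalDataExceptionUtils.suppress(hde, e);
                }
            }
            if (hde != null) {
                throw hde;
            }
        }
    }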

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index 7c1c1b5..c9be872 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -23,7 +23,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedConstants;
 import org.apache.asterix.external.api.IDataParserFactory;
 import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
@@ -134,6 +133,15 @@ public class ExternalDataUtils {
         return parserFormat != null ? parserFormat : configuration.get(ExternalDataConstants.KEY_FORMAT);
     }
 
+    public static void setRecordFormat(Map<String, String> configuration, String format) {
+        if (!configuration.containsKey(ExternalDataConstants.KEY_DATA_PARSER)) {
+            configuration.put(ExternalDataConstants.KEY_DATA_PARSER, format);
+        }
+        if (!configuration.containsKey(ExternalDataConstants.KEY_FORMAT)) {
+            configuration.put(ExternalDataConstants.KEY_FORMAT, format);
+        }
+    }
+
     private static Map<ATypeTag, IValueParserFactory> valueParserFactoryMap = initializeValueParserFactoryMap();
 
     private static Map<ATypeTag, IValueParserFactory> initializeValueParserFactoryMap() {
@@ -219,4 +227,31 @@ public class ExternalDataUtils {
                         .substring(parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR) + 1))
                 .newInstance();
     }
+
+    public static boolean isFeed(Map<String, String> configuration) {
+        if (!configuration.containsKey(ExternalDataConstants.KEY_IS_FEED)) {
+            return false;
+        } else {
+            return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_IS_FEED));
+        }
+    }
+
+    public static void prepareFeed(Map<String, String> configuration, String dataverseName, String feedName) {
+        if (!configuration.containsKey(ExternalDataConstants.KEY_IS_FEED)) {
+            configuration.put(ExternalDataConstants.KEY_IS_FEED, ExternalDataConstants.TRUE);
+        }
+        configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataverseName);
+        configuration.put(ExternalDataConstants.KEY_FEED_NAME, feedName);
+    }
+
+    public static boolean keepDataSourceOpen(Map<String, String> configuration) {
+        if (!configuration.containsKey(ExternalDataConstants.KEY_WAIT_FOR_DATA)) {
+            return true;
+        }
+        return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_WAIT_FOR_DATA));
+    }
+
+    public static String getFeedName(Map<String, String> configuration) {
+        return configuration.get(ExternalDataConstants.KEY_FEED_NAME);
+    }
 }
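
A small usage sketch for the helpers added above, assuming only the configuration keys referenced in this hunk (KEY_IS_FEED, KEY_DATAVERSE, KEY_FEED_NAME, KEY_FORMAT, KEY_DATA_PARSER, KEY_WAIT_FOR_DATA); the dataverse/feed names and the "adm" format value are made up for illustration.

import java.util.HashMap;
import java.util.Map;

import org.apache.asterix.external.util.ExternalDataUtils;

public class FeedConfigSketch {
    public static void main(String[] args) {
        Map<String, String> configuration = new HashMap<String, String>();

        // Default both the parser and the format keys in one call ("adm" is an assumed value).
        ExternalDataUtils.setRecordFormat(configuration, "adm");

        // Mark the configuration as feed-driven and attach its identity (hypothetical names).
        ExternalDataUtils.prepareFeed(configuration, "myDataverse", "myFeed");

        System.out.println(ExternalDataUtils.isFeed(configuration));             // true
        System.out.println(ExternalDataUtils.getFeedName(configuration));        // myFeed
        // KEY_WAIT_FOR_DATA is not set, so the data source stays open by default.
        System.out.println(ExternalDataUtils.keepDataSourceOpen(configuration)); // true
    }
}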

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedConstants.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedConstants.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedConstants.java
new file mode 100644
index 0000000..cc21360
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedConstants.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+public class FeedConstants {
+
+    public final static String FEEDS_METADATA_DV = "feeds_metadata";
+    public final static String FAILED_TUPLE_DATASET = "failed_tuple";
+    public final static String FAILED_TUPLE_DATASET_TYPE = "FailedTupleType";
+    public final static String FAILED_TUPLE_DATASET_KEY = "id";
+
+    public static final class StatisticsConstants {
+        public static final String INTAKE_TUPLEID = "intake-tupleid";
+        public static final String INTAKE_PARTITION = "intake-partition";
+        public static final String INTAKE_TIMESTAMP = "intake-timestamp";
+        public static final String COMPUTE_TIMESTAMP = "compute-timestamp";
+        public static final String STORE_TIMESTAMP = "store-timestamp";
+
+    }
+
+    public static final class MessageConstants {
+        public static final String MESSAGE_TYPE = "message-type";
+        public static final String NODE_ID = "nodeId";
+        public static final String DATAVERSE = "dataverse";
+        public static final String FEED = "feed";
+        public static final String DATASET = "dataset";
+        public static final String AQL = "aql";
+        public static final String RUNTIME_TYPE = "runtime-type";
+        public static final String PARTITION = "partition";
+        public static final String INTAKE_PARTITION = "intake-partition";
+        public static final String INFLOW_RATE = "inflow-rate";
+        public static final String OUTFLOW_RATE = "outflow-rate";
+        public static final String MODE = "mode";
+        public static final String CURRENT_CARDINALITY = "current-cardinality";
+        public static final String REDUCED_CARDINALITY = "reduced-cardinality";
+        public static final String VALUE_TYPE = "value-type";
+        public static final String VALUE = "value";
+        public static final String CPU_LOAD = "cpu-load";
+        public static final String N_RUNTIMES = "n_runtimes";
+        public static final String HEAP_USAGE = "heap_usage";
+        public static final String OPERAND_ID = "operand-id";
+        public static final String COMPUTE_PARTITION_RETAIN_LIMIT = "compute-partition-retain-limit";
+        public static final String LAST_PERSISTED_TUPLE_INTAKE_TIMESTAMP = "last-persisted-tuple-intake_timestamp";
+        public static final String PERSISTENCE_DELAY_WITHIN_LIMIT = "persistence-delay-within-limit";
+        public static final String AVERAGE_PERSISTENCE_DELAY = "average-persistence-delay";
+        public static final String COMMIT_ACKS = "commit-acks";
+        public static final String MAX_WINDOW_ACKED = "max-window-acked";
+        public static final String BASE = "base";
+        public static final String NOT_APPLICABLE = "N/A";
+
+    }
+
+    public static final class NamingConstants {
+        public static final String LIBRARY_NAME_SEPARATOR = "#";
+    }
+
+    public static class JobConstants {
+        public static final int DEFAULT_FRAME_SIZE = 8192;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedFrameUtil.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedFrameUtil.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedFrameUtil.java
new file mode 100644
index 0000000..a2bdd64
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedFrameUtil.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import java.nio.ByteBuffer;
+import java.util.BitSet;
+import java.util.Random;
+
+import org.apache.hyracks.api.comm.IFrame;
+import org.apache.hyracks.api.comm.VSizeFrame;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+
+public class FeedFrameUtil {
+    public static ByteBuffer removeBadTuple(IHyracksTaskContext ctx, int tupleIndex, FrameTupleAccessor fta)
+            throws HyracksDataException {
+        FrameTupleAppender appender = new FrameTupleAppender();
+        IFrame slicedFrame = new VSizeFrame(ctx);
+        appender.reset(slicedFrame, true);
+        int totalTuples = fta.getTupleCount();
+        for (int ti = 0; ti < totalTuples; ti++) {
+            if (ti != tupleIndex) {
+                appender.append(fta, ti);
+            }
+        }
+        return slicedFrame.getBuffer();
+    }
+
+    public static ByteBuffer getSampledFrame(IHyracksTaskContext ctx, FrameTupleAccessor fta, int sampleSize)
+            throws HyracksDataException {
+        NChooseKIterator it = new NChooseKIterator(fta.getTupleCount(), sampleSize);
+        FrameTupleAppender appender = new FrameTupleAppender();
+        IFrame sampledFrame = new VSizeFrame(ctx);
+        appender.reset(sampledFrame, true);
+        int nextTupleIndex = 0;
+        while (it.hasNext()) {
+            nextTupleIndex = it.next();
+            appender.append(fta, nextTupleIndex);
+        }
+        return sampledFrame.getBuffer();
+    }
+
+    private static class NChooseKIterator {
+
+        private final int n;
+        private final int k;
+        private final BitSet bSet;
+        private final Random random;
+
+        private int traversed = 0;
+
+        public NChooseKIterator(int n, int k) {
+            this.n = n;
+            this.k = k;
+            this.bSet = new BitSet(n);
+            bSet.set(0, n); // BitSet.set(from, to) excludes 'to'; all indices 0..n-1 must be selectable
+            this.random = new Random();
+        }
+
+        public boolean hasNext() {
+            return traversed < k;
+        }
+
+        public int next() {
+            if (hasNext()) {
+                traversed++;
+                int startOffset = random.nextInt(n);
+                int pos = -1;
+                while (pos < 0) {
+                    pos = bSet.nextSetBit(startOffset);
+                    if (pos < 0) {
+                        startOffset = 0;
+                    }
+                }
+                bSet.clear(pos);
+                return pos;
+            } else {
+                return -1;
+            }
+        }
+
+    }
+
+}
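
The NChooseKIterator above picks k distinct tuple indices out of n by drawing from a BitSet of still-available positions. Below is a standalone sketch of the same selection idea, independent of Hyracks frames; the class and method names are illustrative only.

import java.util.BitSet;
import java.util.Random;

public class RandomKOfNSketch {

    // Pick k distinct indices from 0..n-1, mirroring the BitSet-based selection above (k <= n).
    public static int[] pick(int n, int k) {
        BitSet available = new BitSet(n);
        available.set(0, n); // mark indices 0..n-1 as selectable
        Random random = new Random();
        int[] chosen = new int[k];
        for (int i = 0; i < k; i++) {
            int start = random.nextInt(n);
            int pos = available.nextSetBit(start);
            if (pos < 0) {
                pos = available.nextSetBit(0); // wrap around if nothing is set after 'start'
            }
            available.clear(pos);
            chosen[i] = pos;
        }
        return chosen;
    }

    public static void main(String[] args) {
        for (int index : pick(10, 3)) {
            System.out.println(index);
        }
    }
}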

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
new file mode 100644
index 0000000..72b438d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.TreeSet;
+
+import org.apache.commons.io.FileUtils;
+
+public class FeedLogManager {
+
+    public enum LogEntryType {
+        START,      // partition start
+        END,        // partition end
+        COMMIT,     // a record commit within a partition
+        SNAPSHOT    // a marker indicating that partitions logged before this point should be ignored
+    }
+
+    public static final String PROGRESS_LOG_FILE_NAME = "progress.log";
+    public static final String ERROR_LOG_FILE_NAME = "error.log";
+    public static final String BAD_RECORDS_FILE_NAME = "failed_record.log";
+    public static final String START_PREFIX = "s:";
+    public static final String END_PREFIX = "e:";
+    public static final int PREFIX_SIZE = 2;
+    private String currentPartition;
+    private TreeSet<String> completed;
+    private Path dir;
+    private BufferedWriter progressLogger;
+    private BufferedWriter errorLogger;
+    private BufferedWriter recordLogger;
+
+    public FeedLogManager(File file) {
+        this.dir = file.toPath();
+        this.completed = new TreeSet<String>();
+    }
+
+    public void endPartition() throws IOException {
+        logProgress(END_PREFIX + currentPartition);
+        completed.add(currentPartition);
+    }
+
+    public void endPartition(String partition) throws IOException {
+        currentPartition = partition;
+        logProgress(END_PREFIX + currentPartition);
+        completed.add(currentPartition);
+    }
+
+    public void startPartition(String partition) throws IOException {
+        currentPartition = partition;
+        logProgress(START_PREFIX + currentPartition);
+    }
+
+    public boolean exists() {
+        return Files.exists(dir);
+    }
+
+    public void open() throws IOException {
+        // read content of logs.
+        BufferedReader reader = Files.newBufferedReader(
+                Paths.get(dir.toAbsolutePath().toString() + File.separator + PROGRESS_LOG_FILE_NAME));
+        String log = reader.readLine();
+        while (log != null) {
+            if (log.startsWith(END_PREFIX)) {
+                completed.add(getSplitId(log));
+            }
+            log = reader.readLine();
+        }
+        reader.close();
+
+        progressLogger = Files.newBufferedWriter(
+                Paths.get(dir.toAbsolutePath().toString() + File.separator + PROGRESS_LOG_FILE_NAME),
+                StandardCharsets.UTF_8, StandardOpenOption.APPEND);
+        errorLogger = Files.newBufferedWriter(
+                Paths.get(dir.toAbsolutePath().toString() + File.separator + ERROR_LOG_FILE_NAME),
+                StandardCharsets.UTF_8, StandardOpenOption.APPEND);
+        recordLogger = Files.newBufferedWriter(
+                Paths.get(dir.toAbsolutePath().toString() + File.separator + BAD_RECORDS_FILE_NAME),
+                StandardCharsets.UTF_8, StandardOpenOption.APPEND);
+    }
+
+    public void close() throws IOException {
+        progressLogger.close();
+        errorLogger.close();
+        recordLogger.close();
+    }
+
+    public boolean create() throws IOException {
+        File f = dir.toFile();
+        f.mkdirs();
+        new File(f, PROGRESS_LOG_FILE_NAME).createNewFile();
+        new File(f, ERROR_LOG_FILE_NAME).createNewFile();
+        new File(f, BAD_RECORDS_FILE_NAME).createNewFile();
+        return true;
+    }
+
+    public boolean destroy() throws IOException {
+        File f = dir.toFile();
+        FileUtils.deleteDirectory(f);
+        return true;
+    }
+
+    public void logProgress(String log) throws IOException {
+        progressLogger.write(log);
+        progressLogger.newLine();
+    }
+
+    public void logError(String error, Throwable th) throws IOException {
+        errorLogger.append(error);
+        errorLogger.newLine();
+        errorLogger.append(th.toString());
+        errorLogger.newLine();
+    }
+
+    public void logRecord(String record, Exception e) throws IOException {
+        recordLogger.append(record);
+        recordLogger.newLine();
+        recordLogger.append(e.toString());
+        recordLogger.newLine();
+    }
+
+    public static String getSplitId(String log) {
+        return log.substring(PREFIX_SIZE);
+    }
+
+    public boolean isSplitRead(String split) {
+        return completed.contains(split);
+    }
+}
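
A brief lifecycle sketch for the FeedLogManager defined above: create the log directory on first use, open the writers, record partition progress, and close. The log directory and split paths are hypothetical.

import java.io.File;

import org.apache.asterix.external.util.FeedLogManager;

public class FeedLogManagerSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical per-adapter log directory.
        FeedLogManager logManager = new FeedLogManager(new File("/tmp/feed-logs/adapter_0"));

        if (!logManager.exists()) {
            logManager.create(); // creates progress.log, error.log and failed_record.log
        }
        logManager.open();       // replays progress.log into the set of completed splits

        logManager.startPartition("/data/input/part-000.adm"); // writes "s:<partition>"
        // ... ingest the partition ...
        logManager.endPartition();                              // writes "e:<partition>"

        // A split that was fully ingested before a restart can be skipped on resume.
        System.out.println(logManager.isSplitRead("/data/input/part-000.adm"));

        logManager.close();
    }
}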

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
new file mode 100644
index 0000000..224ee31
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.common.cluster.ClusterPartition;
+import org.apache.asterix.common.utils.StoragePathUtil;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint.PartitionConstraintType;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.io.FileReference;
+import org.apache.hyracks.api.io.IIOManager;
+import org.apache.hyracks.dataflow.std.file.FileSplit;
+
+public class FeedUtils {
+    private static String prepareDataverseFeedName(String dataverseName, String feedName) {
+        return dataverseName + File.separator + feedName;
+    }
+
+    public static FileSplit[] splitsForAdapter(String dataverseName, String feedName,
+            AlgebricksPartitionConstraint partitionConstraints) throws Exception {
+        File relPathFile = new File(prepareDataverseFeedName(dataverseName, feedName));
+        if (partitionConstraints.getPartitionConstraintType() == PartitionConstraintType.COUNT) {
+            throw new AlgebricksException("Can't create file splits for adapter with count partitioning constraints");
+        }
+        String[] locations = ((AlgebricksAbsolutePartitionConstraint) partitionConstraints).getLocations();
+        List<FileSplit> splits = new ArrayList<FileSplit>();
+        String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
+        int i = 0;
+        for (String nd : locations) {
+            // Always get the first partition
+            ClusterPartition nodePartition = AsterixClusterProperties.INSTANCE.getNodePartitions(nd)[0];
+            String storagePartitionPath = StoragePathUtil.prepareStoragePartitionPath(storageDirName,
+                    nodePartition.getPartitionId());
+            // format: 'storage dir name'/partition_#/dataverse/feed/adapter_#
+            File f = new File(storagePartitionPath + File.separator + relPathFile + File.separator
+                    + StoragePathUtil.ADAPTER_INSTANCE_PREFIX + i);
+            splits.add(StoragePathUtil.getFileSplitForClusterPartition(nodePartition, f));
+            i++;
+        }
+        return splits.toArray(new FileSplit[] {});
+    }
+
+    public static FileReference getAbsoluteFileRef(String relativePath, int ioDeviceId, IIOManager ioManager) {
+        return ioManager.getAbsoluteFileRef(ioDeviceId, relativePath);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
new file mode 100644
index 0000000..4bb9d92
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
@@ -0,0 +1,244 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.ClosedWatchServiceException;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.LinkOption;
+import java.nio.file.Path;
+import java.nio.file.StandardWatchEventKinds;
+import java.nio.file.WatchEvent;
+import java.nio.file.WatchEvent.Kind;
+import java.nio.file.WatchKey;
+import java.nio.file.WatchService;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class FileSystemWatcher {
+
+    private static Logger LOGGER = Logger.getLogger(FileSystemWatcher.class.getName());
+    private final WatchService watcher;
+    private final HashMap<WatchKey, Path> keys;
+    private final LinkedList<File> files = new LinkedList<File>();
+    private Iterator<File> it;
+    private final String expression;
+    private final FeedLogManager logManager;
+    private final Path path;
+    private final boolean isFeed;
+    private boolean done;
+    private File current;
+
+    public FileSystemWatcher(FeedLogManager logManager, Path inputResource, String expression, boolean isFeed)
+            throws IOException {
+        this.watcher = isFeed ? FileSystems.getDefault().newWatchService() : null;
+        this.keys = isFeed ? new HashMap<WatchKey, Path>() : null;
+        this.logManager = logManager;
+        this.expression = expression;
+        this.path = inputResource;
+        this.isFeed = isFeed;
+    }
+
+    public void init() throws IOException {
+        // directories are collected so that, for feeds, each one can be registered with the watcher
+        LinkedList<Path> dirs = new LinkedList<Path>();
+        LocalFileSystemUtils.traverse(files, path.toFile(), expression, dirs);
+        it = files.iterator();
+        if (isFeed) {
+            for (Path path : dirs) {
+                register(path);
+            }
+            resume();
+        }
+    }
+
+    /**
+     * Register the given directory with the WatchService. Sub-directories are
+     * registered individually as they are discovered during init() or created later.
+     */
+    private void register(Path dir) throws IOException {
+        WatchKey key = dir.register(watcher, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE,
+                StandardWatchEventKinds.ENTRY_MODIFY);
+        keys.put(key, dir);
+    }
+
+    private void resume() throws IOException {
+        if (logManager == null) {
+            return;
+        }
+        if (logManager.exists()) {
+            logManager.open();
+        } else {
+            logManager.create();
+            logManager.open();
+            return;
+        }
+        /*
+         * The progress log has been replayed at this point, so the set of
+         * completed splits is known; remove those files from the pending list.
+         */
+
+        if (it == null) {
+            return;
+        }
+        while (it.hasNext()) {
+            File file = it.next();
+            if (logManager.isSplitRead(file.getAbsolutePath())) {
+                // File was read completely, remove it from the files list
+                it.remove();
+            }
+        }
+        // reset the iterator
+        it = files.iterator();
+    }
+
+    @SuppressWarnings("unchecked")
+    static <T> WatchEvent<T> cast(WatchEvent<?> event) {
+        return (WatchEvent<T>) event;
+    }
+
+    private void handleEvents(WatchKey key) {
+        // get dir associated with the key
+        Path dir = keys.get(key);
+        if (dir == null) {
+            // This should never happen
+            if (LOGGER.isEnabledFor(Level.WARN)) {
+                LOGGER.warn("WatchKey not recognized!!");
+            }
+            return;
+        }
+        for (WatchEvent<?> event : key.pollEvents()) {
+            Kind<?> kind = event.kind();
+            // TODO: Do something about overflow events
+            // An overflow event means that some events were dropped
+            if (kind == StandardWatchEventKinds.OVERFLOW) {
+                if (LOGGER.isEnabledFor(Level.WARN)) {
+                    LOGGER.warn("Overflow event. Some events might have been missed");
+                }
+                continue;
+            }
+
+            // Context for directory entry event is the file name of entry
+            WatchEvent<Path> ev = cast(event);
+            Path name = ev.context();
+            Path child = dir.resolve(name);
+            // if directory is created then register it and its sub-directories
+            if ((kind == StandardWatchEventKinds.ENTRY_CREATE)) {
+                try {
+                    if (Files.isDirectory(child, LinkOption.NOFOLLOW_LINKS)) {
+                        register(child);
+                    } else {
+                        // it is a file, add it to the files list.
+                        LocalFileSystemUtils.validateAndAdd(child, expression, files);
+                    }
+                } catch (IOException e) {
+                    if (LOGGER.isEnabledFor(Level.ERROR)) {
+                        LOGGER.error(e);
+                    }
+                }
+            }
+        }
+    }
+
+    public void close() throws IOException {
+        if (!done) {
+            if (watcher != null) {
+                watcher.close();
+            }
+            if (logManager != null) {
+                if (current != null) {
+                    logManager.startPartition(current.getAbsolutePath());
+                    logManager.endPartition();
+                }
+                logManager.close();
+                current = null;
+            }
+            done = true;
+        }
+    }
+
+    public File next() throws IOException {
+        if (current != null && logManager != null) {
+            logManager.startPartition(current.getAbsolutePath());
+            logManager.endPartition();
+        }
+        current = it.next();
+        return current;
+    }
+
+    private boolean endOfEvents(WatchKey key) {
+        // reset key and remove from set if directory no longer accessible
+        if (!key.reset()) {
+            keys.remove(key);
+            if (keys.isEmpty()) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public boolean hasNext() {
+        if (it.hasNext()) {
+            return true;
+        }
+        if (done || !isFeed) {
+            return false;
+        }
+        files.clear();
+        // Read new Events (Polling first to add all available files)
+        WatchKey key;
+        key = watcher.poll();
+        while (key != null) {
+            handleEvents(key);
+            if (endOfEvents(key)) {
+                return false;
+            }
+            key = watcher.poll();
+        }
+        // No file was found, wait for the filesystem to push events
+        while (files.isEmpty()) {
+            try {
+                key = watcher.take();
+            } catch (InterruptedException x) {
+                if (LOGGER.isEnabledFor(Level.WARN)) {
+                    LOGGER.warn("Feed Closed");
+                }
+                return false;
+            } catch (ClosedWatchServiceException e) {
+                if (LOGGER.isEnabledFor(Level.WARN)) {
+                    LOGGER.warn("The watcher has exited");
+                }
+                return false;
+            }
+            handleEvents(key);
+            if (endOfEvents(key)) {
+                return false;
+            }
+        }
+        // files were found, re-create the iterator and move it one step
+        it = files.iterator();
+        return it.hasNext();
+    }
+}
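
A usage sketch for the FileSystemWatcher above in its simplest, non-feed mode, where it only enumerates existing files matching a regular expression and no WatchService or log manager is involved; the input path and expression are made up.

import java.io.File;
import java.nio.file.Paths;

import org.apache.asterix.external.util.FileSystemWatcher;

public class FileSystemWatcherSketch {
    public static void main(String[] args) throws Exception {
        // isFeed = false: no log manager and no WatchService, just a one-shot listing.
        FileSystemWatcher watcher =
                new FileSystemWatcher(null, Paths.get("/data/input"), ".*\\.adm", false);
        watcher.init();
        while (watcher.hasNext()) {
            File file = watcher.next();
            System.out.println(file.getAbsolutePath());
        }
        watcher.close();
    }
}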

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
new file mode 100644
index 0000000..d6e9463
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
+import java.nio.file.LinkOption;
+import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.util.LinkedList;
+import java.util.regex.Pattern;
+
+public class LocalFileSystemUtils {
+
+    //TODO: replace this method by FileUtils.iterateFilesAndDirs(.)
+    public static void traverse(final LinkedList<File> files, File root, final String expression,
+            final LinkedList<Path> dirs) throws IOException {
+        if (!Files.exists(root.toPath())) {
+            return;
+        }
+        if (!Files.isDirectory(root.toPath())) {
+            validateAndAdd(root.toPath(), expression, files);
+        }
+        //FileUtils.iterateFilesAndDirs(directory, fileFilter, dirFilter)
+        Files.walkFileTree(root.toPath(), new SimpleFileVisitor<Path>() {
+            @Override
+            public FileVisitResult preVisitDirectory(Path path, BasicFileAttributes attrs) throws IOException {
+                if (!Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
+                    return FileVisitResult.TERMINATE;
+                }
+                if (Files.isDirectory(path, LinkOption.NOFOLLOW_LINKS)) {
+                    if (dirs != null) {
+                        dirs.add(path);
+                    }
+                    //get immediate children files
+                    File[] content = path.toFile().listFiles();
+                    for (File file : (content == null ? new File[0] : content)) { // listFiles() may return null on I/O error
+                        if (!file.isDirectory()) {
+                            validateAndAdd(file.toPath(), expression, files);
+                        }
+                    }
+                } else {
+                    // Path is a file, add to list of files if it matches the expression
+                    validateAndAdd(path, expression, files);
+                }
+                return FileVisitResult.CONTINUE;
+            }
+        });
+    }
+
+    public static void validateAndAdd(Path path, String expression, LinkedList<File> files) {
+        if (expression == null || Pattern.matches(expression, path.toString())) {
+            files.add(new File(path.toString()));
+        }
+    }
+}
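
For completeness, a minimal sketch of calling traverse(..) above directly: it fills one list with files whose paths match the expression and, optionally, another list with the directories it encountered. The path and pattern are illustrative.

import java.io.File;
import java.nio.file.Path;
import java.util.LinkedList;

import org.apache.asterix.external.util.LocalFileSystemUtils;

public class TraverseSketch {
    public static void main(String[] args) throws Exception {
        LinkedList<File> files = new LinkedList<File>();
        LinkedList<Path> dirs = new LinkedList<Path>();
        // Collect every .json file under /data/input, remembering visited directories.
        LocalFileSystemUtils.traverse(files, new File("/data/input"), ".*\\.json", dirs);
        System.out.println(files.size() + " files, " + dirs.size() + " directories");
    }
}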

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolverFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolverFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolverFactory.java
new file mode 100644
index 0000000..b62dda8
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolverFactory.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import org.apache.asterix.external.api.INodeResolver;
+import org.apache.asterix.external.api.INodeResolverFactory;
+
+/**
+ * Factory for creating an instance of {@link NodeResolver}.
+ */
+public class NodeResolverFactory implements INodeResolverFactory {
+
+    private static final INodeResolver INSTANCE = new NodeResolver();
+
+    @Override
+    public INodeResolver createNodeResolver() {
+        return INSTANCE;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java
new file mode 100644
index 0000000..ec866be
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.util;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.util.DataGenerator.InitializationInfo;
+import org.apache.asterix.external.util.DataGenerator.TweetMessage;
+import org.apache.asterix.external.util.DataGenerator.TweetMessageIterator;
+
+public class TweetGenerator {
+
+    private static Logger LOGGER = Logger.getLogger(TweetGenerator.class.getName());
+
+    public static final String KEY_DURATION = "duration";
+    public static final String KEY_TPS = "tps";
+    public static final String KEY_VERBOSE = "verbose";
+    public static final String KEY_FIELDS = "fields";
+    public static final int INFINITY = 0;
+
+    private static final int DEFAULT_DURATION = INFINITY;
+
+    private int duration;
+    private TweetMessageIterator tweetIterator = null;
+    private int partition;
+    private long tweetCount = 0;
+    private int frameTweetCount = 0;
+    private int numFlushedTweets = 0;
+    private DataGenerator dataGenerator = null;
+    private ByteBuffer outputBuffer = ByteBuffer.allocate(32 * 1024);
+    private String[] fields;
+    private final List<OutputStream> subscribers;
+    private final Object lock = new Object();
+    private final List<OutputStream> subscribersForRemoval = new ArrayList<OutputStream>();
+
+    public TweetGenerator(Map<String, String> configuration, int partition) throws Exception {
+        this.partition = partition;
+        String value = configuration.get(KEY_DURATION);
+        this.duration = value != null ? Integer.parseInt(value) : DEFAULT_DURATION;
+        dataGenerator = new DataGenerator(new InitializationInfo());
+        tweetIterator = dataGenerator.new TweetMessageIterator(duration);
+        this.fields = configuration.get(KEY_FIELDS) != null ? configuration.get(KEY_FIELDS).split(",") : null;
+        this.subscribers = new ArrayList<OutputStream>();
+    }
+
+    private void writeTweetString(TweetMessage tweetMessage) throws IOException {
+        String tweet = tweetMessage.getAdmEquivalent(fields) + "\n";
+        System.out.println(tweet);
+        tweetCount++;
+        byte[] b = tweet.getBytes();
+        if (outputBuffer.position() + b.length > outputBuffer.limit()) {
+            flush();
+            numFlushedTweets += frameTweetCount;
+            frameTweetCount = 0;
+            outputBuffer.put(b);
+        } else {
+            outputBuffer.put(b);
+        }
+        frameTweetCount++;
+    }
+
+    private void flush() throws IOException {
+        outputBuffer.flip();
+        synchronized (lock) {
+            for (OutputStream os : subscribers) {
+                try {
+                    os.write(outputBuffer.array(), 0, outputBuffer.limit());
+                } catch (Exception e) {
+                    subscribersForRemoval.add(os);
+                }
+            }
+            if (!subscribersForRemoval.isEmpty()) {
+                subscribers.removeAll(subscribersForRemoval);
+                subscribersForRemoval.clear();
+            }
+        }
+        outputBuffer.position(0);
+        outputBuffer.limit(32 * 1024);
+    }
+
+    public boolean generateNextBatch(int numTweets) throws Exception {
+        boolean moreData = tweetIterator.hasNext();
+        if (!moreData) {
+            if (outputBuffer.position() > 0) {
+                flush();
+            }
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Reached end of batch. Tweet Count: [" + partition + "]" + tweetCount);
+            }
+            return false;
+        } else {
+            int count = 0;
+            while (count < numTweets) {
+                writeTweetString(tweetIterator.next());
+                count++;
+            }
+            return true;
+        }
+    }
+
+    public int getNumFlushedTweets() {
+        return numFlushedTweets;
+    }
+
+    public void registerSubscriber(OutputStream os) {
+        synchronized (lock) {
+            subscribers.add(os);
+        }
+    }
+
+    public void deregisterSubscribers(OutputStream os) {
+        synchronized (lock) {
+            subscribers.remove(os);
+        }
+    }
+
+    public void close() throws IOException {
+        synchronized (lock) {
+            for (OutputStream os : subscribers) {
+                os.close();
+            }
+        }
+    }
+
+    public boolean isSubscribed() {
+        return !subscribers.isEmpty();
+    }
+
+    public long getTweetCount() {
+        return tweetCount;
+    }
+
+}
\ No newline at end of file
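
A sketch of driving the TweetGenerator above: register an OutputStream subscriber, pull a few batches, and close. Only keys defined in this file are used; the duration value and batch size are arbitrary, and the exact semantics of KEY_DURATION are assumed rather than documented here.

import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;

import org.apache.asterix.external.util.TweetGenerator;

public class TweetGeneratorSketch {
    public static void main(String[] args) throws Exception {
        Map<String, String> configuration = new HashMap<String, String>();
        configuration.put(TweetGenerator.KEY_DURATION, "5"); // assumed to bound generation time

        TweetGenerator generator = new TweetGenerator(configuration, 0 /* partition */);
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        generator.registerSubscriber(sink);

        // Pull batches of 100 tweets until the underlying iterator is exhausted.
        while (generator.generateNextBatch(100)) {
            // generated ADM records are flushed into 'sink' as the output buffer fills up
        }
        System.out.println(generator.getTweetCount() + " tweets generated");
        generator.close(); // also closes the registered subscriber streams
    }
}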

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetProcessor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetProcessor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetProcessor.java
deleted file mode 100644
index f8914a6..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetProcessor.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.util;
-
-import org.apache.asterix.external.library.java.JObjectUtil;
-import org.apache.asterix.external.util.Datatypes.Tweet;
-import org.apache.asterix.om.base.AMutableDouble;
-import org.apache.asterix.om.base.AMutableInt32;
-import org.apache.asterix.om.base.AMutableRecord;
-import org.apache.asterix.om.base.AMutableString;
-import org.apache.asterix.om.base.IAObject;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.ATypeTag;
-import org.apache.asterix.om.types.IAType;
-import twitter4j.Status;
-import twitter4j.User;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class TweetProcessor {
-
-    private IAObject[] mutableTweetFields;
-    private IAObject[] mutableUserFields;
-    private AMutableRecord mutableRecord;
-    private AMutableRecord mutableUser;
-
-    private final Map<String, Integer> userFieldNameMap = new HashMap<>();
-    private final Map<String, Integer> tweetFieldNameMap = new HashMap<>();
-
-
-    public TweetProcessor(ARecordType recordType) {
-        initFieldNames(recordType);
-        mutableUserFields = new IAObject[] { new AMutableString(null), new AMutableString(null), new AMutableInt32(0),
-                new AMutableInt32(0), new AMutableString(null), new AMutableInt32(0) };
-        mutableUser = new AMutableRecord((ARecordType) recordType.getFieldTypes()[tweetFieldNameMap.get(Tweet.USER)], mutableUserFields);
-
-        mutableTweetFields = new IAObject[] { new AMutableString(null), mutableUser, new AMutableDouble(0),
-                new AMutableDouble(0), new AMutableString(null), new AMutableString(null) };
-        mutableRecord = new AMutableRecord(recordType, mutableTweetFields);
-
-    }
-
-    // Initialize the hashmap values for the field names and positions
-    private void initFieldNames(ARecordType recordType) {
-        String tweetFields[] = recordType.getFieldNames();
-        for (int i=0; i<tweetFields.length; i++) {
-            tweetFieldNameMap.put(tweetFields[i], i);
-            if (tweetFields[i].equals(Tweet.USER)) {
-                IAType fieldType = recordType.getFieldTypes()[i];
-                if (fieldType.getTypeTag() == ATypeTag.RECORD) {
-                    String userFields[]  = ((ARecordType)fieldType).getFieldNames();
-                    for (int j=0; j<userFields.length; j++) {
-                        userFieldNameMap.put(userFields[j], j);
-                    }
-                }
-
-            }
-        }
-    }
-
-
-    public AMutableRecord processNextTweet(Status tweet) {
-        User user = tweet.getUser();
-
-        // Tweet user data
-        ((AMutableString) mutableUserFields[userFieldNameMap.get(Tweet.SCREEN_NAME)]).setValue(JObjectUtil.getNormalizedString(user.getScreenName()));
-        ((AMutableString) mutableUserFields[userFieldNameMap.get(Tweet.LANGUAGE)]).setValue(JObjectUtil.getNormalizedString(user.getLang()));
-        ((AMutableInt32) mutableUserFields[userFieldNameMap.get(Tweet.FRIENDS_COUNT)]).setValue(user.getFriendsCount());
-        ((AMutableInt32) mutableUserFields[userFieldNameMap.get(Tweet.STATUS_COUNT)]).setValue(user.getStatusesCount());
-        ((AMutableString) mutableUserFields[userFieldNameMap.get(Tweet.NAME)]).setValue(JObjectUtil.getNormalizedString(user.getName()));
-        ((AMutableInt32) mutableUserFields[userFieldNameMap.get(Tweet.FOLLOWERS_COUNT)]).setValue(user.getFollowersCount());
-
-
-        // Tweet data
-        ((AMutableString) mutableTweetFields[tweetFieldNameMap.get(Tweet.ID)]).setValue(String.valueOf(tweet.getId()));
-
-        int userPos = tweetFieldNameMap.get(Tweet.USER);
-        for (int i = 0; i < mutableUserFields.length; i++) {
-            ((AMutableRecord) mutableTweetFields[userPos]).setValueAtPos(i, mutableUserFields[i]);
-        }
-        if (tweet.getGeoLocation() != null) {
-            ((AMutableDouble) mutableTweetFields[tweetFieldNameMap.get(Tweet.LATITUDE)]).setValue(tweet.getGeoLocation().getLatitude());
-            ((AMutableDouble) mutableTweetFields[tweetFieldNameMap.get(Tweet.LONGITUDE)]).setValue(tweet.getGeoLocation().getLongitude());
-        } else {
-            ((AMutableDouble) mutableTweetFields[tweetFieldNameMap.get(Tweet.LATITUDE)]).setValue(0);
-            ((AMutableDouble) mutableTweetFields[tweetFieldNameMap.get(Tweet.LONGITUDE)]).setValue(0);
-        }
-        ((AMutableString) mutableTweetFields[tweetFieldNameMap.get(Tweet.CREATED_AT)]).setValue(JObjectUtil.getNormalizedString(
-                tweet.getCreatedAt().toString()));
-        ((AMutableString) mutableTweetFields[tweetFieldNameMap.get(Tweet.MESSAGE)]).setValue(JObjectUtil.getNormalizedString(tweet.getText()));
-
-        for (int i = 0; i < mutableTweetFields.length; i++) {
-            mutableRecord.setValueAtPos(i, mutableTweetFields[i]);
-        }
-
-        return mutableRecord;
-
-    }
-
-    public AMutableRecord getMutableRecord() {
-        return mutableRecord;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/test/java/org/apache/asterix/external/library/UpperCaseFunction.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/UpperCaseFunction.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/UpperCaseFunction.java
index 70bd3e1..16f8b0a 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/UpperCaseFunction.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/UpperCaseFunction.java
@@ -18,14 +18,11 @@
  */
 package org.apache.asterix.external.library;
 
-import java.util.Random;
-
+import org.apache.asterix.external.api.IExternalScalarFunction;
+import org.apache.asterix.external.api.IFunctionHelper;
 import org.apache.asterix.external.library.java.JObjects.JInt;
 import org.apache.asterix.external.library.java.JObjects.JRecord;
 import org.apache.asterix.external.library.java.JObjects.JString;
-import org.apache.asterix.external.api.IExternalScalarFunction;
-import org.apache.asterix.external.api.IFunctionHelper;
-import org.apache.asterix.external.library.java.JTypeTag;
 
 /**
  * Accepts an input record of type Open{ id: int32, text: string }
@@ -35,11 +32,8 @@ import org.apache.asterix.external.library.java.JTypeTag;
  */
 public class UpperCaseFunction implements IExternalScalarFunction {
 
-    private Random random;
-
     @Override
     public void initialize(IFunctionHelper functionHelper) {
-        random = new Random();
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
index df0fb94..2fc289b 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
@@ -19,21 +19,24 @@
 package org.apache.asterix.external.library.adapter;
 
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.PipedInputStream;
 import java.io.PipedOutputStream;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
-import org.apache.asterix.external.dataset.adapter.StreamBasedAdapter;
+import org.apache.asterix.external.api.IFeedAdapter;
 import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.IAType;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.dataflow.std.file.ITupleParser;
 import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
 
-public class TestTypedAdapter extends StreamBasedAdapter {
+public class TestTypedAdapter implements IFeedAdapter {
 
     private static final long serialVersionUID = 1L;
 
@@ -45,25 +48,34 @@ public class TestTypedAdapter extends StreamBasedAdapter {
 
     private DummyGenerator generator;
 
+    protected final ITupleParser tupleParser;
+
+    protected final IAType sourceDatatype;
+
+    protected static final Logger LOGGER = Logger.getLogger(TestTypedAdapter.class.getName());
+
     public TestTypedAdapter(ITupleParserFactory parserFactory, ARecordType sourceDatatype, IHyracksTaskContext ctx,
             Map<String, String> configuration, int partition) throws IOException {
-        super(parserFactory, sourceDatatype, ctx, partition);
         pos = new PipedOutputStream();
         pis = new PipedInputStream(pos);
         this.configuration = configuration;
+        this.tupleParser = parserFactory.createTupleParser(ctx);
+        this.sourceDatatype = sourceDatatype;
     }
 
     @Override
-    public InputStream getInputStream(int partition) throws IOException {
-        return pis;
-    }
-
-    @Override
-    public void start(int partition, IFrameWriter frameWriter) throws Exception {
+    public void start(int partition, IFrameWriter writer) throws Exception {
         generator = new DummyGenerator(configuration, pos);
         ExecutorService executor = Executors.newSingleThreadExecutor();
         executor.execute(generator);
-        super.start(partition, frameWriter);
+        if (pis != null) {
+            tupleParser.parse(pis, writer);
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning(
+                        "Could not obtain input stream for parsing from adapter " + this + "[" + partition + "]");
+            }
+        }
     }
 
     private static class DummyGenerator implements Runnable {
@@ -135,4 +147,13 @@ public class TestTypedAdapter extends StreamBasedAdapter {
         return false;
     }
 
+    @Override
+    public boolean pause() {
+        return false;
+    }
+
+    @Override
+    public boolean resume() {
+        return false;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index 6b08f3a..5346bf2 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@ -22,9 +22,9 @@ import java.io.InputStream;
 import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.api.IDataSourceAdapter;
 import org.apache.asterix.common.parse.ITupleForwarder;
 import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.parser.ADMDataParser;
 import org.apache.asterix.external.util.DataflowUtils;
 import org.apache.asterix.om.types.ARecordType;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java b/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
index 71c762a..3f85ba9 100644
--- a/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
+++ b/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
@@ -24,12 +24,13 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.FeedActivity;
-import org.apache.asterix.common.feeds.FeedConnectionRequest;
-import org.apache.asterix.common.feeds.FeedId;
-import org.apache.asterix.common.feeds.FeedPolicyAccessor;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.watch.FeedActivity;
 import org.apache.asterix.lang.aql.parser.AQLParserFactory;
 import org.apache.asterix.lang.common.base.IParser;
 import org.apache.asterix.lang.common.base.IParserFactory;
@@ -41,12 +42,9 @@ import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.entities.DatasourceAdapter.AdapterType;
 import org.apache.asterix.metadata.entities.Feed;
 import org.apache.asterix.metadata.entities.Function;
-import org.apache.asterix.metadata.entities.PrimaryFeed;
-import org.apache.asterix.metadata.entities.SecondaryFeed;
-import org.apache.asterix.metadata.feeds.FeedUtil;
+import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Triple;
@@ -188,14 +186,13 @@ public class SubscribeFeedStatement implements Statement {
         try {
             switch (feed.getFeedType()) {
                 case PRIMARY:
-                    Triple<IAdapterFactory, ARecordType, AdapterType> factoryOutput = null;
+                    Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> factoryOutput = null;
 
-                    factoryOutput = FeedUtil.getPrimaryFeedFactoryAndOutput((PrimaryFeed) feed, policyAccessor,
-                            mdTxnCtx);
+                    factoryOutput = FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx);
                     outputType = factoryOutput.second.getTypeName();
                     break;
                 case SECONDARY:
-                    outputType = FeedUtil.getSecondaryFeedOutput((SecondaryFeed) feed, policyAccessor, mdTxnCtx);
+                    outputType = FeedMetadataUtil.getSecondaryFeedOutput(feed, policyAccessor, mdTxnCtx);
                     break;
             }
             return outputType;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-lang-aql/src/main/javacc/AQL.jj
----------------------------------------------------------------------
diff --git a/asterix-lang-aql/src/main/javacc/AQL.jj b/asterix-lang-aql/src/main/javacc/AQL.jj
index 12e7897..8f62f74 100644
--- a/asterix-lang-aql/src/main/javacc/AQL.jj
+++ b/asterix-lang-aql/src/main/javacc/AQL.jj
@@ -43,6 +43,7 @@ import org.apache.asterix.common.annotations.TypeDataGen;
 import org.apache.asterix.common.annotations.UndeclaredFieldsDataGen;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.lang.aql.clause.DistinctClause;
@@ -127,7 +128,6 @@ import org.apache.asterix.lang.common.statement.WriteStatement;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.lang.common.struct.QuantifiedPair;
 import org.apache.asterix.lang.common.struct.VarIdentifier;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.common.utils.Triple;
 import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
----------------------------------------------------------------------
diff --git a/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
index 49aa74b..d9d09f7 100644
--- a/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
+++ b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/DatasetDecl.java
@@ -20,12 +20,12 @@ package org.apache.asterix.lang.common.statement;
 
 import java.util.Map;
 
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 
 public class DatasetDecl implements Statement {
     protected final Identifier name;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
index 28e5af0..a1e6363 100644
--- a/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
+++ b/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/FormatPrintVisitor.java
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -95,7 +96,6 @@ import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.lang.common.struct.OperatorType;
 import org.apache.asterix.lang.common.struct.QuantifiedPair;
 import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/VariableCheckAndRewriteVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/VariableCheckAndRewriteVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/VariableCheckAndRewriteVisitor.java
index b016b62..f9cf99f 100644
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/VariableCheckAndRewriteVisitor.java
+++ b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/VariableCheckAndRewriteVisitor.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.lang.common.base.Expression;
@@ -69,7 +70,6 @@ import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
 import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
 import org.apache.asterix.lang.sqlpp.util.SqlppFormatPrintUtil;
 import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.hyracks.algebricks.core.algebra.base.Counter;
 
 public class VariableCheckAndRewriteVisitor extends AbstractSqlppQueryExpressionVisitor<Expression, Void> {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj b/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
index 0e3fad8..547a10b 100644
--- a/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
+++ b/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
@@ -44,6 +44,7 @@ import org.apache.asterix.common.annotations.TypeDataGen;
 import org.apache.asterix.common.annotations.UndeclaredFieldsDataGen;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.lang.common.base.Expression;
@@ -140,7 +141,6 @@ import org.apache.asterix.lang.sqlpp.optype.JoinType;
 import org.apache.asterix.lang.sqlpp.optype.SetOpType;
 import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
 import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.common.utils.Triple;
 import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
index ea65b20..a3a81de 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
@@ -35,7 +35,7 @@ import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
-import org.apache.asterix.metadata.entities.FeedPolicy;
+import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.entities.InternalDatasetDetails;
@@ -66,9 +66,9 @@ public class MetadataCache {
     protected final Map<FunctionSignature, Function> functions = new HashMap<FunctionSignature, Function>();
     // Key is adapter dataverse. Key of value map is the adapter name  
     protected final Map<String, Map<String, DatasourceAdapter>> adapters = new HashMap<String, Map<String, DatasourceAdapter>>();
-  
+
     // Key is DataverseName, Key of the value map is the Policy name   
-    protected final Map<String, Map<String, FeedPolicy>> feedPolicies = new HashMap<String, Map<String, FeedPolicy>>();
+    protected final Map<String, Map<String, FeedPolicyEntity>> feedPolicies = new HashMap<String, Map<String, FeedPolicyEntity>>();
     // Key is library dataverse. Key of value map is the library name
     protected final Map<String, Map<String, Library>> libraries = new HashMap<String, Map<String, Library>>();
     // Key is library dataverse. Key of value map is the feed name  
@@ -110,18 +110,17 @@ public class MetadataCache {
                         synchronized (datatypes) {
                             synchronized (functions) {
                                 synchronized (adapters) {
-                                        synchronized (libraries) {
-                                            synchronized (compactionPolicies) {
-                                                dataverses.clear();
-                                                nodeGroups.clear();
-                                                datasets.clear();
-                                                indexes.clear();
-                                                datatypes.clear();
-                                                functions.clear();
-                                                adapters.clear();
-                                                libraries.clear();
-                                                compactionPolicies.clear();
-                                            }
+                                    synchronized (libraries) {
+                                        synchronized (compactionPolicies) {
+                                            dataverses.clear();
+                                            nodeGroups.clear();
+                                            datasets.clear();
+                                            indexes.clear();
+                                            datatypes.clear();
+                                            functions.clear();
+                                            adapters.clear();
+                                            libraries.clear();
+                                            compactionPolicies.clear();
                                         }
                                     }
                                 }
@@ -131,9 +130,9 @@ public class MetadataCache {
                 }
             }
         }
-    
+    }
 
-    public Object addDataverseIfNotExists(Dataverse dataverse) {
+    public Dataverse addDataverseIfNotExists(Dataverse dataverse) {
         synchronized (dataverses) {
             synchronized (datasets) {
                 synchronized (datatypes) {
@@ -149,7 +148,7 @@ public class MetadataCache {
         }
     }
 
-    public Object addDatasetIfNotExists(Dataset dataset) {
+    public Dataset addDatasetIfNotExists(Dataset dataset) {
         synchronized (datasets) {
             synchronized (indexes) {
                 // Add the primary index associated with the dataset, if the dataset is an
@@ -175,13 +174,13 @@ public class MetadataCache {
         }
     }
 
-    public Object addIndexIfNotExists(Index index) {
+    public Index addIndexIfNotExists(Index index) {
         synchronized (indexes) {
             return addIndexIfNotExistsInternal(index);
         }
     }
 
-    public Object addDatatypeIfNotExists(Datatype datatype) {
+    public Datatype addDatatypeIfNotExists(Datatype datatype) {
         synchronized (datatypes) {
             Map<String, Datatype> m = datatypes.get(datatype.getDataverseName());
             if (m == null) {
@@ -195,7 +194,7 @@ public class MetadataCache {
         }
     }
 
-    public Object addNodeGroupIfNotExists(NodeGroup nodeGroup) {
+    public NodeGroup addNodeGroupIfNotExists(NodeGroup nodeGroup) {
         synchronized (nodeGroups) {
             if (!nodeGroups.containsKey(nodeGroup.getNodeGroupName())) {
                 return nodeGroups.put(nodeGroup.getNodeGroupName(), nodeGroup);
@@ -204,7 +203,7 @@ public class MetadataCache {
         }
     }
 
-    public Object addCompactionPolicyIfNotExists(CompactionPolicy compactionPolicy) {
+    public CompactionPolicy addCompactionPolicyIfNotExists(CompactionPolicy compactionPolicy) {
         synchronized (compactionPolicy) {
             Map<String, CompactionPolicy> p = compactionPolicies.get(compactionPolicy.getDataverseName());
             if (p == null) {
@@ -220,17 +219,17 @@ public class MetadataCache {
         }
     }
 
-    public Object dropCompactionPolicy(CompactionPolicy compactionPolicy) {
+    public CompactionPolicy dropCompactionPolicy(CompactionPolicy compactionPolicy) {
         synchronized (compactionPolicies) {
             Map<String, CompactionPolicy> p = compactionPolicies.get(compactionPolicy.getDataverseName());
             if (p != null && p.get(compactionPolicy.getPolicyName()) != null) {
-                return p.remove(compactionPolicy).getPolicyName();
+                return p.remove(compactionPolicy);
             }
             return null;
         }
     }
 
-    public Object dropDataverse(Dataverse dataverse) {
+    public Dataverse dropDataverse(Dataverse dataverse) {
         synchronized (dataverses) {
             synchronized (datasets) {
                 synchronized (indexes) {
@@ -238,26 +237,25 @@ public class MetadataCache {
                         synchronized (functions) {
                             synchronized (adapters) {
                                 synchronized (libraries) {
-                                        synchronized (feeds) {
-                                            synchronized (compactionPolicies) {
-                                                datasets.remove(dataverse.getDataverseName());
-                                                indexes.remove(dataverse.getDataverseName());
-                                                datatypes.remove(dataverse.getDataverseName());
-                                                adapters.remove(dataverse.getDataverseName());
-                                                compactionPolicies.remove(dataverse.getDataverseName());
-                                                List<FunctionSignature> markedFunctionsForRemoval = new ArrayList<FunctionSignature>();
-                                                for (FunctionSignature signature : functions.keySet()) {
-                                                    if (signature.getNamespace().equals(dataverse.getDataverseName())) {
-                                                        markedFunctionsForRemoval.add(signature);
-                                                    }
-                                                }
-                                                for (FunctionSignature signature : markedFunctionsForRemoval) {
-                                                    functions.remove(signature);
+                                    synchronized (feeds) {
+                                        synchronized (compactionPolicies) {
+                                            datasets.remove(dataverse.getDataverseName());
+                                            indexes.remove(dataverse.getDataverseName());
+                                            datatypes.remove(dataverse.getDataverseName());
+                                            adapters.remove(dataverse.getDataverseName());
+                                            compactionPolicies.remove(dataverse.getDataverseName());
+                                            List<FunctionSignature> markedFunctionsForRemoval = new ArrayList<FunctionSignature>();
+                                            for (FunctionSignature signature : functions.keySet()) {
+                                                if (signature.getNamespace().equals(dataverse.getDataverseName())) {
+                                                    markedFunctionsForRemoval.add(signature);
                                                 }
-                                                libraries.remove(dataverse.getDataverseName());
-                                                feeds.remove(dataverse.getDataverseName());
-                                                return dataverses.remove(dataverse.getDataverseName());
                                             }
+                                            for (FunctionSignature signature : markedFunctionsForRemoval) {
+                                                functions.remove(signature);
+                                            }
+                                            libraries.remove(dataverse.getDataverseName());
+                                            feeds.remove(dataverse.getDataverseName());
+                                            return dataverses.remove(dataverse.getDataverseName());
                                         }
                                     }
                                 }
@@ -267,9 +265,9 @@ public class MetadataCache {
                 }
             }
         }
-    
+    }
 
-    public Object dropDataset(Dataset dataset) {
+    public Dataset dropDataset(Dataset dataset) {
         synchronized (datasets) {
             synchronized (indexes) {
 
@@ -289,7 +287,7 @@ public class MetadataCache {
         }
     }
 
-    public Object dropIndex(Index index) {
+    public Index dropIndex(Index index) {
         synchronized (indexes) {
             Map<String, Map<String, Index>> datasetMap = indexes.get(index.getDataverseName());
             if (datasetMap == null) {
@@ -304,7 +302,7 @@ public class MetadataCache {
         }
     }
 
-    public Object dropDatatype(Datatype datatype) {
+    public Datatype dropDatatype(Datatype datatype) {
         synchronized (datatypes) {
             Map<String, Datatype> m = datatypes.get(datatype.getDataverseName());
             if (m == null) {
@@ -314,7 +312,7 @@ public class MetadataCache {
         }
     }
 
-    public Object dropNodeGroup(NodeGroup nodeGroup) {
+    public NodeGroup dropNodeGroup(NodeGroup nodeGroup) {
         synchronized (nodeGroups) {
             return nodeGroups.remove(nodeGroup.getNodeGroupName());
         }
@@ -405,11 +403,11 @@ public class MetadataCache {
      */
     protected class MetadataLogicalOperation {
         // Entity to be added/dropped.
-        public final IMetadataEntity entity;
+        public final IMetadataEntity<?> entity;
         // True for add, false for drop.
         public final boolean isAdd;
 
-        public MetadataLogicalOperation(IMetadataEntity entity, boolean isAdd) {
+        public MetadataLogicalOperation(IMetadataEntity<?> entity, boolean isAdd) {
             this.entity = entity;
             this.isAdd = isAdd;
         }
@@ -431,7 +429,7 @@ public class MetadataCache {
         }
     }
 
-    public Object addFunctionIfNotExists(Function function) {
+    public Function addFunctionIfNotExists(Function function) {
         synchronized (functions) {
             FunctionSignature signature = new FunctionSignature(function.getDataverseName(), function.getName(),
                     function.getArity());
@@ -443,7 +441,7 @@ public class MetadataCache {
         }
     }
 
-    public Object dropFunction(Function function) {
+    public Function dropFunction(Function function) {
         synchronized (functions) {
             FunctionSignature signature = new FunctionSignature(function.getDataverseName(), function.getName(),
                     function.getArity());
@@ -455,11 +453,11 @@ public class MetadataCache {
         }
     }
 
-    public Object addFeedPolicyIfNotExists(FeedPolicy feedPolicy) {
+    public Object addFeedPolicyIfNotExists(FeedPolicyEntity feedPolicy) {
         synchronized (feedPolicy) {
-            Map<String, FeedPolicy> p = feedPolicies.get(feedPolicy.getDataverseName());
+            Map<String, FeedPolicyEntity> p = feedPolicies.get(feedPolicy.getDataverseName());
             if (p == null) {
-                p = new HashMap<String, FeedPolicy>();
+                p = new HashMap<String, FeedPolicyEntity>();
                 p.put(feedPolicy.getPolicyName(), feedPolicy);
                 feedPolicies.put(feedPolicy.getDataverseName(), p);
             } else {
@@ -471,9 +469,9 @@ public class MetadataCache {
         }
     }
 
-    public Object dropFeedPolicy(FeedPolicy feedPolicy) {
+    public Object dropFeedPolicy(FeedPolicyEntity feedPolicy) {
         synchronized (feedPolicies) {
-            Map<String, FeedPolicy> p = feedPolicies.get(feedPolicy.getDataverseName());
+            Map<String, FeedPolicyEntity> p = feedPolicies.get(feedPolicy.getDataverseName());
             if (p != null && p.get(feedPolicy.getPolicyName()) != null) {
                 return p.remove(feedPolicy).getPolicyName();
             }
@@ -481,10 +479,10 @@ public class MetadataCache {
         }
     }
 
-    public Object addAdapterIfNotExists(DatasourceAdapter adapter) {
+    public DatasourceAdapter addAdapterIfNotExists(DatasourceAdapter adapter) {
         synchronized (adapters) {
-            Map<String, DatasourceAdapter> adaptersInDataverse = adapters.get(adapter.getAdapterIdentifier()
-                    .getNamespace());
+            Map<String, DatasourceAdapter> adaptersInDataverse = adapters
+                    .get(adapter.getAdapterIdentifier().getNamespace());
             if (adaptersInDataverse == null) {
                 adaptersInDataverse = new HashMap<String, DatasourceAdapter>();
                 adapters.put(adapter.getAdapterIdentifier().getNamespace(), adaptersInDataverse);
@@ -497,10 +495,10 @@ public class MetadataCache {
         }
     }
 
-    public Object dropAdapter(DatasourceAdapter adapter) {
+    public DatasourceAdapter dropAdapter(DatasourceAdapter adapter) {
         synchronized (adapters) {
-            Map<String, DatasourceAdapter> adaptersInDataverse = adapters.get(adapter.getAdapterIdentifier()
-                    .getNamespace());
+            Map<String, DatasourceAdapter> adaptersInDataverse = adapters
+                    .get(adapter.getAdapterIdentifier().getNamespace());
             if (adaptersInDataverse != null) {
                 return adaptersInDataverse.remove(adapter.getAdapterIdentifier().getName());
             }
@@ -508,10 +506,7 @@ public class MetadataCache {
         }
     }
 
-  
-
-
-    public Object addLibraryIfNotExists(Library library) {
+    public Library addLibraryIfNotExists(Library library) {
         synchronized (libraries) {
             Map<String, Library> libsInDataverse = libraries.get(library.getDataverseName());
             boolean needToAddd = (libsInDataverse == null || libsInDataverse.get(library.getName()) != null);
@@ -526,7 +521,7 @@ public class MetadataCache {
         }
     }
 
-    public Object dropLibrary(Library library) {
+    public Library dropLibrary(Library library) {
         synchronized (libraries) {
             Map<String, Library> librariesInDataverse = libraries.get(library.getDataverseName());
             if (librariesInDataverse != null) {
@@ -536,12 +531,11 @@ public class MetadataCache {
         }
     }
 
-    public Object addFeedIfNotExists(Feed feed) {
-        // TODO Auto-generated method stub
+    public Feed addFeedIfNotExists(Feed feed) {
         return null;
     }
 
-    public Object dropFeed(Feed feed) {
+    public Feed dropFeed(Feed feed) {
         synchronized (feeds) {
             Map<String, Feed> feedsInDataverse = feeds.get(feed.getDataverseName());
             if (feedsInDataverse != null) {
@@ -551,7 +545,7 @@ public class MetadataCache {
         }
     }
 
-    private Object addIndexIfNotExistsInternal(Index index) {
+    private Index addIndexIfNotExistsInternal(Index index) {
         Map<String, Map<String, Index>> datasetMap = indexes.get(index.getDataverseName());
         if (datasetMap == null) {
             datasetMap = new HashMap<String, Map<String, Index>>();
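
The hunks above narrow the return types of MetadataCache's add*/drop* helpers from Object to the concrete entity types, so callers can use the result without a cast. A minimal, self-contained sketch of the same "typed add-if-not-exists" pattern (Entity and TypedCache are hypothetical illustration names, not AsterixDB classes):

    import java.util.HashMap;
    import java.util.Map;

    class Entity {
        final String name;
        Entity(String name) { this.name = name; }
    }

    class TypedCache {
        private final Map<String, Entity> entities = new HashMap<String, Entity>();

        // Returning Entity instead of Object spares callers an explicit cast and lets the
        // compiler catch type mix-ups; mirrors addDataverseIfNotExists & co. above.
        public Entity addEntityIfNotExists(Entity e) {
            synchronized (entities) {
                if (!entities.containsKey(e.name)) {
                    return entities.put(e.name, e); // put returns the previous mapping, i.e. null here
                }
                return entities.get(e.name);
            }
        }
    }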


[14/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionRequest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionRequest.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionRequest.java
new file mode 100644
index 0000000..5f22a72
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedConnectionRequest.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * A request for connecting a feed to a dataset.
+ */
+public class FeedConnectionRequest {
+
+    public enum ConnectionStatus {
+        /** initial state upon creating a connection request **/
+        INITIALIZED,
+
+        /** connection established; feed is receiving data **/
+        ACTIVE,
+
+        /** connection removed; feed is not receiving data **/
+        INACTIVE,
+
+        /** connection request failed **/
+        FAILED
+    }
+
+    /** Feed joint on the feed pipeline that serves as the source for this subscription **/
+    private final FeedJointKey feedJointKey;
+
+    /** Location in the source feed pipeline from which feed tuples are received. **/
+    private final ConnectionLocation connectionLocation;
+
+    /** List of functions that need to be applied in sequence after the data hand-off at the source feed joint. **/
+    private final List<String> functionsToApply;
+
+    /** Status associated with the subscription. */
+    private ConnectionStatus connectionStatus;
+
+    /** Name of the policy that governs feed ingestion **/
+    private final String policy;
+
+    /** Policy associated with a feed connection **/
+    private final Map<String, String> policyParameters;
+
+    /** Target dataset associated with the connection request **/
+    private final String targetDataset;
+
+    private final FeedId receivingFeedId;
+
+    
+    public FeedConnectionRequest(FeedJointKey feedPointKey, ConnectionLocation connectionLocation,
+            List<String> functionsToApply, String targetDataset, String policy, Map<String, String> policyParameters,
+            FeedId receivingFeedId) {
+        this.feedJointKey = feedPointKey;
+        this.connectionLocation = connectionLocation;
+        this.functionsToApply = functionsToApply;
+        this.targetDataset = targetDataset;
+        this.policy = policy;
+        this.policyParameters = policyParameters;
+        this.receivingFeedId = receivingFeedId;
+        this.connectionStatus = ConnectionStatus.INITIALIZED;
+    }
+
+    public FeedJointKey getFeedJointKey() {
+        return feedJointKey;
+    }
+
+    public ConnectionStatus getConnectionStatus() {
+        return connectionStatus;
+    }
+
+    public void setSubscriptionStatus(ConnectionStatus connectionStatus) {
+        this.connectionStatus = connectionStatus;
+    }
+
+    public String getPolicy() {
+        return policy;
+    }
+
+    public String getTargetDataset() {
+        return targetDataset;
+    }
+
+    public ConnectionLocation getSubscriptionLocation() {
+        return connectionLocation;
+    }
+
+    public FeedId getReceivingFeedId() {
+        return receivingFeedId;
+    }
+
+    public Map<String, String> getPolicyParameters() {
+        return policyParameters;
+    }
+
+    public List<String> getFunctionsToApply() {
+        return functionsToApply;
+    }
+
+    @Override
+    public String toString() {
+        return "Feed Connection Request " + feedJointKey + " [" + connectionLocation + "]" + " Apply ("
+                + StringUtils.join(functionsToApply, ",") + ")";
+    }
+
+}
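
As a hedged illustration of how the new request type is meant to be populated (the ConnectionLocation constant is passed in because its values are not part of this hunk, and the dataverse/feed/dataset/policy names below are made up):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
    import org.apache.asterix.external.feed.management.FeedConnectionRequest;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.management.FeedJointKey;

    public class FeedConnectionRequestSketch {
        // Builds a request to connect feed FeedDV.TwitterFeed to dataset Tweets under a
        // hypothetical "Basic" ingestion policy with no applied functions.
        public static FeedConnectionRequest newRequest(ConnectionLocation location) {
            FeedId feedId = new FeedId("FeedDV", "TwitterFeed");
            FeedJointKey jointKey = new FeedJointKey(feedId, Collections.<String> emptyList());
            Map<String, String> policyParameters = new HashMap<String, String>();
            return new FeedConnectionRequest(jointKey, location, Collections.<String> emptyList(),
                    "Tweets", "Basic", policyParameters, feedId);
        }
    }

The request starts out in ConnectionStatus.INITIALIZED and is moved through ACTIVE/INACTIVE via setSubscriptionStatus(...).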

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedId.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedId.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedId.java
new file mode 100644
index 0000000..3145d72
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedId.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.io.Serializable;
+
+/**
+ * A unique identifier for a data feed.
+ */
+public class FeedId implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+
+    private final String dataverse;
+    private final String feedName;
+
+    public FeedId(String dataverse, String feedName) {
+        this.dataverse = dataverse;
+        this.feedName = feedName;
+    }
+
+    public String getDataverse() {
+        return dataverse;
+    }
+
+    public String getFeedName() {
+        return feedName;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null || !(o instanceof FeedId)) {
+            return false;
+        }
+        if (this == o || ((FeedId) o).getFeedName().equals(feedName) && ((FeedId) o).getDataverse().equals(dataverse)) {
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        return toString().hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return dataverse + "." + feedName;
+    }
+}
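
Because hashCode() delegates to toString(), two FeedId instances over the same dataverse and feed name compare equal and hash alike, so they can safely key hash-based collections. A quick check with illustrative names:

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.asterix.external.feed.management.FeedId;

    public class FeedIdSketch {
        public static void main(String[] args) {
            Set<FeedId> ids = new HashSet<FeedId>();
            ids.add(new FeedId("FeedDV", "TwitterFeed"));
            // Value-based equality: a second instance with the same fields is found.
            System.out.println(ids.contains(new FeedId("FeedDV", "TwitterFeed"))); // true
            System.out.println(ids.size()); // 1
        }
    }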

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedInfo.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedInfo.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedInfo.java
new file mode 100644
index 0000000..93f81d9
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedInfo.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobInfo;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedInfo {
+    public JobSpecification jobSpec;
+    public JobInfo jobInfo;
+    public JobId jobId;
+    public FeedInfoType infoType;
+    public State state;
+
+    public enum State {
+        ACTIVE,
+        INACTIVE
+    }
+
+    public enum FeedInfoType {
+        INTAKE,
+        COLLECT
+    }
+
+    public FeedInfo(JobSpecification jobSpec, JobId jobId, FeedInfoType infoType) {
+        this.jobSpec = jobSpec;
+        this.jobId = jobId;
+        this.infoType = infoType;
+        this.state = State.INACTIVE;
+    }
+
+    @Override
+    public String toString() {
+        return " job id " + jobId;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedJointKey.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedJointKey.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedJointKey.java
new file mode 100644
index 0000000..7b08599
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedJointKey.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.List;
+
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * Represents a unique identifier for a feed joint. A feed joint is a logical entity located
+ * along a feed ingestion pipeline at a point where the tuples flowing past it constitute
+ * the feed. The feed joint acts as a network tap that allows the flowing data to be
+ * routed along multiple paths.
+ */
+public class FeedJointKey {
+
+    private final FeedId primaryFeedId;
+    private final List<String> appliedFunctions;
+    private final String stringRep;
+
+    public FeedJointKey(FeedId feedId, List<String> appliedFunctions) {
+        this.primaryFeedId = feedId;
+        this.appliedFunctions = appliedFunctions;
+        StringBuilder builder = new StringBuilder();
+        builder.append(feedId);
+        builder.append(":");
+        builder.append(StringUtils.join(appliedFunctions, ':'));
+        stringRep = builder.toString();
+    }
+
+    public FeedId getFeedId() {
+        return primaryFeedId;
+    }
+
+    public List<String> getAppliedFunctions() {
+        return appliedFunctions;
+    }
+
+    public String getStringRep() {
+        return stringRep;
+    }
+
+    @Override
+    public final String toString() {
+        return stringRep;
+    }
+
+    @Override
+    public int hashCode() {
+        return stringRep.hashCode();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o)
+            return true;
+        if (o == null || !(o instanceof FeedJointKey)) {
+            return false;
+        }
+        return stringRep.equals(((FeedJointKey) o).stringRep);
+    }
+
+}
\ No newline at end of file
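
The key's identity is its string form: the feed id followed by the applied functions, joined with ':'. A small sketch with made-up function names:

    import java.util.Arrays;

    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.management.FeedJointKey;

    public class FeedJointKeySketch {
        public static void main(String[] args) {
            FeedJointKey key = new FeedJointKey(new FeedId("FeedDV", "TwitterFeed"),
                    Arrays.asList("dedup_fn", "filter_fn"));
            // Prints "FeedDV.TwitterFeed:dedup_fn:filter_fn"; equals() and hashCode()
            // operate on this same string representation.
            System.out.println(key);
        }
    }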

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedLifecycleEventSubscriber.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedLifecycleEventSubscriber.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedLifecycleEventSubscriber.java
new file mode 100644
index 0000000..6e3cebc
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedLifecycleEventSubscriber.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.Iterator;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
+
+public class FeedLifecycleEventSubscriber implements IFeedLifecycleEventSubscriber {
+
+    private LinkedBlockingQueue<FeedLifecycleEvent> inbox;
+
+    public FeedLifecycleEventSubscriber() {
+        this.inbox = new LinkedBlockingQueue<FeedLifecycleEvent>();
+    }
+
+    @Override
+    public void handleFeedEvent(FeedLifecycleEvent event) {
+        inbox.add(event);
+    }
+
+    @Override
+    public void assertEvent(FeedLifecycleEvent event) throws AsterixException, InterruptedException {
+        boolean eventOccurred = false;
+        FeedLifecycleEvent e = null;
+        Iterator<FeedLifecycleEvent> eventsSoFar = inbox.iterator();
+        while (eventsSoFar.hasNext()) {
+            e = eventsSoFar.next();
+            assertNoFailure(e);
+            eventOccurred = e.equals(event);
+        }
+
+        while (!eventOccurred) {
+            e = inbox.take();
+            eventOccurred = e.equals(event);
+            if (!eventOccurred) {
+                assertNoFailure(e);
+            }
+        }
+    }
+
+    private void assertNoFailure(FeedLifecycleEvent e) throws AsterixException {
+        if (e.equals(FeedLifecycleEvent.FEED_INTAKE_FAILURE) || e.equals(FeedLifecycleEvent.FEED_COLLECT_FAILURE)) {
+            throw new AsterixException("Failure in feed");
+        }
+    }
+
+}
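
assertEvent(...) above first scans events that have already been delivered and then blocks on the inbox until the expected event shows up, throwing AsterixException as soon as an intake or collect failure event is seen. A hedged usage sketch (the expected event is passed in, and FeedLifecycleEvent is assumed to be the nested enum of IFeedLifecycleEventSubscriber, as its unqualified use above suggests):

    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
    import org.apache.asterix.external.feed.management.FeedLifecycleEventSubscriber;

    public class FeedEventWaitSketch {
        // The subscriber is handed to whatever component invokes handleFeedEvent(...);
        // this call then blocks the waiting thread until 'expected' has been observed,
        // or throws AsterixException if a failure event arrives first.
        public static void awaitEvent(FeedLifecycleEventSubscriber subscriber, FeedLifecycleEvent expected)
                throws AsterixException, InterruptedException {
            subscriber.assertEvent(expected);
        }
    }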

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedManager.java
new file mode 100644
index 0000000..5095e7d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedManager.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.config.AsterixFeedProperties;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.api.IFeedConnectionManager;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedMemoryManager;
+import org.apache.asterix.external.feed.api.IFeedMessageService;
+import org.apache.asterix.external.feed.api.IFeedMetadataManager;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector;
+import org.apache.asterix.external.feed.api.IFeedSubscriptionManager;
+import org.apache.asterix.external.feed.message.FeedMessageService;
+import org.apache.asterix.external.feed.watch.FeedMetricCollector;
+import org.apache.asterix.external.feed.watch.NodeLoadReportService;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * An implementation of the IFeedManager interface.
+ * Provides the necessary central repository for registering/retrieving
+ * artifacts/services associated with a feed.
+ */
+public class FeedManager implements IFeedManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedManager.class.getName());
+
+    private final IFeedSubscriptionManager feedSubscriptionManager;
+
+    private final IFeedConnectionManager feedConnectionManager;
+
+    private final IFeedMemoryManager feedMemoryManager;
+
+    private final IFeedMetricCollector feedMetricCollector;
+
+    private final IFeedMetadataManager feedMetadataManager;
+
+    private final IFeedMessageService feedMessageService;
+
+    private final NodeLoadReportService nodeLoadReportService;
+
+    private final AsterixFeedProperties asterixFeedProperties;
+
+    private final String nodeId;
+
+    private final int frameSize;
+
+    public FeedManager(String nodeId, AsterixFeedProperties feedProperties, int frameSize)
+            throws AsterixException, HyracksDataException {
+        this.nodeId = nodeId;
+        this.feedSubscriptionManager = new FeedSubscriptionManager(nodeId);
+        this.feedConnectionManager = new FeedConnectionManager(nodeId);
+        this.feedMetadataManager = new FeedMetadataManager(nodeId);
+        this.feedMemoryManager = new FeedMemoryManager(nodeId, feedProperties, frameSize);
+        String ccClusterIp = AsterixClusterProperties.INSTANCE.getCluster() != null
+                ? AsterixClusterProperties.INSTANCE.getCluster().getMasterNode().getClusterIp() : "localhost";
+        this.feedMessageService = new FeedMessageService(feedProperties, nodeId, ccClusterIp);
+        this.nodeLoadReportService = new NodeLoadReportService(nodeId, this);
+        try {
+            this.feedMessageService.start();
+            this.nodeLoadReportService.start();
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to start feed services " + e.getMessage());
+            }
+            e.printStackTrace();
+        }
+        this.feedMetricCollector = new FeedMetricCollector(nodeId);
+        this.frameSize = frameSize;
+        this.asterixFeedProperties = feedProperties;
+    }
+
+    @Override
+    public IFeedSubscriptionManager getFeedSubscriptionManager() {
+        return feedSubscriptionManager;
+    }
+
+    @Override
+    public IFeedConnectionManager getFeedConnectionManager() {
+        return feedConnectionManager;
+    }
+
+    @Override
+    public IFeedMemoryManager getFeedMemoryManager() {
+        return feedMemoryManager;
+    }
+
+    @Override
+    public IFeedMetricCollector getFeedMetricCollector() {
+        return feedMetricCollector;
+    }
+
+    public int getFrameSize() {
+        return frameSize;
+    }
+
+    @Override
+    public IFeedMetadataManager getFeedMetadataManager() {
+        return feedMetadataManager;
+    }
+
+    @Override
+    public IFeedMessageService getFeedMessageService() {
+        return feedMessageService;
+    }
+
+    @Override
+    public String toString() {
+        return "FeedManager " + "[" + nodeId + "]";
+    }
+
+    @Override
+    public AsterixFeedProperties getAsterixFeedProperties() {
+        return asterixFeedProperties;
+    }
+
+}
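
A hedged wiring sketch for the node-side manager; the AsterixFeedProperties instance is assumed to come from the node's runtime configuration (its construction is outside this patch), and the node id and frame size below are illustrative:

    import org.apache.asterix.common.config.AsterixFeedProperties;
    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.feed.api.IFeedManager;
    import org.apache.asterix.external.feed.api.IFeedMemoryManager;
    import org.apache.asterix.external.feed.management.FeedManager;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class FeedManagerWiringSketch {
        public static IFeedMemoryManager memoryManagerFor(AsterixFeedProperties feedProperties)
                throws AsterixException, HyracksDataException {
            // 32768 stands in for the configured frame size.
            IFeedManager feedManager = new FeedManager("nc1", feedProperties, 32768);
            return feedManager.getFeedMemoryManager();
        }
    }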

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMemoryManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMemoryManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMemoryManager.java
new file mode 100644
index 0000000..de9d22c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMemoryManager.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.config.AsterixFeedProperties;
+import org.apache.asterix.external.feed.api.IFeedMemoryComponent;
+import org.apache.asterix.external.feed.api.IFeedMemoryManager;
+import org.apache.asterix.external.feed.api.IFeedMemoryComponent.Type;
+import org.apache.asterix.external.feed.dataflow.DataBucketPool;
+import org.apache.asterix.external.feed.dataflow.FrameCollection;
+
+public class FeedMemoryManager implements IFeedMemoryManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMemoryManager.class.getName());
+    private static final int ALLOCATION_INCREMENT = 10;
+
+    private final AtomicInteger componentId = new AtomicInteger(0);
+    private final String nodeId;
+    private final int budget;
+    private final int frameSize;
+
+    private int committed;
+
+    public FeedMemoryManager(String nodeId, AsterixFeedProperties feedProperties, int frameSize) {
+        this.nodeId = nodeId;
+        this.frameSize = frameSize;
+        budget = (int) feedProperties.getMemoryComponentGlobalBudget() / frameSize;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Feed Memory budget " + budget + " frames (frame size=" + frameSize + ")");
+        }
+    }
+
+    @Override
+    public synchronized IFeedMemoryComponent getMemoryComponent(Type type) {
+        IFeedMemoryComponent memoryComponent = null;
+        boolean valid = false;
+        switch (type) {
+            case COLLECTION:
+                valid = committed + START_COLLECTION_SIZE <= budget;
+                if (valid) {
+                    memoryComponent = new FrameCollection(componentId.incrementAndGet(), this, START_COLLECTION_SIZE);
+                }
+                break;
+            case POOL:
+                valid = committed + START_POOL_SIZE <= budget;
+                if (valid) {
+                    memoryComponent = new DataBucketPool(componentId.incrementAndGet(), this, START_POOL_SIZE,
+                            frameSize);
+                }
+                committed += START_POOL_SIZE;
+                break;
+        }
+        if (!valid) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to allocate memory component of type" + type);
+            }
+        }
+        return valid ? memoryComponent : null;
+    }
+
+    @Override
+    public synchronized boolean expandMemoryComponent(IFeedMemoryComponent memoryComponent) {
+        if (committed + ALLOCATION_INCREMENT > budget) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Memory budget " + budget + " is exhausted. Space left: " + (budget - committed)
+                        + " frames.");
+            }
+            return false;
+        } else {
+            memoryComponent.expand(ALLOCATION_INCREMENT);
+            committed += ALLOCATION_INCREMENT;
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Expanded memory component " + memoryComponent + " by " + ALLOCATION_INCREMENT + " " + this);
+            }
+            return true;
+        }
+    }
+
+    @Override
+    public synchronized void releaseMemoryComponent(IFeedMemoryComponent memoryComponent) {
+        int delta = memoryComponent.getTotalAllocation();
+        committed -= delta;
+        memoryComponent.reset();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Reset " + memoryComponent + " and reclaimed " + delta + " frames " + this);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "FeedMemoryManager  [" + nodeId + "]" + "(" + committed + "/" + budget + ")";
+    }
+
+}
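
To make the budget arithmetic concrete: with a hypothetical global feed memory budget of 64 MB and a 32 KB frame size, budget = 67108864 / 32768 = 2048 frames. A new pool commits START_POOL_SIZE frames up front, and every later expandMemoryComponent(...) call commits ALLOCATION_INCREMENT = 10 additional frames; once committed + 10 would exceed the 2048-frame budget, the expansion is refused.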

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMetadataManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMetadataManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMetadataManager.java
new file mode 100644
index 0000000..34ae461
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedMetadataManager.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.Date;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedMetadataManager;
+import org.apache.asterix.external.feed.message.XAQLFeedMessage;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.om.base.ARecord;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.base.IAObject;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedMetadataManager implements IFeedMetadataManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMetadataManager.class.getName());
+
+    private final String nodeId;
+    private ARecordType recordType;
+
+    public FeedMetadataManager(String nodeId) throws AsterixException, HyracksDataException {
+        this.nodeId = nodeId;
+        String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple", "message",
+                "timestamp" };
+        IAType[] fieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
+
+        recordType = new ARecordType(FeedConstants.FAILED_TUPLE_DATASET_TYPE, fieldNames, fieldTypes, true);
+    }
+
+    @Override
+    public void logTuple(FeedConnectionId connectionId, String tuple, String message, IFeedManager feedManager)
+            throws AsterixException {
+        try {
+            AString id = new AString("1");
+            AString dataverseValue = new AString(connectionId.getFeedId().getDataverse());
+            AString feedValue = new AString(connectionId.getFeedId().getFeedName());
+            AString targetDatasetValue = new AString(connectionId.getDatasetName());
+            AString tupleValue = new AString(tuple);
+            AString messageValue = new AString(message);
+            AString dateTime = new AString(new Date().toString());
+
+            IAObject[] fields = new IAObject[] { id, dataverseValue, feedValue, targetDatasetValue, tupleValue,
+                    messageValue, dateTime };
+            ARecord record = new ARecord(recordType, fields);
+            StringBuilder builder = new StringBuilder();
+            builder.append("use dataverse " + FeedConstants.FEEDS_METADATA_DV + ";" + "\n");
+            builder.append("insert into dataset " + FeedConstants.FAILED_TUPLE_DATASET + " ");
+            builder.append(" (" + recordToString(record) + ")");
+            builder.append(";");
+
+            XAQLFeedMessage xAqlMessage = new XAQLFeedMessage(connectionId, builder.toString());
+            feedManager.getFeedMessageService().sendMessage(xAqlMessage);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(" Sent " + xAqlMessage.toJSON());
+            }
+        } catch (Exception pe) {
+            throw new AsterixException(pe);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "FeedMetadataManager [" + nodeId + "]";
+    }
+
+    private String recordToString(ARecord record) {
+        String[] fieldNames = record.getType().getFieldNames();
+        StringBuilder sb = new StringBuilder();
+        sb.append("{ ");
+        for (int i = 0; i < fieldNames.length; i++) {
+            if (i > 0) {
+                sb.append(", ");
+            }
+            sb.append("\"" + fieldNames[i] + "\"");
+            sb.append(": ");
+            switch (record.getType().getFieldTypes()[i].getTypeTag()) {
+                case STRING:
+                    sb.append("\"" + ((AString) record.getValueByPos(i)).getStringValue() + "\"");
+                    break;
+                default:
+                    break;
+            }
+        }
+        sb.append(" }");
+        return sb.toString();
+    }
+}
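
A minimal sketch (not part of the patch) of how a node-local component might log a failed tuple through FeedMetadataManager; the node id, feed, and dataset names below are hypothetical, and FeedConnectionId/FeedId are built the same way the message classes later in this change build them:

    import org.apache.asterix.external.feed.api.IFeedManager;
    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.management.FeedMetadataManager;

    public class FailedTupleLoggingSketch {
        public static void logFailedTuple(IFeedManager feedManager) throws Exception {
            FeedMetadataManager metadataManager = new FeedMetadataManager("nc1");
            FeedConnectionId connectionId =
                    new FeedConnectionId(new FeedId("feeds", "TwitterFeed"), "Tweets");
            // Wraps an AQL insert into the failed-tuple dataset in an XAQLFeedMessage
            // and ships it via the feed manager's message service.
            metadataManager.logTuple(connectionId, "{ \"id\": 123 }", "parse error", feedManager);
        }
    }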

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedRuntimeManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedRuntimeManager.java
new file mode 100644
index 0000000..f7e98f7
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedRuntimeManager.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedConnectionManager;
+import org.apache.asterix.external.feed.runtime.FeedRuntime;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+
+public class FeedRuntimeManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedRuntimeManager.class.getName());
+
+    private final FeedConnectionId connectionId;
+    private final IFeedConnectionManager connectionManager;
+    private final Map<FeedRuntimeId, FeedRuntime> feedRuntimes;
+
+    private final ExecutorService executorService;
+
+    public FeedRuntimeManager(FeedConnectionId connectionId, IFeedConnectionManager feedConnectionManager) {
+        this.connectionId = connectionId;
+        this.feedRuntimes = new ConcurrentHashMap<FeedRuntimeId, FeedRuntime>();
+        this.executorService = Executors.newCachedThreadPool();
+        this.connectionManager = feedConnectionManager;
+    }
+
+    public void close() throws IOException {
+        if (executorService != null) {
+            executorService.shutdownNow();
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Shut down executor service for: " + connectionId);
+            }
+        }
+    }
+
+    public FeedRuntime getFeedRuntime(FeedRuntimeId runtimeId) {
+        return feedRuntimes.get(runtimeId);
+    }
+
+    public void registerFeedRuntime(FeedRuntimeId runtimeId, FeedRuntime feedRuntime) {
+        feedRuntimes.put(runtimeId, feedRuntime);
+    }
+
+    public synchronized void deregisterFeedRuntime(FeedRuntimeId runtimeId) {
+        feedRuntimes.remove(runtimeId);
+        if (feedRuntimes.isEmpty()) {
+            connectionManager.deregisterFeed(connectionId);
+        }
+    }
+
+    public ExecutorService getExecutorService() {
+        return executorService;
+    }
+
+    public Set<FeedRuntimeId> getFeedRuntimes() {
+        return feedRuntimes.keySet();
+    }
+
+}
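
A usage sketch (illustrative only) of the runtime registry above; the runtime id, runtime instance, and work Runnable are assumed to come from the operator that owns them:

    import java.util.concurrent.ExecutorService;

    import org.apache.asterix.external.feed.management.FeedRuntimeManager;
    import org.apache.asterix.external.feed.runtime.FeedRuntime;
    import org.apache.asterix.external.feed.runtime.FeedRuntimeId;

    public class RuntimeRegistrySketch {
        public static void track(FeedRuntimeManager manager, FeedRuntimeId runtimeId, FeedRuntime runtime,
                Runnable work) {
            manager.registerFeedRuntime(runtimeId, runtime);
            // Per-connection work runs on the manager's cached thread pool.
            ExecutorService executor = manager.getExecutorService();
            executor.execute(work);
            // When the operator is torn down, removing the last runtime also
            // deregisters the whole feed connection (see deregisterFeedRuntime above).
            manager.deregisterFeedRuntime(runtimeId);
        }
    }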

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedSubscriptionManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedSubscriptionManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedSubscriptionManager.java
new file mode 100644
index 0000000..e402f92
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedSubscriptionManager.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedSubscriptionManager;
+import org.apache.asterix.external.feed.api.ISubscribableRuntime;
+import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
+
+public class FeedSubscriptionManager implements IFeedSubscriptionManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedSubscriptionManager.class.getName());
+
+    private final String nodeId;
+
+    private final Map<SubscribableFeedRuntimeId, ISubscribableRuntime> subscribableRuntimes;
+
+    public FeedSubscriptionManager(String nodeId) {
+        this.nodeId = nodeId;
+        this.subscribableRuntimes = new HashMap<SubscribableFeedRuntimeId, ISubscribableRuntime>();
+    }
+
+    @Override
+    public void registerFeedSubscribableRuntime(ISubscribableRuntime subscribableRuntime) {
+        SubscribableFeedRuntimeId sid = (SubscribableFeedRuntimeId) subscribableRuntime.getRuntimeId();
+        if (!subscribableRuntimes.containsKey(sid)) {
+            subscribableRuntimes.put(sid, subscribableRuntime);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered feed subscribable runtime " + subscribableRuntime);
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Feed ingestion runtime " + subscribableRuntime + " already registered.");
+            }
+        }
+    }
+
+    @Override
+    public ISubscribableRuntime getSubscribableRuntime(SubscribableFeedRuntimeId subscribableFeedRuntimeId) {
+        return subscribableRuntimes.get(subscribableFeedRuntimeId);
+    }
+
+    @Override
+    public void deregisterFeedSubscribableRuntime(SubscribableFeedRuntimeId ingestionId) {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("De-registered feed subscribable runtime " + ingestionId);
+        }
+        subscribableRuntimes.remove(ingestionId);
+    }
+
+    @Override
+    public String toString() {
+        return "FeedSubscriptionManager [" + nodeId + "]";
+    }
+
+}
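
A small sketch, under the assumption that the runtime's own id is the registration key (which is exactly what registerFeedSubscribableRuntime does internally):

    import org.apache.asterix.external.feed.api.ISubscribableRuntime;
    import org.apache.asterix.external.feed.management.FeedSubscriptionManager;
    import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;

    public class SubscriptionRegistrySketch {
        public static ISubscribableRuntime registerAndLookup(FeedSubscriptionManager manager,
                ISubscribableRuntime runtime) {
            manager.registerFeedSubscribableRuntime(runtime);
            SubscribableFeedRuntimeId id = (SubscribableFeedRuntimeId) runtime.getRuntimeId();
            return manager.getSubscribableRuntime(id);
        }
    }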

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedWorkManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedWorkManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedWorkManager.java
new file mode 100644
index 0000000..f2491db
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedWorkManager.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.management;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.apache.asterix.external.feed.api.IFeedWork;
+import org.apache.asterix.external.feed.api.IFeedWorkEventListener;
+import org.apache.asterix.external.feed.api.IFeedWorkManager;
+
+/**
+ * Handles asynchronous execution of feed management related tasks.
+ */
+public class FeedWorkManager implements IFeedWorkManager {
+
+    public static final FeedWorkManager INSTANCE = new FeedWorkManager();
+
+    private final ExecutorService executorService = Executors.newCachedThreadPool();
+
+    private FeedWorkManager() {
+    }
+
+    public void submitWork(IFeedWork work, IFeedWorkEventListener listener) {
+        Runnable runnable = work.getRunnable();
+        try {
+            executorService.execute(runnable);
+            listener.workCompleted(work);
+        } catch (Exception e) {
+            listener.workFailed(work, e);
+        }
+    }
+
+}
\ No newline at end of file
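
A hypothetical caller of the singleton above; note that the listener's workCompleted callback fires as soon as the Runnable is accepted by the executor, not when it finishes:

    import org.apache.asterix.external.feed.api.IFeedWork;
    import org.apache.asterix.external.feed.api.IFeedWorkEventListener;
    import org.apache.asterix.external.feed.management.FeedWorkManager;

    public class FeedWorkSketch {
        public static void submit(IFeedWork work, IFeedWorkEventListener listener) {
            // Work is executed on the shared cached thread pool; a failure to
            // submit is reported through workFailed.
            FeedWorkManager.INSTANCE.submitWork(work, listener);
        }
    }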

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/EndFeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/EndFeedMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/EndFeedMessage.java
new file mode 100644
index 0000000..b0f7624
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/EndFeedMessage.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.util.FeedConstants;
+
+/**
+ * A feed control message indicating the need to end the feed. This message is dispatched
+ * to all locations that host an operator involved in the feed pipeline.
+ */
+public class EndFeedMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedId sourceFeedId;
+
+    private final FeedConnectionId connectionId;
+
+    private final FeedRuntimeType sourceRuntimeType;
+
+    private final boolean completeDisconnection;
+
+    private final EndMessageType endMessageType;
+
+    public enum EndMessageType {
+        DISCONNECT_FEED,
+        DISCONTINUE_SOURCE
+    }
+
+    public EndFeedMessage(FeedConnectionId connectionId, FeedRuntimeType sourceRuntimeType, FeedId sourceFeedId,
+            boolean completeDisconnection, EndMessageType endMessageType) {
+        super(MessageType.END);
+        this.connectionId = connectionId;
+        this.sourceRuntimeType = sourceRuntimeType;
+        this.sourceFeedId = sourceFeedId;
+        this.completeDisconnection = completeDisconnection;
+        this.endMessageType = endMessageType;
+    }
+
+    @Override
+    public String toString() {
+        return MessageType.END.name() + "  " + connectionId + " [" + sourceRuntimeType + "] ";
+    }
+
+    public FeedRuntimeType getSourceRuntimeType() {
+        return sourceRuntimeType;
+    }
+
+    public FeedId getSourceFeedId() {
+        return sourceFeedId;
+    }
+
+    public boolean isCompleteDisconnection() {
+        return completeDisconnection;
+    }
+
+    public EndMessageType getEndMessageType() {
+        return endMessageType;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        return obj;
+    }
+
+    public FeedConnectionId getFeedConnectionId() {
+        return connectionId;
+    }
+
+}
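
A construction sketch (dataverse, feed, and dataset names are placeholders) for a full disconnect of a feed from its target dataset:

    import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.message.EndFeedMessage;

    public class EndFeedSketch {
        public static EndFeedMessage disconnect(String dataverse, String feed, String dataset,
                FeedRuntimeType sourceRuntimeType) {
            FeedId sourceFeedId = new FeedId(dataverse, feed);
            FeedConnectionId connectionId = new FeedConnectionId(sourceFeedId, dataset);
            // completeDisconnection = true ends the connection entirely rather
            // than discontinuing only the source.
            return new EndFeedMessage(connectionId, sourceRuntimeType, sourceFeedId, true,
                    EndFeedMessage.EndMessageType.DISCONNECT_FEED);
        }
    }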

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedCongestionMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedCongestionMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedCongestionMessage.java
new file mode 100644
index 0000000..6c924d2
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedCongestionMessage.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.Mode;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.external.util.FeedConstants.MessageConstants;
+
+public class FeedCongestionMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+    private final FeedRuntimeId runtimeId;
+    private int inflowRate;
+    private int outflowRate;
+    private Mode mode;
+
+    public FeedCongestionMessage(FeedConnectionId connectionId, FeedRuntimeId runtimeId, int inflowRate,
+            int outflowRate, Mode mode) {
+        super(MessageType.CONGESTION);
+        this.connectionId = connectionId;
+        this.runtimeId = runtimeId;
+        this.inflowRate = inflowRate;
+        this.outflowRate = outflowRate;
+        this.mode = mode;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeId.getFeedRuntimeType());
+        obj.put(FeedConstants.MessageConstants.OPERAND_ID, runtimeId.getOperandId());
+        obj.put(FeedConstants.MessageConstants.PARTITION, runtimeId.getPartition());
+        obj.put(FeedConstants.MessageConstants.INFLOW_RATE, inflowRate);
+        obj.put(FeedConstants.MessageConstants.OUTFLOW_RATE, outflowRate);
+        obj.put(FeedConstants.MessageConstants.MODE, mode);
+        return obj;
+    }
+
+    public FeedRuntimeId getRuntimeId() {
+        return runtimeId;
+    }
+
+    public int getInflowRate() {
+        return inflowRate;
+    }
+
+    public int getOutflowRate() {
+        return outflowRate;
+    }
+
+    public static FeedCongestionMessage read(JSONObject obj) throws JSONException {
+        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
+                obj.getString(FeedConstants.MessageConstants.FEED));
+        FeedConnectionId connectionId = new FeedConnectionId(feedId,
+                obj.getString(FeedConstants.MessageConstants.DATASET));
+        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.valueOf(obj
+                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE)),
+                obj.getInt(FeedConstants.MessageConstants.PARTITION),
+                obj.getString(FeedConstants.MessageConstants.OPERAND_ID));
+        Mode mode = Mode.valueOf(obj.getString(MessageConstants.MODE));
+        return new FeedCongestionMessage(connectionId, runtimeId,
+                obj.getInt(FeedConstants.MessageConstants.INFLOW_RATE),
+                obj.getInt(FeedConstants.MessageConstants.OUTFLOW_RATE), mode);
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public Mode getMode() {
+        return mode;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessage.java
new file mode 100644
index 0000000..f2b354b
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessage.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.hyracks.api.dataflow.value.JSONSerializable;
+
+/**
+ * A control message that can be sent to the runtime instance of a
+ * feed's adapter.
+ */
+public abstract class FeedMessage implements IFeedMessage, JSONSerializable {
+
+    private static final long serialVersionUID = 1L;
+
+    protected final MessageType messageType;
+
+    public FeedMessage(MessageType messageType) {
+        this.messageType = messageType;
+    }
+
+    public MessageType getMessageType() {
+        return messageType;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessageService.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessageService.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessageService.java
new file mode 100644
index 0000000..13d6622
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedMessageService.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import java.net.Socket;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import org.apache.asterix.common.config.AsterixFeedProperties;
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.asterix.external.feed.api.IFeedMessageService;
+import org.apache.asterix.external.util.FeedConstants;
+
+/**
+ * Sends feed messages on behalf of an operator instance
+ * to the central feed manager associated with the feed.
+ */
+public class FeedMessageService implements IFeedMessageService {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedMessageService.class.getName());
+
+    private final LinkedBlockingQueue<String> inbox;
+    private final FeedMessageHandler mesgHandler;
+    private final String nodeId;
+    private ExecutorService executor;
+
+    public FeedMessageService(AsterixFeedProperties feedProperties, String nodeId, String ccClusterIp) {
+        this.inbox = new LinkedBlockingQueue<String>();
+        this.mesgHandler = new FeedMessageHandler(inbox, ccClusterIp, feedProperties.getFeedCentralManagerPort());
+        this.nodeId = nodeId;
+        this.executor = Executors.newSingleThreadExecutor();
+    }
+
+    public void start() throws Exception {
+        executor.execute(mesgHandler);
+    }
+
+    public void stop() {
+        synchronized (mesgHandler.getLock()) {
+            executor.shutdownNow();
+        }
+        mesgHandler.stop();
+    }
+
+    @Override
+    public void sendMessage(IFeedMessage message) {
+        try {
+            JSONObject obj = message.toJSON();
+            obj.put(FeedConstants.MessageConstants.NODE_ID, nodeId);
+            obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, message.getMessageType().name());
+            inbox.add(obj.toString());
+        } catch (JSONException jse) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("JSON exception in serializing message " + message + ": " + jse.getMessage());
+            }
+        }
+    }
+
+    private static class FeedMessageHandler implements Runnable {
+
+        private final LinkedBlockingQueue<String> inbox;
+        private final String host;
+        private final int port;
+        private final Object lock;
+
+        private Socket cfmSocket;
+
+        private static final byte[] EOL = "\n".getBytes();
+
+        public FeedMessageHandler(LinkedBlockingQueue<String> inbox, String host, int port) {
+            this.inbox = inbox;
+            this.host = host;
+            this.port = port;
+            this.lock = new Object();
+        }
+
+        public void run() {
+            try {
+                cfmSocket = new Socket(host, port);
+                if (cfmSocket != null) {
+                    while (true) {
+                        String message = inbox.take();
+                        synchronized (lock) { // the lock prevents the handler from sending a partial message during a shutdown attempt
+                            cfmSocket.getOutputStream().write(message.getBytes());
+                            cfmSocket.getOutputStream().write(EOL);
+                        }
+                    }
+                } else {
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Unable to start feed message service");
+                    }
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Exception in sending feed messages: " + e.getMessage());
+                }
+            } finally {
+                stop();
+            }
+
+        }
+
+        public void stop() {
+            if (cfmSocket != null) {
+                try {
+                    cfmSocket.close();
+                } catch (Exception e) {
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Exception in closing socket " + e.getMessage());
+                    }
+                }
+            }
+        }
+
+        public Object getLock() {
+            return lock;
+        }
+
+    }
+
+}
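
A usage sketch, assuming the AsterixFeedProperties instance and CC cluster IP are obtained from the runtime context (not shown in this patch excerpt):

    import org.apache.asterix.common.config.AsterixFeedProperties;
    import org.apache.asterix.external.feed.api.IFeedMessage;
    import org.apache.asterix.external.feed.message.FeedMessageService;

    public class MessageServiceSketch {
        public static FeedMessageService startAndSend(AsterixFeedProperties properties, String nodeId,
                String ccClusterIp, IFeedMessage message) throws Exception {
            FeedMessageService service = new FeedMessageService(properties, nodeId, ccClusterIp);
            service.start();
            // The message is serialized to JSON, tagged with the node id, queued,
            // and written newline-terminated to the central feed manager's socket
            // by the background handler.
            service.sendMessage(message);
            return service;
        }
    }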

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedReportMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedReportMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedReportMessage.java
new file mode 100644
index 0000000..1b8c45d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedReportMessage.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.api.IFeedMetricCollector.ValueType;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.external.util.FeedConstants.MessageConstants;
+
+public class FeedReportMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+    private final FeedRuntimeId runtimeId;
+    private final ValueType valueType;
+    private int value;
+
+    public FeedReportMessage(FeedConnectionId connectionId, FeedRuntimeId runtimeId, ValueType valueType, int value) {
+        super(MessageType.FEED_REPORT);
+        this.connectionId = connectionId;
+        this.runtimeId = runtimeId;
+        this.valueType = valueType;
+        this.value = value;
+    }
+
+    public void reset(int value) {
+        this.value = value;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.RUNTIME_TYPE, runtimeId.getFeedRuntimeType());
+        obj.put(FeedConstants.MessageConstants.PARTITION, runtimeId.getPartition());
+        obj.put(FeedConstants.MessageConstants.VALUE_TYPE, valueType);
+        obj.put(FeedConstants.MessageConstants.VALUE, value);
+        return obj;
+    }
+
+    public static FeedReportMessage read(JSONObject obj) throws JSONException {
+        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
+                obj.getString(FeedConstants.MessageConstants.FEED));
+        FeedConnectionId connectionId = new FeedConnectionId(feedId,
+                obj.getString(FeedConstants.MessageConstants.DATASET));
+        FeedRuntimeId runtimeId = new FeedRuntimeId(FeedRuntimeType.valueOf(obj
+                .getString(FeedConstants.MessageConstants.RUNTIME_TYPE)),
+                obj.getInt(FeedConstants.MessageConstants.PARTITION), FeedConstants.MessageConstants.NOT_APPLICABLE);
+        ValueType type = ValueType.valueOf(obj.getString(MessageConstants.VALUE_TYPE));
+        int value = Integer.parseInt(obj.getString(MessageConstants.VALUE));
+        return new FeedReportMessage(connectionId, runtimeId, type, value);
+    }
+
+    public int getValue() {
+        return value;
+    }
+
+    public void setValue(int value) {
+        this.value = value;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public FeedRuntimeId getRuntimeId() {
+        return runtimeId;
+    }
+
+    public ValueType getValueType() {
+        return valueType;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitAckMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitAckMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitAckMessage.java
new file mode 100644
index 0000000..61e26de
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitAckMessage.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import javax.xml.bind.DatatypeConverter;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.util.FeedConstants;
+
+public class FeedTupleCommitAckMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+    private int intakePartition;
+    private int base;
+    private byte[] commitAcks;
+
+    public FeedTupleCommitAckMessage(FeedConnectionId connectionId, int intakePartition, int base, byte[] commitAcks) {
+        super(MessageType.COMMIT_ACK);
+        this.connectionId = connectionId;
+        this.intakePartition = intakePartition;
+        this.base = base;
+        this.commitAcks = commitAcks;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.INTAKE_PARTITION, intakePartition);
+        obj.put(FeedConstants.MessageConstants.BASE, base);
+        String commitAcksString = DatatypeConverter.printBase64Binary(commitAcks);
+        obj.put(FeedConstants.MessageConstants.COMMIT_ACKS, commitAcksString);
+        return obj;
+    }
+
+    public static FeedTupleCommitAckMessage read(JSONObject obj) throws JSONException {
+        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
+                obj.getString(FeedConstants.MessageConstants.FEED));
+        FeedConnectionId connectionId = new FeedConnectionId(feedId,
+                obj.getString(FeedConstants.MessageConstants.DATASET));
+        int intakePartition = obj.getInt(FeedConstants.MessageConstants.INTAKE_PARTITION);
+        int base = obj.getInt(FeedConstants.MessageConstants.BASE);
+        String commitAcksString = obj.getString(FeedConstants.MessageConstants.COMMIT_ACKS);
+        byte[] commitAcks = DatatypeConverter.parseBase64Binary(commitAcksString);
+        return new FeedTupleCommitAckMessage(connectionId, intakePartition, base, commitAcks);
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public int getIntakePartition() {
+        return intakePartition;
+    }
+
+    public byte[] getCommitAcks() {
+        return commitAcks;
+    }
+
+    public void reset(int intakePartition, int base, byte[] commitAcks) {
+        this.intakePartition = intakePartition;
+        this.base = base;
+        this.commitAcks = commitAcks;
+    }
+
+    public int getBase() {
+        return base;
+    }
+
+    public void setBase(int base) {
+        this.base = base;
+    }
+
+}
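
A round-trip sketch showing that the ack bitmap survives the JSON encoding (it travels as a base64 string); all identifiers below are placeholders:

    import org.json.JSONObject;

    import org.apache.asterix.external.feed.management.FeedConnectionId;
    import org.apache.asterix.external.feed.management.FeedId;
    import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;

    public class CommitAckRoundTripSketch {
        public static FeedTupleCommitAckMessage roundTrip(String dataverse, String feed, String dataset,
                int intakePartition, int base, byte[] acks) throws Exception {
            FeedConnectionId connectionId = new FeedConnectionId(new FeedId(dataverse, feed), dataset);
            FeedTupleCommitAckMessage original =
                    new FeedTupleCommitAckMessage(connectionId, intakePartition, base, acks);
            JSONObject json = original.toJSON();
            // read() reverses toJSON(), decoding the base64 ack bytes.
            return FeedTupleCommitAckMessage.read(json);
        }
    }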

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitResponseMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitResponseMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitResponseMessage.java
new file mode 100644
index 0000000..a61dc06
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/FeedTupleCommitResponseMessage.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.util.FeedConstants;
+
+public class FeedTupleCommitResponseMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+    private final int intakePartition;
+    private final int maxWindowAcked;
+
+    public FeedTupleCommitResponseMessage(FeedConnectionId connectionId, int intakePartition, int maxWindowAcked) {
+        super(MessageType.COMMIT_ACK_RESPONSE);
+        this.connectionId = connectionId;
+        this.intakePartition = intakePartition;
+        this.maxWindowAcked = maxWindowAcked;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.INTAKE_PARTITION, intakePartition);
+        obj.put(FeedConstants.MessageConstants.MAX_WINDOW_ACKED, maxWindowAcked);
+        return obj;
+    }
+
+    @Override
+    public String toString() {
+        return connectionId + "[" + intakePartition + "]" + "(" + maxWindowAcked + ")";
+    }
+
+    public static FeedTupleCommitResponseMessage read(JSONObject obj) throws JSONException {
+        FeedId feedId = new FeedId(obj.getString(FeedConstants.MessageConstants.DATAVERSE),
+                obj.getString(FeedConstants.MessageConstants.FEED));
+        FeedConnectionId connectionId = new FeedConnectionId(feedId,
+                obj.getString(FeedConstants.MessageConstants.DATASET));
+        int intakePartition = obj.getInt(FeedConstants.MessageConstants.INTAKE_PARTITION);
+        int maxWindowAcked = obj.getInt(FeedConstants.MessageConstants.MAX_WINDOW_ACKED);
+        return new FeedTupleCommitResponseMessage(connectionId, intakePartition, maxWindowAcked);
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public int getMaxWindowAcked() {
+        return maxWindowAcked;
+    }
+
+    public int getIntakePartition() {
+        return intakePartition;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageListener.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageListener.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageListener.java
new file mode 100644
index 0000000..67f2884
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageListener.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.nio.CharBuffer;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public class MessageListener {
+
+    private static final Logger LOGGER = Logger.getLogger(MessageListener.class.getName());
+
+    private int port;
+    private final LinkedBlockingQueue<String> outbox;
+
+    private ExecutorService executorService = Executors.newFixedThreadPool(10);
+
+    private MessageListenerServer listenerServer;
+
+    public MessageListener(int port, LinkedBlockingQueue<String> outbox) {
+        this.port = port;
+        this.outbox = outbox;
+    }
+
+    public void stop() {
+        listenerServer.stop();
+        if (!executorService.isShutdown()) {
+            executorService.shutdownNow();
+        }
+    }
+
+    public void start() throws IOException {
+        listenerServer = new MessageListenerServer(port, outbox);
+        executorService.execute(listenerServer);
+    }
+
+    private static class MessageListenerServer implements Runnable {
+
+        private final int port;
+        private final LinkedBlockingQueue<String> outbox;
+        private ServerSocket server;
+
+        private static final char EOL = (char) "\n".getBytes()[0];
+
+        public MessageListenerServer(int port, LinkedBlockingQueue<String> outbox) {
+            this.port = port;
+            this.outbox = outbox;
+        }
+
+        public void stop() {
+            try {
+                server.close();
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+
+        @Override
+        public void run() {
+            Socket client = null;
+            try {
+                server = new ServerSocket(port);
+                client = server.accept();
+                InputStream in = client.getInputStream();
+                CharBuffer buffer = CharBuffer.allocate(5000);
+                int rc;
+                char ch;
+                while (true) {
+                    rc = in.read();
+                    if (rc == -1) {
+                        break;
+                    }
+                    ch = (char) rc;
+                    while (ch != EOL) {
+                        buffer.put(ch);
+                        rc = in.read();
+                        if (rc == -1) {
+                            break;
+                        }
+                        ch = (char) rc;
+                    }
+                    buffer.flip();
+                    String s = new String(buffer.array(), 0, buffer.limit());
+                    synchronized (outbox) {
+                        outbox.add(s + "\n");
+                    }
+                    buffer.position(0);
+                    buffer.limit(5000);
+                }
+
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unable to start message listener on port " + port + ": " + e.getMessage());
+                }
+            } finally {
+                if (server != null) {
+                    try {
+                        server.close();
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+
+        }
+
+    }
+
+}
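
A small sketch of wiring the listener to a consumer queue; the port is arbitrary:

    import java.util.concurrent.LinkedBlockingQueue;

    import org.apache.asterix.external.feed.message.MessageListener;

    public class MessageListenerSketch {
        public static MessageListener listenOn(int port) throws Exception {
            LinkedBlockingQueue<String> outbox = new LinkedBlockingQueue<String>();
            MessageListener listener = new MessageListener(port, outbox);
            listener.start();
            // Each newline-terminated line read from the accepted socket is placed
            // on the outbox for a consumer to drain; call stop() to shut down.
            return listener;
        }
    }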

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageReceiver.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageReceiver.java
new file mode 100644
index 0000000..abeb994
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/MessageReceiver.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IMessageReceiver;
+
+public abstract class MessageReceiver<T> implements IMessageReceiver<T> {
+
+    protected static final Logger LOGGER = Logger.getLogger(MessageReceiver.class.getName());
+
+    protected final LinkedBlockingQueue<T> inbox;
+    protected ExecutorService executor;
+
+    public MessageReceiver() {
+        inbox = new LinkedBlockingQueue<T>();
+    }
+
+    public abstract void processMessage(T message) throws Exception;
+
+    @Override
+    public void start() {
+        executor = Executors.newSingleThreadExecutor();
+        executor.execute(new MessageReceiverRunnable<T>(this));
+    }
+
+    @Override
+    public synchronized void sendMessage(T message) {
+        inbox.add(message);
+    }
+
+    @Override
+    public void close(boolean processPending) {
+        if (executor != null) {
+            executor.shutdown();
+            executor = null;
+            if (processPending) {
+                flushPendingMessages();
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Will discard " + inbox.size() + " pending messages");
+                }
+            }
+        }
+    }
+
+    private static class MessageReceiverRunnable<T> implements Runnable {
+
+        private final LinkedBlockingQueue<T> inbox;
+        private final MessageReceiver<T> messageReceiver;
+
+        public MessageReceiverRunnable(MessageReceiver<T> messageReceiver) {
+            this.inbox = messageReceiver.inbox;
+            this.messageReceiver = messageReceiver;
+        }
+
+        @Override
+        public void run() {
+            while (true) {
+                try {
+                    T message = inbox.take();
+                    messageReceiver.processMessage(message);
+                } catch (InterruptedException e) {
+                    // restore the interrupt flag and stop taking messages
+                    Thread.currentThread().interrupt();
+                    break;
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+    }
+
+    protected void flushPendingMessages() {
+        while (!inbox.isEmpty()) {
+            T message = null;
+            try {
+                message = inbox.take();
+                processMessage(message);
+            } catch (InterruptedException ie) {
+                // ignore exception but break from the loop
+                break;
+            } catch (Exception e) {
+                e.printStackTrace();
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Exception " + e + " in processing message " + message);
+                }
+            }
+        }
+    }
+
+}
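
A minimal concrete receiver, assuming IMessageReceiver declares no methods beyond those MessageReceiver already implements:

    import org.apache.asterix.external.feed.message.MessageReceiver;

    public class LoggingMessageReceiver extends MessageReceiver<String> {
        @Override
        public void processMessage(String message) throws Exception {
            // Messages queued through sendMessage() are taken off the inbox by the
            // single background thread and handed here, one at a time.
            System.out.println("received: " + message);
        }
    }

Start it with start(), feed it with sendMessage(...), and shut it down with close(true) to drain pending messages or close(false) to discard them.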

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/NodeReportMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/NodeReportMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/NodeReportMessage.java
new file mode 100644
index 0000000..1548d6d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/NodeReportMessage.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.asterix.external.util.FeedConstants;
+
+public class NodeReportMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private double cpuLoad;
+    private double usedHeap;
+    private int nRuntimes;
+
+    public NodeReportMessage(float cpuLoad, long usedHeap, int nRuntimes) {
+        super(IFeedMessage.MessageType.NODE_REPORT);
+        this.usedHeap = usedHeap;
+        this.cpuLoad = cpuLoad;
+        this.nRuntimes = nRuntimes;
+    }
+
+    public void reset(double cpuLoad, double usedHeap, int nRuntimes) {
+        this.cpuLoad = cpuLoad;
+        this.usedHeap = usedHeap;
+        this.nRuntimes = nRuntimes;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.CPU_LOAD, cpuLoad);
+        obj.put(FeedConstants.MessageConstants.HEAP_USAGE, usedHeap);
+        obj.put(FeedConstants.MessageConstants.N_RUNTIMES, nRuntimes);
+        return obj;
+    }
+
+    public double getCpuLoad() {
+        return cpuLoad;
+    }
+
+    public double getUsedHeap() {
+        return usedHeap;
+    }
+
+    public int getnRuntimes() {
+        return nRuntimes;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/PrepareStallMessage.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/PrepareStallMessage.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/PrepareStallMessage.java
new file mode 100644
index 0000000..76fe0c2
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/message/PrepareStallMessage.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.feed.message;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.util.FeedConstants;
+
+/**
+ * A feed control message indicating that the feed pipeline should prepare for a temporary stall. This message is
+ * dispatched to all locations that host an operator involved in the feed pipeline.
+ */
+public class PrepareStallMessage extends FeedMessage {
+
+    private static final long serialVersionUID = 1L;
+
+    private final FeedConnectionId connectionId;
+
+    private final int computePartitionsRetainLimit;
+
+    public PrepareStallMessage(FeedConnectionId connectionId, int computePartitionsRetainLimit) {
+        super(MessageType.PREPARE_STALL);
+        this.connectionId = connectionId;
+        this.computePartitionsRetainLimit = computePartitionsRetainLimit;
+    }
+
+    @Override
+    public String toString() {
+        return MessageType.PREPARE_STALL.name() + "  " + connectionId;
+    }
+
+    @Override
+    public JSONObject toJSON() throws JSONException {
+        JSONObject obj = new JSONObject();
+        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, messageType.name());
+        obj.put(FeedConstants.MessageConstants.DATAVERSE, connectionId.getFeedId().getDataverse());
+        obj.put(FeedConstants.MessageConstants.FEED, connectionId.getFeedId().getFeedName());
+        obj.put(FeedConstants.MessageConstants.DATASET, connectionId.getDatasetName());
+        obj.put(FeedConstants.MessageConstants.COMPUTE_PARTITION_RETAIN_LIMIT, computePartitionsRetainLimit);
+        return obj;
+    }
+
+    public FeedConnectionId getConnectionId() {
+        return connectionId;
+    }
+
+    public int getComputePartitionsRetainLimit() {
+        return computePartitionsRetainLimit;
+    }
+
+}

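For readers skimming the patch, the new message type is consumed purely through the accessors added
above (getConnectionId(), getComputePartitionsRetainLimit(), toJSON()). Below is a minimal sketch of
a receiver acting on it, using only the API visible in this diff; the handler class and logger are
illustrative and not part of the patch:

    import java.util.logging.Logger;

    import org.apache.asterix.external.feed.message.PrepareStallMessage;
    import org.json.JSONException;

    public class PrepareStallHandlerSketch {

        private static final Logger LOGGER = Logger.getLogger(PrepareStallHandlerSketch.class.getName());

        /** Reacts to a PrepareStallMessage using only the accessors introduced in this patch. */
        public void handle(PrepareStallMessage message) throws JSONException {
            // How many compute partitions the pipeline should keep active while stalled.
            int retainLimit = message.getComputePartitionsRetainLimit();
            LOGGER.info("Preparing stall for " + message.getConnectionId() + ", retaining " + retainLimit
                    + " compute partition(s)");
            // The same payload is available in JSON form for transport or auditing.
            LOGGER.fine(message.toJSON().toString());
        }
    }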


[22/26] incubator-asterixdb git commit: Feed Fixes and Cleanup

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
index 44af0ff..d67ca0d 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
@@ -38,6 +38,7 @@ import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
 import org.apache.asterix.common.transactions.JobId;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.indexing.IndexingConstants;
+import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.external.operators.ExternalIndexBulkModifyOperatorDescriptor;
 import org.apache.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
@@ -46,7 +47,6 @@ import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
-import org.apache.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.metadata.utils.DatasetUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.IAType;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
index 4d887dc..418d143 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
@@ -31,12 +31,12 @@ import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
 import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
 import org.apache.asterix.external.indexing.IndexingConstants;
+import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Index;
-import org.apache.asterix.metadata.feeds.ExternalDataScanOperatorDescriptor;
 import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.IAType;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
index c6e04df..4fae7e9 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/AsterixGlobalRecoveryManager.java
@@ -26,15 +26,15 @@ import java.util.logging.Logger;
 import org.apache.asterix.common.api.IClusterEventsSubscriber;
 import org.apache.asterix.common.api.IClusterManagementWork;
 import org.apache.asterix.common.api.IClusterManagementWorkResponse;
+import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.feeds.CentralFeedManager;
+import org.apache.asterix.feed.CentralFeedManager;
 import org.apache.asterix.file.ExternalIndexingOperations;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Dataverse;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index d2164f4..2a7b3e4 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -37,12 +37,12 @@ import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
 import org.apache.asterix.common.config.AsterixExternalProperties;
 import org.apache.asterix.common.config.AsterixMetadataProperties;
 import org.apache.asterix.common.config.AsterixReplicationProperties;
-import org.apache.asterix.common.feeds.api.ICentralFeedManager;
 import org.apache.asterix.compiler.provider.AqlCompilationProvider;
 import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
 import org.apache.asterix.event.service.ILookupService;
-import org.apache.asterix.feeds.CentralFeedManager;
-import org.apache.asterix.feeds.FeedLifecycleListener;
+import org.apache.asterix.external.feed.api.ICentralFeedManager;
+import org.apache.asterix.feed.CentralFeedManager;
+import org.apache.asterix.feed.FeedLifecycleListener;
 import org.apache.asterix.messaging.CCMessageBroker;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.api.IAsterixStateProxy;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
index 01775ab..b0dfd58 100755
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
@@ -36,6 +36,8 @@ import javax.xml.bind.Unmarshaller;
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
 import org.apache.asterix.external.library.ExternalLibrary;
 import org.apache.asterix.external.library.ExternalLibraryManager;
 import org.apache.asterix.external.library.LibraryAdapter;
@@ -44,10 +46,8 @@ import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.api.IMetadataEntity;
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
-import org.apache.asterix.metadata.entities.DatasourceAdapter.AdapterType;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Library;
-import org.apache.asterix.metadata.feeds.AdapterIdentifier;
 import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
 
 public class ExternalLibraryBootstrap {
@@ -210,7 +210,8 @@ public class ExternalLibraryBootstrap {
                     String adapterFactoryClass = adapter.getFactoryClass().trim();
                     String adapterName = libraryName + "#" + adapter.getName().trim();
                     AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
-                    DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass, AdapterType.EXTERNAL);
+                    DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass,
+                            IDataSourceAdapter.AdapterType.EXTERNAL);
                     MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
                     if (LOGGER.isLoggable(Level.INFO)) {
                         LOGGER.info("Installed adapter: " + adapterName);

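The registration hunk above now routes library adapters through the external
IDataSourceAdapter.AdapterType enum instead of the metadata-entity one, keeping the
libraryName#adapterName naming. A condensed sketch of that flow, restricted to the calls visible in
the diff (the wrapper class, method name, and parameters are illustrative):

    import org.apache.asterix.external.api.IDataSourceAdapter;
    import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
    import org.apache.asterix.metadata.MetadataManager;
    import org.apache.asterix.metadata.MetadataTransactionContext;
    import org.apache.asterix.metadata.entities.DatasourceAdapter;

    public class LibraryAdapterRegistrationSketch {

        /** Registers one adapter shipped inside an external library, mirroring the loop above. */
        public void register(MetadataTransactionContext mdTxnCtx, String dataverse, String libraryName,
                String adapterName, String adapterFactoryClass) throws Exception {
            // Library adapters are namespaced by their library: "<library>#<adapter>".
            AdapterIdentifier aid = new AdapterIdentifier(dataverse, libraryName + "#" + adapterName.trim());
            DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass.trim(),
                    IDataSourceAdapter.AdapterType.EXTERNAL);
            MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
        }
    }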
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
index 2d443c7..d5f1a51 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
@@ -18,29 +18,23 @@
  */
 package org.apache.asterix.hyracks.bootstrap;
 
-import org.apache.asterix.feeds.CentralFeedManager;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
+import org.apache.asterix.common.config.MetadataConstants;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.feed.CentralFeedManager;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
 
 public class FeedBootstrap {
 
-    public final static String FEEDS_METADATA_DV = "feeds_metadata";
-    public final static String FAILED_TUPLE_DATASET = "failed_tuple";
-    public final static String FAILED_TUPLE_DATASET_TYPE = "FailedTupleType";
-    public final static String FAILED_TUPLE_DATASET_KEY = "id";
-
     public static void setUpInitialArtifacts() throws Exception {
 
         StringBuilder builder = new StringBuilder();
         try {
-            builder.append("create dataverse " + FEEDS_METADATA_DV + ";" + "\n");
-            builder.append("use dataverse " + FEEDS_METADATA_DV + ";" + "\n");
-
-            builder.append("create type " + FAILED_TUPLE_DATASET_TYPE + " as open { ");
-
-            String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple",
-                    "message", "timestamp" };
+            builder.append("create dataverse " + FeedConstants.FEEDS_METADATA_DV + ";" + "\n");
+            builder.append("use dataverse " + FeedConstants.FEEDS_METADATA_DV + ";" + "\n");
+            builder.append("create type " + FeedConstants.FAILED_TUPLE_DATASET_TYPE + " as open { ");
+            String[] fieldNames = new String[] { "id", "dataverseName", "feedName", "targetDataset", "tuple", "message",
+                    "timestamp" };
             IAType[] fieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
                     BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
 
@@ -52,9 +46,9 @@ public class FeedBootstrap {
                 builder.append(fieldTypes[i].getTypeName());
             }
             builder.append("}" + ";" + "\n");
-
-            builder.append("create dataset " + FAILED_TUPLE_DATASET + " " + "(" + FAILED_TUPLE_DATASET_TYPE + ")" + " "
-                    + "primary key " + FAILED_TUPLE_DATASET_KEY + " on  " + MetadataConstants.METADATA_NODEGROUP_NAME
+            builder.append("create dataset " + FeedConstants.FAILED_TUPLE_DATASET + " " + "("
+                    + FeedConstants.FAILED_TUPLE_DATASET_TYPE + ")" + " " + "primary key "
+                    + FeedConstants.FAILED_TUPLE_DATASET_KEY + " on  " + MetadataConstants.METADATA_NODEGROUP_NAME
                     + ";");
 
             CentralFeedManager.AQLExecutor.executeAQL(builder.toString());

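Putting the hunk above together: assuming the new FeedConstants fields carry the same values as the
literals removed here (feeds_metadata, FailedTupleType, failed_tuple, id), and given
METADATA_NODEGROUP_NAME = "MetadataGroup" in the new MetadataConstants file further below, the
statements assembled by setUpInitialArtifacts() come out roughly as follows (the field separators
come from a loop not shown in this hunk):

    create dataverse feeds_metadata;
    use dataverse feeds_metadata;
    create type FailedTupleType as open { id:string, dataverseName:string, feedName:string,
        targetDataset:string, tuple:string, message:string, timestamp:string };
    create dataset failed_tuple (FailedTupleType) primary key id on MetadataGroup;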
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
index 76e7856..3341387 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -38,6 +38,7 @@ import org.apache.asterix.common.messaging.api.INCMessageBroker;
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 import org.apache.asterix.common.transactions.IRecoveryManager;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
+import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.event.schema.cluster.Cluster;
 import org.apache.asterix.event.schema.cluster.Node;
 import org.apache.asterix.messaging.NCMessageBroker;
@@ -46,7 +47,6 @@ import org.apache.asterix.metadata.MetadataNode;
 import org.apache.asterix.metadata.api.IAsterixStateProxy;
 import org.apache.asterix.metadata.api.IMetadataNode;
 import org.apache.asterix.metadata.bootstrap.MetadataBootstrap;
-import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
 import org.apache.asterix.transaction.management.service.recovery.RecoveryManager;
@@ -114,7 +114,7 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
         runtimeContext.initialize(initialRun);
         ncApplicationContext.setApplicationObject(runtimeContext);
 
-        //if replication is enabled, check if there is a replica for this node
+        //If replication is enabled, check if there is a replica for this node
         AsterixReplicationProperties asterixReplicationProperties = ((IAsterixPropertiesProvider) runtimeContext)
                 .getReplicationProperties();
 
@@ -123,7 +123,7 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
         if (initialRun) {
             LOGGER.info("System is being initialized. (first run)");
         } else {
-            // #. recover if the system is corrupted by checking system state.
+            //#. recover if the system is corrupted by checking system state.
             IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
             systemState = recoveryMgr.getSystemState();
 
@@ -133,7 +133,7 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
 
             if (replicationEnabled) {
                 if (systemState == SystemState.NEW_UNIVERSE || systemState == SystemState.CORRUPTED) {
-                    //try to perform remote recovery
+                    //Try to perform remote recovery
                     IRemoteRecoveryManager remoteRecoveryMgr = runtimeContext.getRemoteRecoveryManager();
                     remoteRecoveryMgr.performRemoteRecovery();
                     performedRemoteRecovery = true;
@@ -152,20 +152,20 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
     }
 
     private void startReplicationService() throws IOException {
-        //open replication channel
+        //Open replication channel
         runtimeContext.getReplicationChannel().start();
 
-        //check the state of remote replicas
+        //Check the state of remote replicas
         runtimeContext.getReplicationManager().initializeReplicasState();
 
         if (performedRemoteRecovery) {
-            //notify remote replicas about the new IP Address if changed
+            //Notify remote replicas about the new IP Address if changed
             //Note: this is a hack since each node right now maintains its own copy of the cluster configuration.
             //Once the configuration is centralized on the CC, this step wont be needed.
             runtimeContext.getReplicationManager().broadcastNewIPAddress();
         }
 
-        //start replication after the state of remote replicas has been initialized. 
+        //Start replication after the state of remote replicas has been initialized.
         runtimeContext.getReplicationManager().startReplicationThreads();
     }
 
@@ -182,7 +182,7 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
                 MetadataBootstrap.stopUniverse();
             }
 
-            //clean any temporary files
+            //Clean any temporary files
             performLocalCleanUp();
 
             //Note: stopping recovery manager will make a sharp checkpoint
@@ -197,7 +197,7 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
 
     @Override
     public void notifyStartupComplete() throws Exception {
-        //send max resource id on this NC to the CC
+        //Send max resource id on this NC to the CC
         ((INCMessageBroker) ncApplicationContext.getMessageBroker()).reportMaxResourceId();
 
         AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
@@ -228,9 +228,9 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
                 throw new IllegalStateException("Metadata node cannot access distributed state");
             }
 
-            // This is a special case, we just give the metadataNode directly.
-            // This way we can delay the registration of the metadataNode until
-            // it is completely initialized.
+            //This is a special case, we just give the metadataNode directly.
+            //This way we can delay the registration of the metadataNode until
+            //it is completely initialized.
             MetadataManager.INSTANCE = new MetadataManager(proxy, MetadataNode.INSTANCE);
             MetadataBootstrap.startUniverse(((IAsterixPropertiesProvider) runtimeContext), ncApplicationContext,
                     systemState == SystemState.NEW_UNIVERSE);
@@ -272,26 +272,26 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
             proxy.setMetadataNode(stub);
         }
 
-        //clean any temporary files
+        //Clean any temporary files
         performLocalCleanUp();
     }
 
     private void performLocalCleanUp() {
-        //delete working area files from failed jobs
+        //Delete working area files from failed jobs
         runtimeContext.getIOManager().deleteWorkspaceFiles();
 
-        //reclaim storage for temporary datasets.
+        //Reclaim storage for temporary datasets.
         String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
         String[] ioDevices = ((PersistentLocalResourceRepository) runtimeContext.getLocalResourceRepository())
                 .getStorageMountingPoints();
         for (String ioDevice : ioDevices) {
             String tempDatasetsDir = ioDevice + storageDirName + File.separator
-                    + SplitsAndConstraintsUtil.TEMP_DATASETS_STORAGE_FOLDER;
+                    + StoragePathUtil.TEMP_DATASETS_STORAGE_FOLDER;
             FileUtils.deleteQuietly(new File(tempDatasetsDir));
         }
 
-        // TODO
-        //reclaim storage for orphaned index artifacts in NCs.
+        //TODO
+        //Reclaim storage for orphaned index artifacts in NCs.
         //Note: currently LSM indexes invalid components are deleted when an index is activated.
     }
 
@@ -321,7 +321,7 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
                     String nodeIoDevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
                     String[] ioDevicePaths = nodeIoDevices.trim().split(",");
                     for (int i = 0; i < ioDevicePaths.length; i++) {
-                        //construct full store path
+                        // construct full store path
                         ioDevicePaths[i] += File.separator + storeDir;
                     }
                     metadataProperties.getStores().put(nodeId, ioDevicePaths);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
index 8ff6d9b..83beb7c 100644
--- a/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
+++ b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
@@ -36,7 +36,7 @@ import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.asterix.feeds.CentralFeedManager;
+import org.apache.asterix.feed.CentralFeedManager;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
index c4dde05..aabf05d 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
@@ -1,3 +1 @@
-{ "DataverseName": "Metadata", "Name": "adapter", "Classname": "org.apache.asterix.external.adapter.factory.GenericAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Jan 03 15:39:35 AST 2016" }
-{ "DataverseName": "Metadata", "Name": "socket_adapter", "Classname": "org.apache.asterix.external.runtime.GenericSocketFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Jan 03 15:39:35 AST 2016" }
-{ "DataverseName": "Metadata", "Name": "socket_client", "Classname": "org.apache.asterix.external.runtime.SocketClientAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Jan 03 15:39:35 AST 2016" }
\ No newline at end of file
+{ "DataverseName": "Metadata", "Name": "adapter", "Classname": "org.apache.asterix.external.adapter.factory.GenericAdapterFactory", "Type": "INTERNAL", "Timestamp": "Sun Jan 10 16:13:18 AST 2016" }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
index f981aca..f3e4605 100644
--- a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01/feeds_01.1.adm
@@ -1 +1 @@
-{ "DataverseName": "feeds", "FeedName": "TweetFeed", "Function": null, "FeedType": "PRIMARY", "PrimaryTypeDetails": { "AdapterName": "file_feed", "AdapterConfiguration": {{ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } }} }, "SecondaryTypeDetails": null, "Timestamp": "Sat Jun 20 13:55:58 PDT 2015" }
+{ "DataverseName": "feeds", "FeedName": "TweetFeed", "Function": null, "FeedType": "PRIMARY", "PrimaryTypeDetails": { "AdapterName": "file_feed", "AdapterConfiguration": {{ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "asterix_nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } }} }, "SecondaryTypeDetails": null, "Timestamp": "Sat Jun 20 13:55:58 PDT 2015" }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-app/src/test/resources/runtimets/testsuite.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index e14b558..375d05a 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -27,6 +27,82 @@
         ResultOffsetPath="results"
         QueryOffsetPath="queries"
         QueryFileExtension=".aql">
+    <test-group name="feeds">
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_01">
+                <output-dir compare="Text">feeds_01</output-dir>
+            </compilation-unit>
+        </test-case>
+        <!--Disable it because of sporadic failures. Abdullah will re-enable it.
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_02">
+                <output-dir compare="Text">feeds_02</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_03">
+                <output-dir compare="Text">feeds_03</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_04">
+                <output-dir compare="Text">feeds_04</output-dir>
+            </compilation-unit>
+        </test-case>
+
+        <test-case FilePath="feeds">
+          <compilation-unit name="feeds_06">
+            <output-dir compare="Text">feeds_06</output-dir>
+          </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_07">
+                <output-dir compare="Text">feeds_07</output-dir>
+            </compilation-unit>
+        </test-case>
+
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_08">
+                <output-dir compare="Text">feeds_08</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_09">
+                <output-dir compare="Text">feeds_09</output-dir>
+            </compilation-unit>
+        </test-case>
+
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_10">
+                <output-dir compare="Text">feeds_10</output-dir>
+            </compilation-unit>
+        </test-case>
+
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_11">
+                <output-dir compare="Text">feeds_11</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_12">
+                <output-dir compare="Text">feeds_12</output-dir>
+            </compilation-unit>
+        </test-case>
+
+        <test-case FilePath="feeds">
+            <compilation-unit name="issue_230_feeds">
+                <output-dir compare="Text">issue_230_feeds</output-dir>
+            </compilation-unit>
+        </test-case>
+
+        <test-case FilePath="feeds">
+            <compilation-unit name="issue_711_feeds">
+                <output-dir compare="Text">issue_711_feeds</output-dir>
+            </compilation-unit>
+        </test-case>
+        -->
+
+    </test-group>
     <test-group name="flwor">
         <test-case FilePath="flwor">
             <compilation-unit name="at00">
@@ -6137,83 +6213,6 @@
             </compilation-unit>
         </test-case>
     </test-group>
-    <test-group name="feeds">
-
-        <!--Disable it because of sporadic failures. Raman will re-enable it.
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_01">
-                <output-dir compare="Text">feeds_01</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_02">
-                <output-dir compare="Text">feeds_02</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_03">
-                <output-dir compare="Text">feeds_03</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_04">
-                <output-dir compare="Text">feeds_04</output-dir>
-            </compilation-unit>
-        </test-case>
-
-        <test-case FilePath="feeds">
-          <compilation-unit name="feeds_06">
-            <output-dir compare="Text">feeds_06</output-dir>
-          </compilation-unit>
-        </test-case>
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_07">
-                <output-dir compare="Text">feeds_07</output-dir>
-            </compilation-unit>
-        </test-case>
-
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_08">
-                <output-dir compare="Text">feeds_08</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_09">
-                <output-dir compare="Text">feeds_09</output-dir>
-            </compilation-unit>
-        </test-case>
-
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_10">
-                <output-dir compare="Text">feeds_10</output-dir>
-            </compilation-unit>
-        </test-case>
-
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_11">
-                <output-dir compare="Text">feeds_11</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_12">
-                <output-dir compare="Text">feeds_12</output-dir>
-            </compilation-unit>
-        </test-case>
-
-        <test-case FilePath="feeds">
-            <compilation-unit name="issue_230_feeds">
-                <output-dir compare="Text">issue_230_feeds</output-dir>
-            </compilation-unit>
-        </test-case>
-
-        <test-case FilePath="feeds">
-            <compilation-unit name="issue_711_feeds">
-                <output-dir compare="Text">issue_711_feeds</output-dir>
-            </compilation-unit>
-        </test-case>
-        -->
-
-    </test-group>
     <test-group name="hdfs">
         <test-case FilePath="hdfs">
             <compilation-unit name="large-record">

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/api/IAsterixAppRuntimeContext.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/api/IAsterixAppRuntimeContext.java b/asterix-common/src/main/java/org/apache/asterix/common/api/IAsterixAppRuntimeContext.java
index b8c3f2f..3386252 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/api/IAsterixAppRuntimeContext.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/api/IAsterixAppRuntimeContext.java
@@ -24,7 +24,6 @@ import java.util.concurrent.Executor;
 
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.feeds.api.IFeedManager;
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 import org.apache.asterix.common.replication.IReplicaResourcesManager;
 import org.apache.asterix.common.replication.IReplicationChannel;
@@ -79,7 +78,7 @@ public interface IAsterixAppRuntimeContext {
 
     public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID);
 
-    public IFeedManager getFeedManager();
+    public Object getFeedManager();
 
     public IRemoteRecoveryManager getRemoteRecoveryManager();
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java b/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
index cc7ec84..13ce403 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
@@ -197,7 +197,7 @@ public class AsterixPropertiesAccessor {
     }
 
     public ClusterPartition getMetadataPartiton() {
-        //metadata partition is always the first partition on the metadata node
+        // metadata partition is always the first partition on the metadata node
         return nodePartitionsMap.get(metadataNodeName)[0];
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataConstants.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataConstants.java b/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataConstants.java
new file mode 100644
index 0000000..943e385
--- /dev/null
+++ b/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataConstants.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.common.config;
+
+public class MetadataConstants {
+
+    // Name of the dataverse the metadata lives in.
+    public final static String METADATA_DATAVERSE_NAME = "Metadata";
+
+    // Name of the node group where the metadata datasets are stored.
+    public final static String METADATA_NODEGROUP_NAME = "MetadataGroup";
+
+    // Name of the default nodegroup where internal/feed datasets will be partitioned
+    // if an explicit nodegroup is not specified at the time of creation of a dataset
+    public static final String METADATA_DEFAULT_NODEGROUP_NAME = "DEFAULT_NG_ALL_NODES";
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java b/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java
index fd1ebb8..d25e51f 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/dataflow/AsterixLSMInsertDeleteOperatorNodePushable.java
@@ -21,6 +21,7 @@ package org.apache.asterix.common.dataflow;
 import java.nio.ByteBuffer;
 
 import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.common.exceptions.FrameDataException;
 import org.apache.hyracks.api.comm.VSizeFrame;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
@@ -41,6 +42,7 @@ public class AsterixLSMInsertDeleteOperatorNodePushable extends LSMIndexInsertUp
 
     private final boolean isPrimary;
     private AbstractLSMIndex lsmIndex;
+    private int i = 0;
 
     public boolean isPrimary() {
         return isPrimary;
@@ -85,7 +87,7 @@ public class AsterixLSMInsertDeleteOperatorNodePushable extends LSMIndexInsertUp
         ILSMIndexAccessor lsmAccessor = (ILSMIndexAccessor) indexAccessor;
         int tupleCount = accessor.getTupleCount();
         try {
-            for (int i = 0; i < tupleCount; i++) {
+            for (; i < tupleCount; i++) {
                 if (tupleFilter != null) {
                     frameTuple.reset(accessor, i);
                     if (!tupleFilter.accept(frameTuple)) {
@@ -117,11 +119,13 @@ public class AsterixLSMInsertDeleteOperatorNodePushable extends LSMIndexInsertUp
                 }
             }
         } catch (Throwable th) {
-            throw new HyracksDataException(th);
+            FrameDataException fde = new FrameDataException(i, th);
+            throw fde;
         }
         writeBuffer.ensureFrameSize(buffer.capacity());
         FrameUtils.copyAndFlip(buffer, writeBuffer.getBuffer());
         FrameUtils.flushFrame(writeBuffer.getBuffer(), writer);
+        i = 0;
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/exceptions/FrameDataException.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/exceptions/FrameDataException.java b/asterix-common/src/main/java/org/apache/asterix/common/exceptions/FrameDataException.java
index 136a196..18b5264 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/exceptions/FrameDataException.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/exceptions/FrameDataException.java
@@ -26,7 +26,7 @@ public class FrameDataException extends HyracksDataException {
 
     private final int tupleIndex;
 
-    public FrameDataException(int tupleIndex, Exception cause) {
+    public FrameDataException(int tupleIndex, Throwable cause) {
         super(cause);
         this.tupleIndex = tupleIndex;
     }

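The two hunks above work as a pair: the loop index in AsterixLSMInsertDeleteOperatorNodePushable
becomes a field so that, when any Throwable escapes mid-frame, it can be wrapped in a
FrameDataException carrying the offending tuple's index (the widened Throwable constructor makes
that wrapping possible), and the index is reset only after the frame is flushed so a retry can
resume where the failure occurred. A minimal sketch of the same pattern in isolation; the class and
processTuple() below are illustrative placeholders:

    import org.apache.asterix.common.exceptions.FrameDataException;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class TupleIndexTrackingSketch {

        // Kept as a field, not a loop-local, so the failing position survives the exception.
        private int i = 0;

        public void processFrame(int tupleCount) throws HyracksDataException {
            try {
                for (; i < tupleCount; i++) {
                    processTuple(i);
                }
            } catch (Throwable th) {
                // Report exactly which tuple failed instead of a bare HyracksDataException.
                throw new FrameDataException(i, th);
            }
            // Frame fully processed; the next frame starts from tuple 0 again.
            i = 0;
        }

        private void processTuple(int index) {
            // Placeholder for the per-tuple insert/delete work.
        }
    }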
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/BasicMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/BasicMonitoredBuffer.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/BasicMonitoredBuffer.java
deleted file mode 100644
index 70833fc..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/BasicMonitoredBuffer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IExceptionHandler;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback;
-import org.apache.asterix.common.feeds.api.IFramePostProcessor;
-import org.apache.asterix.common.feeds.api.IFramePreprocessor;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class BasicMonitoredBuffer extends MonitoredBuffer {
-
-    public BasicMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter, FrameTupleAccessor fta,
-            RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
-            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
-            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
-        super(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector, connectionId, runtimeId,
-                exceptionHandler, callback, nPartitions, policyAccessor);
-    }
-
-    @Override
-    protected boolean monitorProcessingRate() {
-        return false;
-    }
-
-    @Override
-    protected boolean logInflowOutflowRate() {
-        return false;
-    }
-
-    @Override
-    protected IFramePreprocessor getFramePreProcessor() {
-        return null;
-    }
-
-    @Override
-    protected IFramePostProcessor getFramePostProcessor() {
-        return null;
-    }
-
-    @Override
-    protected boolean monitorInputQueueLength() {
-        return false;
-    }
-
-    @Override
-    protected boolean reportInflowRate() {
-        return false;
-    }
-
-    @Override
-    protected boolean reportOutflowRate() {
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/CollectionRuntime.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/CollectionRuntime.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/CollectionRuntime.java
deleted file mode 100644
index 9865501..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/CollectionRuntime.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.Map;
-
-import org.apache.asterix.common.feeds.FeedFrameCollector.State;
-import org.apache.asterix.common.feeds.api.ISubscribableRuntime;
-import org.apache.asterix.common.feeds.api.ISubscriberRuntime;
-import org.apache.hyracks.api.comm.IFrameWriter;
-
-/**
- * Represents the feed runtime that collects feed tuples from another feed.
- * In case of a primary feed, the CollectionRuntime collects tuples from the feed
- * intake job. For a secondary feed, tuples are collected from the intake/compute
- * runtime associated with the source feed.
- */
-public class CollectionRuntime extends FeedRuntime implements ISubscriberRuntime {
-
-    private final FeedConnectionId connectionId;
-    private final ISubscribableRuntime sourceRuntime;
-    private final Map<String, String> feedPolicy;
-    private FeedFrameCollector frameCollector;
-
-    public CollectionRuntime(FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            FeedRuntimeInputHandler inputSideHandler, IFrameWriter outputSideWriter, ISubscribableRuntime sourceRuntime,
-            Map<String, String> feedPolicy) {
-        super(runtimeId, inputSideHandler, outputSideWriter);
-        this.connectionId = connectionId;
-        this.sourceRuntime = sourceRuntime;
-        this.feedPolicy = feedPolicy;
-    }
-
-    public State waitTillCollectionOver() throws InterruptedException {
-        if (!(isCollectionOver())) {
-            synchronized (frameCollector) {
-                while (!isCollectionOver()) {
-                    frameCollector.wait();
-                }
-            }
-        }
-        return frameCollector.getState();
-    }
-
-    private boolean isCollectionOver() {
-        return frameCollector.getState().equals(FeedFrameCollector.State.FINISHED)
-                || frameCollector.getState().equals(FeedFrameCollector.State.HANDOVER);
-    }
-
-    @Override
-    public void setMode(Mode mode) {
-        getInputHandler().setMode(mode);
-    }
-
-    @Override
-    public Map<String, String> getFeedPolicy() {
-        return feedPolicy;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public ISubscribableRuntime getSourceRuntime() {
-        return sourceRuntime;
-    }
-
-    public void setFrameCollector(FeedFrameCollector frameCollector) {
-        this.frameCollector = frameCollector;
-    }
-
-    @Override
-    public FeedFrameCollector getFrameCollector() {
-        return frameCollector;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/ComputeSideMonitoredBuffer.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/ComputeSideMonitoredBuffer.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/ComputeSideMonitoredBuffer.java
deleted file mode 100644
index 7ec3fdf..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/ComputeSideMonitoredBuffer.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import org.apache.asterix.common.feeds.api.IExceptionHandler;
-import org.apache.asterix.common.feeds.api.IFeedMetricCollector;
-import org.apache.asterix.common.feeds.api.IFrameEventCallback;
-import org.apache.asterix.common.feeds.api.IFramePostProcessor;
-import org.apache.asterix.common.feeds.api.IFramePreprocessor;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class ComputeSideMonitoredBuffer extends MonitoredBuffer {
-
-    public ComputeSideMonitoredBuffer(IHyracksTaskContext ctx, FeedRuntimeInputHandler inputHandler, IFrameWriter frameWriter,
-            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedMetricCollector metricCollector,
-            FeedConnectionId connectionId, FeedRuntimeId runtimeId, IExceptionHandler exceptionHandler,
-            IFrameEventCallback callback, int nPartitions, FeedPolicyAccessor policyAccessor) {
-        super(ctx, inputHandler, frameWriter, fta, recordDesc, metricCollector, connectionId, runtimeId,
-                exceptionHandler, callback, nPartitions, policyAccessor);
-    }
-
-    @Override
-    protected boolean monitorProcessingRate() {
-        return true;
-    }
-
-    protected boolean logInflowOutflowRate() {
-        return true;
-    }
-
-    @Override
-    protected boolean monitorInputQueueLength() {
-        return true;
-    }
-
-    @Override
-    protected IFramePreprocessor getFramePreProcessor() {
-        return null;
-    }
-
-    @Override
-    protected IFramePostProcessor getFramePostProcessor() {
-        return null;
-    }
-
-    @Override
-    protected boolean reportOutflowRate() {
-        return false;
-    }
-
-    @Override
-    protected boolean reportInflowRate() {
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucket.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucket.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucket.java
deleted file mode 100644
index ccd6547..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucket.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.nio.ByteBuffer;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class DataBucket {
-
-    private static final AtomicInteger globalBucketId = new AtomicInteger(0);
-
-    private final ByteBuffer content;
-    private final AtomicInteger readCount;
-    private final int bucketId;
-
-    private int desiredReadCount;
-    private ContentType contentType;
-
-    private final DataBucketPool pool;
-
-    public enum ContentType {
-        DATA, // data (feed tuple)
-        EOD, // A signal indicating that there shall be no more data
-        EOSD // End of processing of spilled data
-    }
-
-    public DataBucket(DataBucketPool pool) {
-        this.content = ByteBuffer.allocate(pool.getFrameSize());
-        this.readCount = new AtomicInteger(0);
-        this.pool = pool;
-        this.contentType = ContentType.DATA;
-        this.bucketId = globalBucketId.incrementAndGet();
-    }
-
-    public synchronized void reset(ByteBuffer frame) {
-        if (frame != null) {
-            content.flip();
-            System.arraycopy(frame.array(), 0, content.array(), 0, frame.limit());
-            content.limit(frame.limit());
-            content.position(0);
-        }
-    }
-
-    public synchronized void doneReading() {
-        if (readCount.incrementAndGet() == desiredReadCount) {
-            readCount.set(0);
-            pool.returnDataBucket(this);
-        }
-    }
-
-    public void setDesiredReadCount(int rCount) {
-        this.desiredReadCount = rCount;
-    }
-
-    public ContentType getContentType() {
-        return contentType;
-    }
-
-    public void setContentType(ContentType contentType) {
-        this.contentType = contentType;
-    }
-
-    public synchronized ByteBuffer getContent() {
-        return content;
-    }
-
-    @Override
-    public String toString() {
-        return "DataBucket [" + bucketId + "]" + " (" + readCount + "," + desiredReadCount + ")";
-    }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucketPool.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucketPool.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucketPool.java
deleted file mode 100644
index 2e7e60c..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/DataBucketPool.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.Stack;
-
-import org.apache.asterix.common.feeds.api.IFeedMemoryComponent;
-import org.apache.asterix.common.feeds.api.IFeedMemoryManager;
-
-/**
- * Represents a pool of reusable {@link DataBucket}
- */
-public class DataBucketPool implements IFeedMemoryComponent {
-
-    /** A unique identifier for the memory component **/
-    private final int componentId;
-
-    /** The {@link IFeedMemoryManager} for the NodeController **/
-    private final IFeedMemoryManager memoryManager;
-
-    /** A collection of available data buckets {@link DataBucket} **/
-    private final Stack<DataBucket> pool;
-
-    /** The total number of data buckets {@link DataBucket} allocated **/
-    private int totalAllocation;
-
-    /** The fixed frame size as configured for the asterix runtime **/
-    private final int frameSize;
-
-    public DataBucketPool(int componentId, IFeedMemoryManager memoryManager, int size, int frameSize) {
-        this.componentId = componentId;
-        this.memoryManager = memoryManager;
-        this.pool = new Stack<DataBucket>();
-        this.frameSize = frameSize;
-        expand(size);
-    }
-
-    public synchronized void returnDataBucket(DataBucket bucket) {
-        pool.push(bucket);
-    }
-
-    public synchronized DataBucket getDataBucket() {
-        if (pool.size() == 0) {
-            if (!memoryManager.expandMemoryComponent(this)) {
-                return null;
-            }
-        }
-        return pool.pop();
-    }
-
-    @Override
-    public Type getType() {
-        return Type.POOL;
-    }
-
-    @Override
-    public int getTotalAllocation() {
-        return totalAllocation;
-    }
-
-    @Override
-    public int getComponentId() {
-        return componentId;
-    }
-
-    @Override
-    public void expand(int delta) {
-        for (int i = 0; i < delta; i++) {
-            DataBucket bucket = new DataBucket(this);
-            pool.add(bucket);
-        }
-        totalAllocation += delta;
-    }
-
-    @Override
-    public void reset() {
-        totalAllocation -= pool.size();
-        pool.clear();
-    }
-
-    @Override
-    public String toString() {
-        return "DataBucketPool" + "[" + componentId + "]" + "(" + totalAllocation + ")";
-    }
-
-    public int getSize() {
-        return pool.size();
-    }
-
-    public int getFrameSize() {
-        return frameSize;
-    }
-
-}
\ No newline at end of file

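A companion sketch of how such a pool is created and drained, assuming an IFeedMemoryManager implementation is available; the manager, not the pool, decides whether an empty pool may grow. The wrapper class, the initial size, and the frame size below are illustrative.

    import org.apache.asterix.common.feeds.DataBucket;
    import org.apache.asterix.common.feeds.DataBucketPool;
    import org.apache.asterix.common.feeds.api.IFeedMemoryManager;

    public class DataBucketPoolSketch {
        public static DataBucketPool newPool(int componentId, IFeedMemoryManager memoryManager) {
            // Start with 10 buckets sized for 32 KB frames; both values are illustrative.
            return new DataBucketPool(componentId, memoryManager, 10, 32768);
        }

        public static DataBucket tryAcquire(DataBucketPool pool) {
            // Returns null when the pool is empty and memoryManager.expandMemoryComponent(pool) refuses growth.
            return pool.getDataBucket();
        }
    }
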
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/DistributeFeedFrameWriter.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/DistributeFeedFrameWriter.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/DistributeFeedFrameWriter.java
deleted file mode 100644
index d0e371e..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/DistributeFeedFrameWriter.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.asterix.common.feeds.api.IFeedOperatorOutputSideHandler;
-import org.apache.asterix.common.feeds.api.IFeedOperatorOutputSideHandler.Type;
-import org.apache.asterix.common.feeds.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-/**
- * Provides a mechanism for distributing frames, as received from an operator, to a
- * set of registered readers. Each reader typically operates at a different pace. Readers
- * are isolated from each other to ensure that a slow reader does not impact the progress of
- * the others.
- **/
-public class DistributeFeedFrameWriter implements IFrameWriter {
-
-    private static final Logger LOGGER = Logger.getLogger(DistributeFeedFrameWriter.class.getName());
-
-    /** A unique identifier for the feed to which the incoming tuples belong. **/
-    private final FeedId feedId;
-
-    /** An instance of FrameDistributor that provides the mechanism for distributing a frame to multiple readers, each operating in isolation. **/
-    private final FrameDistributor frameDistributor;
-
-    /** The original frame writer instantiated as part of job creation **/
-    private IFrameWriter writer;
-
-    /** The feed operation whose output is being distributed by the DistributeFeedFrameWriter **/
-    private final FeedRuntimeType feedRuntimeType;
-
-    /** The value of the partition 'i' if this is the i'th instance of the associated operator **/
-    private final int partition;
-
-    public DistributeFeedFrameWriter(IHyracksTaskContext ctx, FeedId feedId, IFrameWriter writer,
-            FeedRuntimeType feedRuntimeType, int partition, FrameTupleAccessor fta, IFeedManager feedManager)
-                    throws IOException {
-        this.feedId = feedId;
-        this.frameDistributor = new FrameDistributor(feedId, feedRuntimeType, partition, true,
-                feedManager.getFeedMemoryManager(), fta);
-        this.feedRuntimeType = feedRuntimeType;
-        this.partition = partition;
-        this.writer = writer;
-    }
-
-    public FeedFrameCollector subscribeFeed(FeedPolicyAccessor fpa, IFrameWriter frameWriter,
-            FeedConnectionId connectionId) throws Exception {
-        FeedFrameCollector collector = null;
-        if (!frameDistributor.isRegistered(frameWriter)) {
-            collector = new FeedFrameCollector(frameDistributor, fpa, frameWriter, connectionId);
-            frameDistributor.registerFrameCollector(collector);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered subscriber, new mode " + frameDistributor.getMode());
-            }
-            return collector;
-        } else {
-            throw new IllegalStateException("subscriber " + feedId + " already registered");
-        }
-    }
-
-    public void unsubscribeFeed(IFrameWriter recipientFeedFrameWriter) throws Exception {
-        boolean success = frameDistributor.deregisterFrameCollector(recipientFeedFrameWriter);
-        if (!success) {
-            throw new IllegalStateException(
-                    "Invalid attempt to unregister FeedFrameWriter " + recipientFeedFrameWriter + " not registered.");
-        }
-    }
-
-    public void notifyEndOfFeed() {
-        frameDistributor.notifyEndOfFeed();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            frameDistributor.close();
-        } finally {
-            writer.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        writer.fail();
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer frame) throws HyracksDataException {
-        frameDistributor.nextFrame(frame);
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        writer.open();
-    }
-
-    public Map<IFrameWriter, FeedFrameCollector> getRegisteredReaders() {
-        return frameDistributor.getRegisteredReaders();
-    }
-
-    public void setWriter(IFrameWriter writer) {
-        this.writer = writer;
-    }
-
-    public Type getType() {
-        return IFeedOperatorOutputSideHandler.Type.DISTRIBUTE_FEED_OUTPUT_HANDLER;
-    }
-
-    @Override
-    public String toString() {
-        return feedId.toString() + feedRuntimeType + "[" + partition + "]";
-    }
-
-    public FrameDistributor.DistributionMode getDistributionMode() {
-        return frameDistributor.getDistributionMode();
-    }
-}

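A sketch of the subscription hand-shake implemented by the removed writer: each downstream writer gets its own FeedFrameCollector, so a slow reader cannot stall the others, and deregistering a writer that was never registered fails fast. The wrapper class is illustrative, and the imports assume FeedFrameCollector and FeedPolicyAccessor live in the same org.apache.asterix.common.feeds package.

    import org.apache.asterix.common.feeds.DistributeFeedFrameWriter;
    import org.apache.asterix.common.feeds.FeedConnectionId;
    import org.apache.asterix.common.feeds.FeedFrameCollector;
    import org.apache.asterix.common.feeds.FeedPolicyAccessor;
    import org.apache.hyracks.api.comm.IFrameWriter;

    public class DistributeFeedSketch {
        // Registers a downstream writer; the returned collector is that reader's isolated view of the feed.
        public static FeedFrameCollector attach(DistributeFeedFrameWriter distributor, FeedPolicyAccessor fpa,
                IFrameWriter downstream, FeedConnectionId connectionId) throws Exception {
            return distributor.subscribeFeed(fpa, downstream, connectionId);
        }

        // Deregisters a downstream writer; throws IllegalStateException if it was never registered.
        public static void detach(DistributeFeedFrameWriter distributor, IFrameWriter downstream) throws Exception {
            distributor.unsubscribeFeed(downstream);
        }
    }
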
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedActivity.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedActivity.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedActivity.java
deleted file mode 100644
index 11130a1..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedActivity.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.asterix.common.feeds;
-
-import java.util.Map;
-
-public class FeedActivity implements Comparable<FeedActivity> {
-
-    private int activityId;
-
-    private final String dataverseName;
-    private final String datasetName;
-    private final String feedName;
-    private final Map<String, String> feedActivityDetails;
-
-    public static class FeedActivityDetails {
-        public static final String INTAKE_LOCATIONS = "intake-locations";
-        public static final String COMPUTE_LOCATIONS = "compute-locations";
-        public static final String STORAGE_LOCATIONS = "storage-locations";
-        public static final String COLLECT_LOCATIONS = "collect-locations";
-        public static final String FEED_POLICY_NAME = "feed-policy-name";
-        public static final String FEED_CONNECT_TIMESTAMP = "feed-connect-timestamp";
-
-    }
-
-    public FeedActivity(String dataverseName, String feedName, String datasetName,
-            Map<String, String> feedActivityDetails) {
-        this.dataverseName = dataverseName;
-        this.feedName = feedName;
-        this.datasetName = datasetName;
-        this.feedActivityDetails = feedActivityDetails;
-    }
-
-    public String getDataverseName() {
-        return dataverseName;
-    }
-
-    public String getDatasetName() {
-        return datasetName;
-    }
-
-    public String getFeedName() {
-        return feedName;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-        if (!(other instanceof FeedActivity)) {
-            return false;
-        }
-
-        if (!((FeedActivity) other).dataverseName.equals(dataverseName)) {
-            return false;
-        }
-        if (!((FeedActivity) other).datasetName.equals(datasetName)) {
-            return false;
-        }
-        if (!((FeedActivity) other).getFeedName().equals(feedName)) {
-            return false;
-        }
-        if (((FeedActivity) other).getActivityId() != (activityId)) {
-            return false;
-        }
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        return toString().hashCode();
-    }
-
-    @Override
-    public String toString() {
-        return dataverseName + "." + feedName + " --> " + datasetName + " " + activityId;
-    }
-
-    public String getConnectTimestamp() {
-        return feedActivityDetails.get(FeedActivityDetails.FEED_CONNECT_TIMESTAMP);
-    }
-
-    public int getActivityId() {
-        return activityId;
-    }
-
-    public void setActivityId(int activityId) {
-        this.activityId = activityId;
-    }
-
-    public Map<String, String> getFeedActivityDetails() {
-        return feedActivityDetails;
-    }
-
-    @Override
-    public int compareTo(FeedActivity o) {
-        return o.getActivityId() - this.activityId;
-    }
-
-}
\ No newline at end of file

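One non-obvious detail in the removed FeedActivity: compareTo() subtracts this activity id from the other's, so a plain sort orders activities by descending id, most recent first. A small illustrative sketch (the wrapper class is not part of the removed sources):

    import java.util.Collections;
    import java.util.List;

    import org.apache.asterix.common.feeds.FeedActivity;

    public class FeedActivityOrderSketch {
        // Because FeedActivity.compareTo() is "o.getActivityId() - this.activityId",
        // sorting ascending places the highest (most recent) activity id first.
        public static FeedActivity mostRecent(List<FeedActivity> activities) {
            Collections.sort(activities);
            return activities.isEmpty() ? null : activities.get(0);
        }
    }
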
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedCollectRuntimeInputHandler.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedCollectRuntimeInputHandler.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedCollectRuntimeInputHandler.java
deleted file mode 100644
index 97dc4f8..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedCollectRuntimeInputHandler.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import org.apache.asterix.common.feeds.api.IFeedManager;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-
-public class FeedCollectRuntimeInputHandler extends FeedRuntimeInputHandler {
-
-    private final FeedFrameCache feedFrameCache;
-
-    public FeedCollectRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            IFrameWriter coreOperator, FeedPolicyAccessor fpa, boolean bufferingEnabled,
-            FrameTupleAccessor fta, RecordDescriptor recordDesc, IFeedManager feedManager, int nPartitions)
-            throws IOException {
-        super(ctx, connectionId, runtimeId, coreOperator, fpa, bufferingEnabled, fta, recordDesc, feedManager,
-                nPartitions);
-        this.feedFrameCache = new FeedFrameCache(ctx, fta, coreOperator);
-    }
-
-    public void process(ByteBuffer frame) throws HyracksDataException {
-        feedFrameCache.sendMessage(frame);
-        super.process(frame);
-    }
-
-    public void replayFrom(int recordId) throws HyracksDataException {
-        feedFrameCache.replayRecords(recordId);
-    }
-
-    public void dropTill(int recordId) {
-        feedFrameCache.dropTillRecordId(recordId);
-    }
-    
-    public void replayCached() throws HyracksDataException{
-        feedFrameCache.replayAll();
-    }
-
-}

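The removed handler caches every frame before handing it to the core operator, which is what makes replay after a failure possible. A sketch of the two paths, assuming the caller knows the last record id that reached durable storage; the wrapper class and method names are illustrative.

    import java.nio.ByteBuffer;

    import org.apache.asterix.common.feeds.FeedCollectRuntimeInputHandler;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class CollectReplaySketch {
        // Normal path: cache the frame, then let the parent handler drive the core operator.
        public static void onFrame(FeedCollectRuntimeInputHandler handler, ByteBuffer frame)
                throws HyracksDataException {
            handler.process(frame);
        }

        // Recovery path: drop what is already persisted, then resend the rest of the cache.
        public static void recover(FeedCollectRuntimeInputHandler handler, int lastPersistedRecordId)
                throws HyracksDataException {
            handler.dropTill(lastPersistedRecordId);
            handler.replayCached();
        }
    }
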
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectJobInfo.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectJobInfo.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectJobInfo.java
deleted file mode 100644
index 4d6a427..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectJobInfo.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.asterix.common.feeds.api.IFeedJoint;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedConnectJobInfo extends FeedJobInfo {
-
-    private final FeedConnectionId connectionId;
-    private final Map<String, String> feedPolicy;
-    private final IFeedJoint sourceFeedJoint;
-    private IFeedJoint computeFeedJoint;
-
-    private List<String> collectLocations;
-    private List<String> computeLocations;
-    private List<String> storageLocations;
-
-    public FeedConnectJobInfo(JobId jobId, FeedJobState state, FeedConnectionId connectionId,
-            IFeedJoint sourceFeedJoint, IFeedJoint computeFeedJoint, JobSpecification spec,
-            Map<String, String> feedPolicy) {
-        super(jobId, state, FeedJobInfo.JobType.FEED_CONNECT, spec);
-        this.connectionId = connectionId;
-        this.sourceFeedJoint = sourceFeedJoint;
-        this.computeFeedJoint = computeFeedJoint;
-        this.feedPolicy = feedPolicy;
-    }
-
-    public FeedConnectionId getConnectionId() {
-        return connectionId;
-    }
-
-    public List<String> getCollectLocations() {
-        return collectLocations;
-    }
-
-    public List<String> getComputeLocations() {
-        return computeLocations;
-    }
-
-    public List<String> getStorageLocations() {
-        return storageLocations;
-    }
-
-    public void setCollectLocations(List<String> collectLocations) {
-        this.collectLocations = collectLocations;
-    }
-
-    public void setComputeLocations(List<String> computeLocations) {
-        this.computeLocations = computeLocations;
-    }
-
-    public void setStorageLocations(List<String> storageLocations) {
-        this.storageLocations = storageLocations;
-    }
-
-    public IFeedJoint getSourceFeedJoint() {
-        return sourceFeedJoint;
-    }
-
-    public IFeedJoint getComputeFeedJoint() {
-        return computeFeedJoint;
-    }
-
-    public Map<String, String> getFeedPolicy() {
-        return feedPolicy;
-    }
-
-    public void setComputeFeedJoint(IFeedJoint computeFeedJoint) {
-        this.computeFeedJoint = computeFeedJoint;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionId.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionId.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionId.java
deleted file mode 100644
index 355d340..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionId.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.io.Serializable;
-
-/**
- * A unique identifier for a feed connection. A feed connection is an instance of a data feed that is flowing into a dataset.
- */
-public class FeedConnectionId implements Serializable {
-
-    private static final long serialVersionUID = 1L;
-
-    private final FeedId feedId;
-    private final String datasetName;
-
-    public FeedConnectionId(FeedId feedId, String datasetName) {
-        this.feedId = feedId;
-        this.datasetName = datasetName;
-    }
-
-    public FeedConnectionId(String dataverse, String feedName, String datasetName) {
-        this.feedId = new FeedId(dataverse, feedName);
-        this.datasetName = datasetName;
-    }
-
-    public FeedId getFeedId() {
-        return feedId;
-    }
-
-    public String getDatasetName() {
-        return datasetName;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == null || !(o instanceof FeedConnectionId)) {
-            return false;
-        }
-
-        if (this == o
-                || (((FeedConnectionId) o).getFeedId().equals(feedId) && ((FeedConnectionId) o).getDatasetName()
-                        .equals(datasetName))) {
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public int hashCode() {
-        return toString().hashCode();
-    }
-
-    @Override
-    public String toString() {
-        return feedId.toString() + "-->" + datasetName;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionRequest.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionRequest.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionRequest.java
deleted file mode 100644
index 6230eac..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConnectionRequest.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.StringUtils;
-
-import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;
-
-/**
- * A request for connecting a feed to a dataset.
- */
-public class FeedConnectionRequest {
-
-    public enum ConnectionStatus {
-        /** initial state upon creating a connection request **/
-        INITIALIZED,
-
-        /** connection established; feed is receiving data **/
-        ACTIVE,
-
-        /** connection removed; feed is not receiving data **/
-        INACTIVE,
-
-        /** connection request failed **/
-        FAILED
-    }
-
-    /** Feed joint on the feed pipeline that serves as the source for this subscription **/
-    private final FeedJointKey feedJointKey;
-
-    /** Location in the source feed pipeline from where feed tuples are received. **/
-    private final ConnectionLocation connectionLocation;
-
-    /** List of functions that need to be applied in sequence after the data hand-off at the source feedPointKey. **/
-    private final List<String> functionsToApply;
-
-    /** Status associated with the subscription. */
-    private ConnectionStatus connectionStatus;
-
-    /** Name of the policy that governs feed ingestion **/
-    private final String policy;
-
-    /** Policy associated with a feed connection **/
-    private final Map<String, String> policyParameters;
-
-    /** Target dataset associated with the connection request **/
-    private final String targetDataset;
-
-    private final FeedId receivingFeedId;
-
-    
-    public FeedConnectionRequest(FeedJointKey feedPointKey, ConnectionLocation connectionLocation,
-            List<String> functionsToApply, String targetDataset, String policy, Map<String, String> policyParameters,
-            FeedId receivingFeedId) {
-        this.feedJointKey = feedPointKey;
-        this.connectionLocation = connectionLocation;
-        this.functionsToApply = functionsToApply;
-        this.targetDataset = targetDataset;
-        this.policy = policy;
-        this.policyParameters = policyParameters;
-        this.receivingFeedId = receivingFeedId;
-        this.connectionStatus = ConnectionStatus.INITIALIZED;
-    }
-
-    public FeedJointKey getFeedJointKey() {
-        return feedJointKey;
-    }
-
-    public ConnectionStatus getConnectionStatus() {
-        return connectionStatus;
-    }
-
-    public void setSubscriptionStatus(ConnectionStatus connectionStatus) {
-        this.connectionStatus = connectionStatus;
-    }
-
-    public String getPolicy() {
-        return policy;
-    }
-
-    public String getTargetDataset() {
-        return targetDataset;
-    }
-
-    public ConnectionLocation getSubscriptionLocation() {
-        return connectionLocation;
-    }
-
-    public FeedId getReceivingFeedId() {
-        return receivingFeedId;
-    }
-
-    public Map<String, String> getPolicyParameters() {
-        return policyParameters;
-    }
-
-    public List<String> getFunctionsToApply() {
-        return functionsToApply;
-    }
-
-    @Override
-    public String toString() {
-        return "Feed Connection Request " + feedJointKey + " [" + connectionLocation + "]" + " Apply ("
-                + StringUtils.join(functionsToApply, ",") + ")";
-    }
-
-}

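A sketch of how such a connection request is assembled and moved through its lifecycle. The dataset and policy names are placeholders, the ConnectionLocation value is passed in rather than guessed (its constants are defined outside this diff), and the FeedJointKey import assumes the same org.apache.asterix.common.feeds package.

    import java.util.Collections;

    import org.apache.asterix.common.feeds.FeedConnectionRequest;
    import org.apache.asterix.common.feeds.FeedConnectionRequest.ConnectionStatus;
    import org.apache.asterix.common.feeds.FeedId;
    import org.apache.asterix.common.feeds.FeedJointKey;
    import org.apache.asterix.common.feeds.api.IFeedLifecycleListener.ConnectionLocation;

    public class ConnectionRequestSketch {
        public static FeedConnectionRequest newRequest(FeedJointKey sourceJoint, ConnectionLocation location,
                FeedId receivingFeed) {
            // Placeholder target dataset and policy; no functions are applied along the path.
            FeedConnectionRequest request = new FeedConnectionRequest(sourceJoint, location,
                    Collections.<String> emptyList(), "TargetDataset", "BasicPolicy",
                    Collections.<String, String> emptyMap(), receivingFeed);
            request.setSubscriptionStatus(ConnectionStatus.ACTIVE); // INITIALIZED -> ACTIVE once data flows
            return request;
        }
    }
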
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e800e6d5/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConstants.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConstants.java b/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConstants.java
deleted file mode 100644
index 05e554b..0000000
--- a/asterix-common/src/main/java/org/apache/asterix/common/feeds/FeedConstants.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.feeds;
-
-public class FeedConstants {
-
-    public static final class StatisticsConstants {
-        public static final String INTAKE_TUPLEID = "intake-tupleid";
-        public static final String INTAKE_PARTITION = "intake-partition";
-        public static final String INTAKE_TIMESTAMP = "intake-timestamp";
-        public static final String COMPUTE_TIMESTAMP = "compute-timestamp";
-        public static final String STORE_TIMESTAMP = "store-timestamp";
-
-    }
-
-    public static final class MessageConstants {
-        public static final String MESSAGE_TYPE = "message-type";
-        public static final String NODE_ID = "nodeId";
-        public static final String DATAVERSE = "dataverse";
-        public static final String FEED = "feed";
-        public static final String DATASET = "dataset";
-        public static final String AQL = "aql";
-        public static final String RUNTIME_TYPE = "runtime-type";
-        public static final String PARTITION = "partition";
-        public static final String INTAKE_PARTITION = "intake-partition";
-        public static final String INFLOW_RATE = "inflow-rate";
-        public static final String OUTFLOW_RATE = "outflow-rate";
-        public static final String MODE = "mode";
-        public static final String CURRENT_CARDINALITY = "current-cardinality";
-        public static final String REDUCED_CARDINALITY = "reduced-cardinality";
-        public static final String VALUE_TYPE = "value-type";
-        public static final String VALUE = "value";
-        public static final String CPU_LOAD = "cpu-load";
-        public static final String N_RUNTIMES = "n_runtimes";
-        public static final String HEAP_USAGE = "heap_usage";
-        public static final String OPERAND_ID = "operand-id";
-        public static final String COMPUTE_PARTITION_RETAIN_LIMIT = "compute-partition-retain-limit";
-        public static final String LAST_PERSISTED_TUPLE_INTAKE_TIMESTAMP = "last-persisted-tuple-intake_timestamp";
-        public static final String PERSISTENCE_DELAY_WITHIN_LIMIT = "persistence-delay-within-limit";
-        public static final String AVERAGE_PERSISTENCE_DELAY = "average-persistence-delay";
-        public static final String COMMIT_ACKS = "commit-acks";
-        public static final String MAX_WINDOW_ACKED = "max-window-acked";
-        public static final String BASE = "base";
-        public static final String NOT_APPLICABLE = "N/A";
-        
-    }
-
-    public static final class NamingConstants {
-        public static final String LIBRARY_NAME_SEPARATOR = "#";
-    }
-
-    public static class JobConstants {
-        public static final int DEFAULT_FRAME_SIZE = 8192;
-    }
-}