Posted to commits@asterixdb.apache.org by am...@apache.org on 2016/03/16 00:36:17 UTC

[01/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Repository: incubator-asterixdb
Updated Branches:
  refs/heads/master 205e4900e -> d3338f665


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/backupRestore/backupRestore.1.adm
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/backupRestore/backupRestore.1.adm b/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/backupRestore/backupRestore.1.adm
index f28e810..7523328 100644
--- a/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/backupRestore/backupRestore.1.adm
+++ b/asterix-installer/src/test/resources/integrationts/lifecycle/results/asterix-lifecycle/backupRestore/backupRestore.1.adm
@@ -1 +1 @@
-{ "DataverseName": "backupDataverse", "DataFormat": "org.apache.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Wed Apr 24 16:13:46 PDT 2013", "PendingOp": 0 }
+{ "DataverseName": "backupDataverse", "DataFormat": "org.apache.asterix.runtime.formats.NonTaggedDataFormat", "Timestamp": "Wed Apr 24 16:13:46 PDT 2013", "PendingOp": 0i32 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/resources/integrationts/replication/queries/failback/node_failback/node_failback.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/replication/queries/failback/node_failback/node_failback.2.update.aql b/asterix-installer/src/test/resources/integrationts/replication/queries/failback/node_failback/node_failback.2.update.aql
index 47f5975..c0b4919 100644
--- a/asterix-installer/src/test/resources/integrationts/replication/queries/failback/node_failback/node_failback.2.update.aql
+++ b/asterix-installer/src/test/resources/integrationts/replication/queries/failback/node_failback/node_failback.2.update.aql
@@ -30,6 +30,7 @@
 use dataverse TinySocial;
 
 load dataset FacebookUsers using localfs
-(("path"="asterix_nc1:///vagrant/data/fbu.adm"),("format"="adm"));
+(("path"="asterix_nc1:///vagrant/data/fbu.adm"),
+("format"="adm"));
 
 insert into dataset TinySocial.FacebookUsersInMemory(for $x in dataset TinySocial.FacebookUsers return $x);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
----------------------------------------------------------------------
diff --git a/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java b/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
index 3f85ba9..ae657d4 100644
--- a/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
+++ b/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/statement/SubscribeFeedStatement.java
@@ -19,18 +19,19 @@
 package org.apache.asterix.lang.aql.statement;
 
 import java.io.StringReader;
+import java.rmi.RemoteException;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.feed.management.FeedConnectionRequest;
 import org.apache.asterix.external.feed.management.FeedId;
 import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.lang.aql.parser.AQLParserFactory;
 import org.apache.asterix.lang.common.base.IParser;
 import org.apache.asterix.lang.common.base.IParserFactory;
@@ -45,9 +46,7 @@ import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.entities.Feed;
 import org.apache.asterix.metadata.entities.Function;
 import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
-import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.common.utils.Triple;
 
 /**
  * Represents the AQL statement for subscribing to a feed.
@@ -58,7 +57,7 @@ public class SubscribeFeedStatement implements Statement {
     private static final Logger LOGGER = Logger.getLogger(SubscribeFeedStatement.class.getName());
     private final FeedConnectionRequest connectionRequest;
     private Query query;
-    private int varCounter;
+    private final int varCounter;
     private final String[] locations;
 
     public static final String WAIT_FOR_COMPLETION = "wait-for-completion-feed";
@@ -106,7 +105,7 @@ public class SubscribeFeedStatement implements Statement {
                 + connectionRequest.getTargetDataset() + "'" + "," + "'" + feedOutputType + "'" + ")");
 
         List<String> functionsToApply = connectionRequest.getFunctionsToApply();
-        if (functionsToApply != null && functionsToApply.isEmpty()) {
+        if ((functionsToApply != null) && functionsToApply.isEmpty()) {
             builder.append(" return $x");
         } else {
             String rValueName = "x";
@@ -186,10 +185,9 @@ public class SubscribeFeedStatement implements Statement {
         try {
             switch (feed.getFeedType()) {
                 case PRIMARY:
-                    Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> factoryOutput = null;
-
-                    factoryOutput = FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx);
-                    outputType = factoryOutput.second.getTypeName();
+                    outputType = FeedMetadataUtil
+                            .getOutputType(feed, feed.getAdapterConfiguration(), ExternalDataConstants.KEY_TYPE_NAME)
+                            .getTypeName();
                     break;
                 case SECONDARY:
                     outputType = FeedMetadataUtil.getSecondaryFeedOutput(feed, policyAccessor, mdTxnCtx);
@@ -197,7 +195,7 @@ public class SubscribeFeedStatement implements Statement {
             }
             return outputType;
 
-        } catch (AlgebricksException ae) {
+        } catch (AlgebricksException | RemoteException | ACIDException ae) {
             throw new MetadataException(ae);
         }
     }
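
The PRIMARY branch above no longer instantiates the adapter factory just to learn the feed's output type; it now resolves the type name directly from the feed's adapter configuration. A hedged sketch of the new lookup (all names are taken from this diff; the null check is an assumption, justified by getOutputType returning null for a missing key later in this patch):

    ARecordType outputType = FeedMetadataUtil.getOutputType(
            feed, feed.getAdapterConfiguration(), ExternalDataConstants.KEY_TYPE_NAME);
    if (outputType == null) {
        // KEY_TYPE_NAME was never set on the feed; treat it as a configuration error.
        throw new IllegalArgumentException("No output type specified for feed " + feed.getFeedName());
    }
    String typeName = outputType.getTypeName();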

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlDataSource.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlDataSource.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlDataSource.java
index e2605ec..a5347fc 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlDataSource.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlDataSource.java
@@ -46,13 +46,13 @@ import org.apache.hyracks.algebricks.core.algebra.properties.UnorderedPartitione
 
 public abstract class AqlDataSource implements IDataSource<AqlSourceId> {
 
-    private final AqlSourceId id;
-    private final IAType itemType;
-    private final IAType metaItemType;
-    private final AqlDataSourceType datasourceType;
+    protected final AqlSourceId id;
+    protected final IAType itemType;
+    protected final IAType metaItemType;
+    protected final AqlDataSourceType datasourceType;
     protected IAType[] schemaTypes;
     protected INodeDomain domain;
-    private Map<String, Serializable> properties = new HashMap<>();
+    protected Map<String, Serializable> properties = new HashMap<>();
 
     public enum AqlDataSourceType {
         INTERNAL_DATASET,
@@ -142,7 +142,7 @@ public abstract class AqlDataSource implements IDataSource<AqlSourceId> {
                         for (LogicalVariable v : scanVariables) {
                             pvars.add(v);
                             ++i;
-                            if (i >= n - 1) {
+                            if (i >= (n - 1)) {
                                 break;
                             }
                         }
@@ -162,7 +162,7 @@ public abstract class AqlDataSource implements IDataSource<AqlSourceId> {
                         for (LogicalVariable v : scanVariables) {
                             pvars.add(v);
                             ++i;
-                            if (i >= n - 1) {
+                            if (i >= (n - 1)) {
                                 break;
                             }
                         }
@@ -170,7 +170,7 @@ public abstract class AqlDataSource implements IDataSource<AqlSourceId> {
                     }
                     propsLocal = new ArrayList<ILocalStructuralProperty>();
                     List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
-                    for (int i = 0; i < n - 1; i++) {
+                    for (int i = 0; i < (n - 1); i++) {
                         orderColumns.add(new OrderColumn(scanVariables.get(i), OrderKind.ASC));
                     }
                     propsLocal.add(new LocalOrderProperty(orderColumns));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
index e0084f8..650dc21 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
@@ -71,6 +71,7 @@ import org.apache.asterix.external.util.FeedConstants;
 import org.apache.asterix.formats.base.IDataFormat;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 import org.apache.asterix.formats.nontagged.AqlLinearizeComparatorFactoryProvider;
+import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
@@ -180,6 +181,7 @@ import org.apache.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescripto
 import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
 
 public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, String> {
+
     private static Logger LOGGER = Logger.getLogger(AqlMetadataProvider.class.getName());
     private MetadataTransactionContext mdTxnCtx;
     private boolean isWriteTransaction;
@@ -334,7 +336,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
         try {
             switch (((AqlDataSource) dataSource).getDatasourceType()) {
                 case FEED:
-                    return buildFeedCollectRuntime(jobSpec, dataSource);
+                    return buildFeedCollectRuntime(jobSpec, (FeedDataSource) dataSource);
                 case INTERNAL_DATASET: {
                     // querying an internal dataset
                     return buildInternalDatasetScan(jobSpec, scanVariables, minFilterVars, maxFilterVars, opSchema,
@@ -349,7 +351,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
 
                     ExternalDatasetDetails edd = (ExternalDatasetDetails) dataset.getDatasetDetails();
                     IAdapterFactory adapterFactory = getConfiguredAdapterFactory(dataset, edd.getAdapter(),
-                            edd.getProperties(), itemType, false, null);
+                            edd.getProperties(), (ARecordType) itemType, false, null, null);
                     return buildExternalDatasetDataScannerRuntime(jobSpec, itemType, adapterFactory,
                             NonTaggedDataFormat.INSTANCE);
                 }
@@ -363,7 +365,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                     int pkIndex = 0;
                     IAdapterFactory adapterFactory = getConfiguredAdapterFactory(alds.getTargetDataset(),
                             alds.getAdapter(), alds.getAdapterProperties(), itemType, isPKAutoGenerated,
-                            partitioningKeys);
+                            partitioningKeys, null);
                     RecordDescriptor rDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
                     return buildLoadableDatasetScan(jobSpec, alds, adapterFactory, rDesc, isPKAutoGenerated,
                             partitioningKeys, itemType, pkIndex);
@@ -380,18 +382,27 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
 
     @SuppressWarnings("rawtypes")
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedCollectRuntime(JobSpecification jobSpec,
-            IDataSource<AqlSourceId> dataSource) throws AlgebricksException {
-
-        FeedDataSource feedDataSource = (FeedDataSource) dataSource;
-        FeedCollectOperatorDescriptor feedCollector = null;
+            FeedDataSource feedDataSource) throws AlgebricksException {
 
         try {
             ARecordType feedOutputType = (ARecordType) feedDataSource.getItemType();
             ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
                     .getSerializerDeserializer(feedOutputType);
-            RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
-
-            FeedPolicyEntity feedPolicy = (FeedPolicyEntity) ((AqlDataSource) dataSource).getProperties()
+            IAType metaType = feedDataSource.getMetaItemType();
+            List<IAType> pkTypes = feedDataSource.getPkTypes();
+            ArrayList<ISerializerDeserializer> serdes = new ArrayList<>();
+            serdes.add(payloadSerde);
+            if (metaType != null) {
+                serdes.add(AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType));
+            }
+            if (pkTypes != null) {
+                for (IAType type : pkTypes) {
+                    serdes.add(AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(type));
+                }
+            }
+            RecordDescriptor feedDesc = new RecordDescriptor(
+                    serdes.toArray(new ISerializerDeserializer[serdes.size()]));
+            FeedPolicyEntity feedPolicy = (FeedPolicyEntity) feedDataSource.getProperties()
                     .get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
             if (feedPolicy == null) {
                 throw new AlgebricksException("Feed not configured with a policy");
@@ -399,7 +410,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             feedPolicy.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy.getPolicyName());
             FeedConnectionId feedConnectionId = new FeedConnectionId(feedDataSource.getId().getDataverseName(),
                     feedDataSource.getId().getDatasourceName(), feedDataSource.getTargetDataset());
-            feedCollector = new FeedCollectOperatorDescriptor(jobSpec, feedConnectionId,
+            FeedCollectOperatorDescriptor feedCollector = new FeedCollectOperatorDescriptor(jobSpec, feedConnectionId,
                     feedDataSource.getSourceFeedId(), feedOutputType, feedDesc, feedPolicy.getProperties(),
                     feedDataSource.getLocation());
 
@@ -542,12 +553,12 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
     }
 
     private IAdapterFactory getConfiguredAdapterFactory(Dataset dataset, String adapterName,
-            Map<String, String> configuration, IAType itemType, boolean isPKAutoGenerated,
-            List<List<String>> primaryKeys) throws AlgebricksException {
+            Map<String, String> configuration, ARecordType itemType, boolean isPKAutoGenerated,
+            List<List<String>> primaryKeys, ARecordType metaType) throws AlgebricksException {
         try {
             configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName());
             IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(adapterName, configuration,
-                    (ARecordType) itemType);
+                    itemType, metaType);
 
             // check to see if dataset is indexed
             Index filesIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
@@ -599,20 +610,22 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
 
     public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
             JobSpecification jobSpec, Feed primaryFeed, FeedPolicyAccessor policyAccessor) throws Exception {
-        Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> factoryOutput = null;
+        Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput = null;
         factoryOutput = FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx);
+        ARecordType recordType = FeedMetadataUtil.getOutputType(primaryFeed, primaryFeed.getAdapterConfiguration(),
+                ExternalDataConstants.KEY_TYPE_NAME);
         IAdapterFactory adapterFactory = factoryOutput.first;
         FeedIntakeOperatorDescriptor feedIngestor = null;
         switch (factoryOutput.third) {
             case INTERNAL:
-                feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, primaryFeed, adapterFactory,
-                        factoryOutput.second, policyAccessor);
+                feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, primaryFeed, adapterFactory, recordType,
+                        policyAccessor, factoryOutput.second);
                 break;
             case EXTERNAL:
                 String libraryName = primaryFeed.getAdapterName().trim()
                         .split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
                 feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, primaryFeed, libraryName,
-                        adapterFactory.getClass().getName(), factoryOutput.second, policyAccessor);
+                        adapterFactory.getClass().getName(), recordType, policyAccessor, factoryOutput.second);
                 break;
         }
 
@@ -633,7 +646,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             boolean temp = dataset.getDatasetDetails().isTemp();
             Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
                     dataset.getDatasetName(), dataset.getDatasetName());
-            if (primaryIndex != null && dataset.getDatasetType() != DatasetType.EXTERNAL) {
+            if (primaryIndex != null && (dataset.getDatasetType() != DatasetType.EXTERNAL)) {
                 isSecondary = !indexName.equals(primaryIndex.getIndexName());
             }
             int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
@@ -1095,8 +1108,8 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertOrDeleteRuntime(IndexOperation indexOp,
             IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
             List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
-            RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, boolean bulkload)
-                    throws AlgebricksException {
+            RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, boolean bulkload,
+            List<LogicalVariable> additionalNonFilteringFields) throws AlgebricksException {
 
         String datasetName = dataSource.getId().getDatasourceName();
         Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName);
@@ -1110,7 +1123,8 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
         int numKeys = keys.size();
         int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;
         // Move key fields to front.
-        int[] fieldPermutation = new int[numKeys + 1 + numFilterFields];
+        int[] fieldPermutation = new int[numKeys + 1 + numFilterFields
+                + (additionalNonFilteringFields == null ? 0 : additionalNonFilteringFields.size())];
         int[] bloomFilterKeyFields = new int[numKeys];
         int i = 0;
         for (LogicalVariable varKey : keys) {
@@ -1119,10 +1133,16 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             bloomFilterKeyFields[i] = i;
             i++;
         }
-        fieldPermutation[numKeys] = propagatedSchema.findVariable(payload);
+        fieldPermutation[i++] = propagatedSchema.findVariable(payload);
         if (numFilterFields > 0) {
             int idx = propagatedSchema.findVariable(additionalNonKeyFields.get(0));
-            fieldPermutation[numKeys + 1] = idx;
+            fieldPermutation[i++] = idx;
+        }
+        if (additionalNonFilteringFields != null) {
+            for (LogicalVariable variable : additionalNonFilteringFields) {
+                int idx = propagatedSchema.findVariable(variable);
+                fieldPermutation[i++] = idx;
+            }
         }
 
         try {
@@ -1196,10 +1216,10 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(
             IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
             List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
-            RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, boolean bulkload)
-                    throws AlgebricksException {
+            List<LogicalVariable> additionalNonFilteringFields, RecordDescriptor recordDesc, JobGenContext context,
+            JobSpecification spec, boolean bulkload) throws AlgebricksException {
         return getInsertOrDeleteRuntime(IndexOperation.INSERT, dataSource, propagatedSchema, typeEnv, keys, payload,
-                additionalNonKeyFields, recordDesc, context, spec, bulkload);
+                additionalNonKeyFields, recordDesc, context, spec, bulkload, additionalNonFilteringFields);
     }
 
     @Override
@@ -1208,7 +1228,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
             RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec) throws AlgebricksException {
         return getInsertOrDeleteRuntime(IndexOperation.DELETE, dataSource, propagatedSchema, typeEnv, keys, payload,
-                additionalNonKeyFields, recordDesc, context, spec, false);
+                additionalNonKeyFields, recordDesc, context, spec, false, null);
     }
 
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertOrDeleteRuntime(
@@ -1503,7 +1523,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(tokenizerOp,
                     splitsAndConstraint.second);
 
-        } catch (MetadataException e) {
+        } catch (Exception e) {
             throw new AlgebricksException(e);
         }
     }
@@ -1671,7 +1691,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                         NoOpOperationCallbackFactory.INSTANCE);
             }
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
-        } catch (MetadataException e) {
+        } catch (Exception e) {
             throw new AlgebricksException(e);
         }
     }
@@ -1766,9 +1786,9 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
 
             // SecondaryKeys.size() can be two if it comes from the bulkload.
             // In this case, [token, number of token] are the secondaryKeys.
-            if (!isPartitioned || secondaryKeys.size() > 1) {
+            if (!isPartitioned || (secondaryKeys.size() > 1)) {
                 numTokenFields = secondaryKeys.size();
-            } else if (isPartitioned && secondaryKeys.size() == 1) {
+            } else if (isPartitioned && (secondaryKeys.size() == 1)) {
                 numTokenFields = secondaryKeys.size() + 1;
             }
 
@@ -1881,7 +1901,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                         indexDataFlowFactory, filterFactory, modificationCallbackFactory, indexName);
             }
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
-        } catch (MetadataException e) {
+        } catch (Exception e) {
             throw new AlgebricksException(e);
         }
     }
@@ -2193,7 +2213,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             // Create the adapter factory <- right now there is only one. if there are more in the future, we can create
             // a map->
             ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
-            LookupAdapterFactory<?> adapterFactory = AdapterFactoryProvider.getAdapterFactory(
+            LookupAdapterFactory<?> adapterFactory = AdapterFactoryProvider.getLookupAdapterFactory(
                     datasetDetails.getProperties(), (ARecordType) itemType, ridIndexes, retainInput, retainNull,
                     context.getNullWriterFactory());
 
@@ -2236,13 +2256,12 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
         }
     }
 
-    //TODO: refactor this method
     @Override
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getUpsertRuntime(
             IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
             List<LogicalVariable> primaryKeys, LogicalVariable payload, List<LogicalVariable> filterKeys,
-            LogicalVariable prevPayload, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
-                    throws AlgebricksException {
+            List<LogicalVariable> additionalNonFilterFields, RecordDescriptor recordDesc, JobGenContext context,
+            JobSpecification spec) throws AlgebricksException {
         String datasetName = dataSource.getId().getDatasourceName();
         Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName);
         if (dataset == null) {
@@ -2254,8 +2273,9 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
 
         int numKeys = primaryKeys.size();
         int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;
+        int numOfAdditionalFields = additionalNonFilterFields == null ? 0 : additionalNonFilterFields.size();
         // Move key fields to front. {keys, record, filters}
-        int[] fieldPermutation = new int[numKeys + 1 + numFilterFields];
+        int[] fieldPermutation = new int[numKeys + 1 + numFilterFields + numOfAdditionalFields];
         int[] bloomFilterKeyFields = new int[numKeys];
         int i = 0;
         // set the keys' permutations
@@ -2266,11 +2286,18 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             i++;
         }
         // set the record permutation
-        fieldPermutation[numKeys] = propagatedSchema.findVariable(payload);
+        fieldPermutation[i++] = propagatedSchema.findVariable(payload);
         // set the filters' permutations.
         if (numFilterFields > 0) {
             int idx = propagatedSchema.findVariable(filterKeys.get(0));
-            fieldPermutation[numKeys + 1] = idx;
+            fieldPermutation[i++] = idx;
+        }
+
+        if (additionalNonFilterFields != null) {
+            for (LogicalVariable var : additionalNonFilterFields) {
+                int idx = propagatedSchema.findVariable(var);
+                fieldPermutation[i++] = idx;
+            }
         }
 
         try {
@@ -2534,9 +2561,9 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
 
             // SecondaryKeys.size() can be two if it comes from the bulkload.
             // In this case, [token, number of token] are the secondaryKeys.
-            if (!isPartitioned || secondaryKeys.size() > 1) {
+            if (!isPartitioned || (secondaryKeys.size() > 1)) {
                 numTokenFields = secondaryKeys.size();
-            } else if (isPartitioned && secondaryKeys.size() == 1) {
+            } else if (isPartitioned && (secondaryKeys.size() == 1)) {
                 numTokenFields = secondaryKeys.size() + 1;
             }
 
@@ -2641,7 +2668,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                     indexDataFlowFactory, filterFactory, modificationCallbackFactory, indexName, prevFieldPermutation);
 
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
-        } catch (MetadataException e) {
+        } catch (Exception e) {
             throw new AlgebricksException(e);
         }
     }
@@ -2944,7 +2971,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
                     idfh, filterFactory, false, indexName, null, modificationCallbackFactory,
                     NoOpOperationCallbackFactory.INSTANCE, prevFieldPermutation);
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
-        } catch (MetadataException e) {
+        } catch (Exception e) {
             throw new AlgebricksException(e);
         }
     }
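
The permutation arrays built in getInsertOrDeleteRuntime and getUpsertRuntime now follow the layout {primary keys, payload record, optional filter field, additional non-filtering fields such as meta}. A runnable toy of that sizing and placement (plain Java, no AsterixDB dependencies; the schema positions are invented for the example, whereas the real code resolves them with propagatedSchema.findVariable):

    public class FieldPermutationSketch {
        static int[] permutation(int numKeys, boolean hasFilter, int numAdditional) {
            int[] perm = new int[numKeys + 1 + (hasFilter ? 1 : 0) + numAdditional];
            int i = 0;
            for (int k = 0; k < numKeys; k++) {
                perm[i++] = k;                                 // keys move to the front
            }
            perm[i++] = numKeys;                               // then the payload record
            if (hasFilter) {
                perm[i++] = numKeys + 1;                       // then the single filter field
            }
            for (int m = 0; m < numAdditional; m++) {
                perm[i++] = numKeys + (hasFilter ? 2 : 1) + m; // then meta and friends
            }
            return perm;
        }

        public static void main(String[] args) {
            // one primary key, no filter, one meta field -> [0, 1, 2]
            System.out.println(java.util.Arrays.toString(permutation(1, false, 1)));
        }
    }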

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
index 46e3007..21e5729 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/FeedDataSource.java
@@ -18,57 +18,46 @@
  */
 package org.apache.asterix.metadata.declared;
 
+import java.util.List;
+
 import org.apache.asterix.external.feed.api.IFeed;
 import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
 import org.apache.asterix.external.feed.management.FeedId;
-import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.entities.Feed;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain;
 
 public class FeedDataSource extends AqlDataSource {
 
-    private Feed feed;
+    private final Feed feed;
     private final FeedId sourceFeedId;
     private final IFeed.FeedType sourceFeedType;
     private final ConnectionLocation location;
     private final String targetDataset;
     private final String[] locations;
     private final int computeCardinality;
+    private final List<IAType> pkTypes;
+    private final List<ScalarFunctionCallExpression> keyAccessExpression;
 
-    public FeedDataSource(AqlSourceId id, String targetDataset, IAType itemType, IAType metaItemType,
-            AqlDataSourceType dataSourceType, FeedId sourceFeedId, IFeed.FeedType sourceFeedType,
+    public FeedDataSource(Feed feed, AqlSourceId id, String targetDataset, IAType itemType, IAType metaType,
+            List<IAType> pkTypes, List<List<String>> partitioningKeys,
+            List<ScalarFunctionCallExpression> keyAccessExpression, FeedId sourceFeedId, IFeed.FeedType sourceFeedType,
             ConnectionLocation location, String[] locations) throws AlgebricksException {
-        super(id, itemType, metaItemType, dataSourceType);
+        super(id, itemType, metaType, AqlDataSourceType.FEED);
+        this.feed = feed;
         this.targetDataset = targetDataset;
         this.sourceFeedId = sourceFeedId;
         this.sourceFeedType = sourceFeedType;
         this.location = location;
         this.locations = locations;
+        this.pkTypes = pkTypes;
+        this.keyAccessExpression = keyAccessExpression;
         this.computeCardinality = AsterixClusterProperties.INSTANCE.getParticipantNodes().size();
-        MetadataTransactionContext ctx = null;
-        try {
-            MetadataManager.INSTANCE.acquireReadLatch();
-            ctx = MetadataManager.INSTANCE.beginTransaction();
-            this.feed = MetadataManager.INSTANCE.getFeed(ctx, id.getDataverseName(), id.getDatasourceName());
-            MetadataManager.INSTANCE.commitTransaction(ctx);
-            initFeedDataSource(itemType);
-        } catch (Exception e) {
-            if (ctx != null) {
-                try {
-                    MetadataManager.INSTANCE.abortTransaction(ctx);
-                } catch (Exception e2) {
-                    e2.addSuppressed(e);
-                    throw new IllegalStateException("Unable to abort " + e2.getMessage());
-                }
-            }
-
-        } finally {
-            MetadataManager.INSTANCE.releaseReadLatch();
-        }
+        initFeedDataSource();
     }
 
     public Feed getFeed() {
@@ -96,9 +85,19 @@ public class FeedDataSource extends AqlDataSource {
         return locations;
     }
 
-    private void initFeedDataSource(IAType itemType) {
-        schemaTypes = new IAType[1];
-        schemaTypes[0] = itemType;
+    private void initFeedDataSource() {
+        int i = 0;
+        // record + meta (if exists) + PKs (if exists)
+        schemaTypes = new IAType[(1 + (metaItemType != null ? 1 : 0) + (pkTypes != null ? pkTypes.size() : 0))];
+        schemaTypes[i++] = itemType;
+        if (metaItemType != null) {
+            schemaTypes[i++] = metaItemType;
+        }
+        if (pkTypes != null) {
+            for (IAType type : pkTypes) {
+                schemaTypes[i++] = type;
+            }
+        }
         INodeDomain domainForExternalData = new INodeDomain() {
             @Override
             public Integer cardinality() {
@@ -120,4 +119,37 @@ public class FeedDataSource extends AqlDataSource {
     public int getComputeCardinality() {
         return computeCardinality;
     }
+
+    public List<IAType> getPkTypes() {
+        return pkTypes;
+    }
+
+    public List<ScalarFunctionCallExpression> getKeyAccessExpression() {
+        return keyAccessExpression;
+    }
+
+    @Override
+    public LogicalVariable getMetaVariable(List<LogicalVariable> dataScanVariables) {
+        return metaItemType == null ? null : dataScanVariables.get(1);
+    }
+
+    @Override
+    public LogicalVariable getDataRecordVariable(List<LogicalVariable> dataScanVariables) {
+        return dataScanVariables.get(0);
+    }
+
+    public boolean isChange() {
+        return pkTypes != null;
+    }
+
+    public List<LogicalVariable> getPkVars(List<LogicalVariable> allVars) {
+        if (pkTypes == null) {
+            return null;
+        }
+        if (metaItemType != null) {
+            return allVars.subList(2, allVars.size());
+        } else {
+            return allVars.subList(1, allVars.size());
+        }
+    }
 }
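
initFeedDataSource and getPkVars together fix the scan-output layout of a feed: position 0 is the record, position 1 is the meta record when present, and any remaining positions are the primary keys of a change feed. A runnable toy of the getPkVars slicing (plain Java; strings stand in for logical variables):

    import java.util.Arrays;
    import java.util.List;

    public class FeedScanLayoutSketch {
        // Mirrors FeedDataSource.getPkVars: PKs start after the record (+ meta, if any).
        static <T> List<T> pkSlots(List<T> allVars, boolean hasMeta, boolean isChangeFeed) {
            if (!isChangeFeed) {
                return null;
            }
            return allVars.subList(hasMeta ? 2 : 1, allVars.size());
        }

        public static void main(String[] args) {
            List<String> vars = Arrays.asList("$record", "$meta", "$pk0");
            System.out.println(pkSlots(vars, true, true)); // [$pk0]
        }
    }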

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index 1f815c0..78c6587 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.metadata.feeds;
 
+import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -32,6 +33,7 @@ import java.util.logging.Logger;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
+import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.external.api.IAdapterFactory;
@@ -45,12 +47,13 @@ import org.apache.asterix.external.library.ExternalLibraryManager;
 import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
 import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
 import org.apache.asterix.external.provider.AdapterFactoryProvider;
-import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
+import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.DatasourceAdapter;
 import org.apache.asterix.metadata.entities.Datatype;
@@ -79,7 +82,9 @@ import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
 import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.common.data.partition.RandomPartitionComputerFactory;
 import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
@@ -92,7 +97,7 @@ import org.apache.hyracks.dataflow.std.connectors.MToNPartitioningWithMessageCon
  */
 public class FeedMetadataUtil {
 
-    private static Logger LOGGER = Logger.getLogger(FeedMetadataUtil.class.getName());
+    private static final Logger LOGGER = Logger.getLogger(FeedMetadataUtil.class.getName());
 
     private static class LocationConstraint {
         int partition;
@@ -158,7 +163,7 @@ public class FeedMetadataUtil {
                         orig.getFeedConnectionId(), orig.getSourceFeedId(), (ARecordType) orig.getOutputType(),
                         orig.getRecordDescriptor(), orig.getFeedPolicyProperties(), orig.getSubscriptionLocation());
                 oldNewOID.put(opDesc.getOperatorId(), fiop.getOperatorId());
-            } else if (opDesc instanceof AsterixLSMTreeInsertDeleteOperatorDescriptor
+            } else if ((opDesc instanceof AsterixLSMTreeInsertDeleteOperatorDescriptor)
                     && ((AsterixLSMTreeInsertDeleteOperatorDescriptor) opDesc).isPrimary()) {
                 // only introduce store before primary index
                 operandId = ((AsterixLSMTreeInsertDeleteOperatorDescriptor) opDesc).getIndexName();
@@ -343,7 +348,7 @@ public class FeedMetadataUtil {
             sourceOp = entry.getValue().getKey().getKey();
             if (sourceOp instanceof FeedCollectOperatorDescriptor) {
                 targetOp = entry.getValue().getValue().getKey();
-                if (targetOp instanceof FeedMetaOperatorDescriptor
+                if ((targetOp instanceof FeedMetaOperatorDescriptor)
                         && (((FeedMetaOperatorDescriptor) targetOp).getRuntimeType().equals(FeedRuntimeType.COMPUTE))) {
                     connDesc = connectors.get(cid);
                     break;
@@ -458,7 +463,8 @@ public class FeedMetadataUtil {
         return preProcessingRequired;
     }
 
-    public static Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> getPrimaryFeedFactoryAndOutput(
+    @SuppressWarnings("rawtypes")
+    public static Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> getPrimaryFeedFactoryAndOutput(
             Feed feed, FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx)
                     throws AlgebricksException {
         // This method needs to be re-visited
@@ -467,13 +473,15 @@ public class FeedMetadataUtil {
         String adapterFactoryClassname = null;
         IAdapterFactory adapterFactory = null;
         ARecordType adapterOutputType = null;
-        Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> feedProps = null;
+        ARecordType metaType = null;
+        Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> feedProps = null;
         IDataSourceAdapter.AdapterType adapterType = null;
         try {
             adapterName = feed.getAdapterName();
             Map<String, String> configuration = feed.getAdapterConfiguration();
             configuration.putAll(policyAccessor.getFeedPolicy());
-            adapterOutputType = getOutputType(feed, configuration);
+            adapterOutputType = getOutputType(feed, configuration, ExternalDataConstants.KEY_TYPE_NAME);
+            metaType = getOutputType(feed, configuration, ExternalDataConstants.KEY_META_TYPE_NAME);
             ExternalDataUtils.prepareFeed(configuration, feed.getDataverseName(), feed.getFeedName());
             // Get adapter from metadata dataset <Metadata dataverse>
             adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
@@ -482,8 +490,6 @@ public class FeedMetadataUtil {
             if (adapterEntity == null) {
                 adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
             }
-
-            ExternalDataCompatibilityUtils.addCompatabilityParameters(adapterName, adapterOutputType, configuration);
             if (adapterEntity != null) {
                 adapterType = adapterEntity.getType();
                 adapterFactoryClassname = adapterEntity.getClassname();
@@ -499,26 +505,64 @@ public class FeedMetadataUtil {
                         adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
                         break;
                 }
-                adapterFactory.configure(configuration, adapterOutputType);
+                adapterFactory.configure(configuration, adapterOutputType, metaType);
             } else {
-                adapterFactory = AdapterFactoryProvider.getAdapterFactory(adapterName, configuration,
-                        adapterOutputType);
+                adapterFactory = AdapterFactoryProvider.getAdapterFactory(adapterName, configuration, adapterOutputType,
+                        metaType);
                 adapterType = IDataSourceAdapter.AdapterType.INTERNAL;
             }
-            feedProps = new Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType>(adapterFactory,
-                    adapterOutputType, adapterType);
+            int numOfOutputs = 1;
+            if (metaType != null) {
+                numOfOutputs++;
+            }
+            if (ExternalDataUtils.isChangeFeed(configuration)) {
+                // get number of PKs
+                numOfOutputs += ExternalDataUtils.getNumberOfKeys(configuration);
+            }
+            ISerializerDeserializer[] serdes = new ISerializerDeserializer[numOfOutputs];
+            int i = 0;
+            serdes[i++] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(adapterOutputType);
+            if (metaType != null) {
+                serdes[i++] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType);
+            }
+            if (ExternalDataUtils.isChangeFeed(configuration)) {
+                int[] pkIndexes = ExternalDataUtils.getPKIndexes(configuration);
+                if (metaType != null) {
+                    int[] pkIndicators = ExternalDataUtils.getPKSourceIndicators(configuration);
+                    for (int j = 0; j < pkIndexes.length; j++) {
+                        int aInt = pkIndexes[j];
+                        if (pkIndicators[j] == 0) {
+                            serdes[i++] = AqlSerializerDeserializerProvider.INSTANCE
+                                    .getSerializerDeserializer(adapterOutputType.getFieldTypes()[aInt]);
+                        } else if (pkIndicators[j] == 1) {
+                            serdes[i++] = AqlSerializerDeserializerProvider.INSTANCE
+                                    .getSerializerDeserializer(metaType.getFieldTypes()[aInt]);
+                        } else {
+                            throw new AlgebricksException("a key source indicator can only be 0 or 1");
+                        }
+                    }
+                } else {
+                    for (int aInt : pkIndexes) {
+                        serdes[i++] = AqlSerializerDeserializerProvider.INSTANCE
+                                .getSerializerDeserializer(adapterOutputType.getFieldTypes()[aInt]);
+                    }
+                }
+            }
+            feedProps = new Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType>(adapterFactory,
+                    new RecordDescriptor(serdes), adapterType);
         } catch (Exception e) {
             throw new AlgebricksException("unable to create adapter", e);
         }
         return feedProps;
     }
 
-    public static ARecordType getOutputType(IFeed feed, Map<String, String> configuration) throws Exception {
+    public static ARecordType getOutputType(IFeed feed, Map<String, String> configuration, String key)
+            throws RemoteException, ACIDException, MetadataException {
         ARecordType outputType = null;
-        String fqOutputType = configuration.get(ExternalDataConstants.KEY_TYPE_NAME);
+        String fqOutputType = configuration.get(key);
 
         if (fqOutputType == null) {
-            throw new IllegalArgumentException("No output type specified");
+            return null;
         }
         String[] dataverseAndType = fqOutputType.split("[.]");
         String dataverseName;
@@ -530,9 +574,9 @@ public class FeedMetadataUtil {
         } else if (dataverseAndType.length == 2) {
             dataverseName = dataverseAndType[0];
             datatypeName = dataverseAndType[1];
-        } else
-            throw new IllegalArgumentException(
-                    "Invalid value for the parameter " + ExternalDataConstants.KEY_TYPE_NAME);
+        } else {
+            throw new IllegalArgumentException("Invalid value for the parameter " + key);
+        }
 
         MetadataTransactionContext ctx = null;
         MetadataManager.INSTANCE.acquireReadLatch();
@@ -545,11 +589,15 @@ public class FeedMetadataUtil {
             }
             outputType = (ARecordType) t.getDatatype();
             MetadataManager.INSTANCE.commitTransaction(ctx);
-        } catch (Exception e) {
+        } catch (ACIDException | RemoteException | MetadataException e) {
             if (ctx != null) {
-                MetadataManager.INSTANCE.abortTransaction(ctx);
+                try {
+                    MetadataManager.INSTANCE.abortTransaction(ctx);
+                } catch (ACIDException | RemoteException e2) {
+                    e.addSuppressed(e2);
+                }
+                throw e;
             }
-            throw e;
         } finally {
             MetadataManager.INSTANCE.releaseReadLatch();
         }
@@ -557,15 +605,15 @@ public class FeedMetadataUtil {
     }
 
     public static String getSecondaryFeedOutput(Feed feed, FeedPolicyAccessor policyAccessor,
-            MetadataTransactionContext mdTxnCtx) throws AlgebricksException, MetadataException {
+            MetadataTransactionContext mdTxnCtx)
+                    throws AlgebricksException, MetadataException, RemoteException, ACIDException {
         String outputType = null;
         String primaryFeedName = feed.getSourceFeedName();
         Feed primaryFeed = MetadataManager.INSTANCE.getFeed(mdTxnCtx, feed.getDataverseName(), primaryFeedName);
         FunctionSignature appliedFunction = primaryFeed.getAppliedFunction();
         if (appliedFunction == null) {
-            Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType> result = getPrimaryFeedFactoryAndOutput(
-                    primaryFeed, policyAccessor, mdTxnCtx);
-            outputType = result.second.getTypeName();
+            outputType = getOutputType(feed, feed.getAdapterConfiguration(), ExternalDataConstants.KEY_TYPE_NAME)
+                    .getDisplayName();
         } else {
             Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, appliedFunction);
             if (function != null) {
@@ -583,4 +631,10 @@ public class FeedMetadataUtil {
         return outputType;
     }
 
+    public static boolean isChangeFeed(AqlMetadataProvider mdProvider, String dataverse, String feedName)
+            throws AlgebricksException {
+        Feed feed = mdProvider.findFeed(dataverse, feedName);
+        return ExternalDataUtils.isChangeFeed(feed.getAdapterConfiguration());
+    }
+
 }
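
The serde wiring above is the core of the change-feed support: the intake record descriptor now carries 1 + (meta present ? 1 : 0) + number-of-PKs outputs, and each primary key's type is pulled from either the record type or the meta type according to its source indicator (0 or 1). A runnable toy of that resolution rule (plain Java; strings stand in for the field types):

    public class KeySourceSketch {
        static String resolveKeyType(int indicator, int fieldIndex,
                String[] recordFieldTypes, String[] metaFieldTypes) {
            switch (indicator) {
                case 0:
                    return recordFieldTypes[fieldIndex]; // key lives in the record
                case 1:
                    return metaFieldTypes[fieldIndex];   // key lives in the meta record
                default:
                    throw new IllegalArgumentException("a key source indicator can only be 0 or 1");
            }
        }

        public static void main(String[] args) {
            String[] record = { "AString", "AInt64" };
            String[] meta = { "AUUID" };
            System.out.println(resolveKeyType(1, 0, record, meta)); // AUUID
        }
    }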

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
index 4e8c34c..581d01c 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
@@ -107,15 +107,37 @@ public class DatasetUtils {
 
     public static ITypeTraits[] computeTupleTypeTraits(Dataset dataset, ARecordType itemType)
             throws AlgebricksException {
+        return computeTupleTypeTraits(dataset, itemType, null);
+    }
+
+    public static ITypeTraits[] computeTupleTypeTraits(Dataset dataset, ARecordType itemType, ARecordType metaItemType)
+            throws AlgebricksException {
         if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
             throw new AlgebricksException("not implemented");
         }
         List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
         int numKeys = partitioningKeys.size();
-        ITypeTraits[] typeTraits = new ITypeTraits[numKeys + 1];
-        for (int i = 0; i < numKeys; i++) {
-            IAType keyType = itemType.getSubFieldType(partitioningKeys.get(i));
-            typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+        ITypeTraits[] typeTraits;
+        if (metaItemType != null) {
+            typeTraits = new ITypeTraits[numKeys + 2];
+            List<Integer> indicator = ((InternalDatasetDetails) dataset.getDatasetDetails()).getKeySourceIndicator();
+            typeTraits[numKeys + 1] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(metaItemType);
+            for (int i = 0; i < numKeys; i++) {
+                IAType keyType;
+                if (indicator.get(i) == 0) {
+                    keyType = itemType.getSubFieldType(partitioningKeys.get(i));
+                } else {
+                    keyType = metaItemType.getSubFieldType(partitioningKeys.get(i));
+                }
+                typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+            }
+        } else {
+            typeTraits = new ITypeTraits[numKeys + 1];
+            for (int i = 0; i < numKeys; i++) {
+                IAType keyType;
+                keyType = itemType.getSubFieldType(partitioningKeys.get(i));
+                typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
+            }
         }
         typeTraits[numKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
         return typeTraits;
@@ -202,7 +224,7 @@ public class DatasetUtils {
     public static int getPositionOfPartitioningKeyField(Dataset dataset, String fieldExpr) {
         List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
         for (int i = 0; i < partitioningKeys.size(); i++) {
-            if (partitioningKeys.get(i).size() == 1 && partitioningKeys.get(i).get(0).equals(fieldExpr)) {
+            if ((partitioningKeys.get(i).size() == 1) && partitioningKeys.get(i).get(0).equals(fieldExpr)) {
                 return i;
             }
         }
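
With a meta part present, computeTupleTypeTraits sizes the tuple as [key0 .. key(n-1), record, meta] and resolves each key against the record or the meta type through the dataset's key-source indicators. A runnable toy of the sizing and placement (plain Java; strings stand in for ITypeTraits):

    public class TupleTraitsSketch {
        static String[] traitsLayout(int numKeys, boolean hasMeta) {
            String[] traits = new String[numKeys + 1 + (hasMeta ? 1 : 0)];
            for (int i = 0; i < numKeys; i++) {
                traits[i] = "key" + i;        // one trait per partitioning key
            }
            traits[numKeys] = "record";       // the payload record follows the keys
            if (hasMeta) {
                traits[numKeys + 1] = "meta"; // the meta record comes last
            }
            return traits;
        }

        public static void main(String[] args) {
            // one key plus meta -> [key0, record, meta]
            System.out.println(java.util.Arrays.toString(traitsLayout(1, true)));
        }
    }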

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java
index 9f8d5d7..d604f35 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -726,6 +726,8 @@ public class AsterixBuiltinFunctions {
 
     public static final FunctionIdentifier META = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "meta",
             FunctionIdentifier.VARARGS);
+    public static final FunctionIdentifier META_KEY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "meta-key",
+            FunctionIdentifier.VARARGS);
 
     public static IFunctionInfo getAsterixFunctionInfo(FunctionIdentifier fid) {
         return registeredFunctions.get(fid);
@@ -1034,6 +1036,7 @@ public class AsterixBuiltinFunctions {
 
         // meta() function
         addFunction(META, OptionalOpenARecordTypeComputer.INSTANCE, true);
+        addPrivateFunction(META_KEY, AnyTypeComputer.INSTANCE, false);
 
         addPrivateFunction(COLLECTION_TO_SEQUENCE, CollectionToSequenceTypeComputer.INSTANCE, true);
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
----------------------------------------------------------------------
diff --git a/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java b/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
index 91a67ba..6147276 100644
--- a/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
+++ b/asterix-om/src/main/java/org/apache/asterix/om/types/ARecordType.java
@@ -30,6 +30,7 @@ import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.om.base.IAObject;
 import org.apache.asterix.om.util.NonTaggedFormatUtil;
 import org.apache.asterix.om.visitors.IOMVisitor;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -42,6 +43,9 @@ import org.json.JSONObject;
  */
 public class ARecordType extends AbstractComplexType {
 
+    public static final ARecordType FULLY_OPEN_RECORD_TYPE = new ARecordType("OpenRecord", new String[0], new IAType[0],
+            true);
+
     private static final long serialVersionUID = 1L;
     private final String[] fieldNames;
     private final IAType[] fieldTypes;
@@ -71,11 +75,11 @@ public class ARecordType extends AbstractComplexType {
         }
     }
 
-    public final String[] getFieldNames() {
+    public String[] getFieldNames() {
         return fieldNames;
     }
 
-    public final IAType[] getFieldTypes() {
+    public IAType[] getFieldTypes() {
         return fieldTypes;
     }
 
@@ -96,7 +100,7 @@ public class ARecordType extends AbstractComplexType {
         int n = fieldNames.length;
         for (int i = 0; i < n; i++) {
             sb.append("  " + fieldNames[i] + ": " + fieldTypes[i].toString());
-            if (i < n - 1) {
+            if (i < (n - 1)) {
                 sb.append(",\n");
             } else {
                 sb.append("\n");
@@ -144,7 +148,7 @@ public class ARecordType extends AbstractComplexType {
 
     public IAType getSubFieldType(List<String> subFieldName, IAType parent) {
         ARecordType subRecordType = (ARecordType) parent;
-        for (int i = 0; i < subFieldName.size() - 1; i++) {
+        for (int i = 0; i < (subFieldName.size() - 1); i++) {
             subRecordType = (ARecordType) subRecordType.getFieldType(subFieldName.get(i));
         }
         return subRecordType.getFieldType(subFieldName.get(subFieldName.size() - 1));
@@ -182,10 +186,11 @@ public class ARecordType extends AbstractComplexType {
      * @param fieldName
      *            the fieldName whose type is sought
      * @return the field type of the field name if it exists, otherwise null
+     *         NOTE: this method doesn't work for nested fields
      */
     public IAType getFieldType(String fieldName) {
         int fieldPos = getFieldIndex(fieldName);
-        if (fieldPos < 0 || fieldPos >= fieldTypes.length) {
+        if ((fieldPos < 0) || (fieldPos >= fieldTypes.length)) {
             return null;
         }
         return fieldTypes[fieldPos];
@@ -242,7 +247,7 @@ public class ARecordType extends AbstractComplexType {
     public void generateNestedDerivedTypeNames() {
         for (int i = 0; i < fieldTypes.length; i++) {
             IAType fieldType = fieldTypes[i];
-            if (fieldType.getTypeTag().isDerivedType() && fieldType.getTypeName() == null) {
+            if (fieldType.getTypeTag().isDerivedType() && (fieldType.getTypeName() == null)) {
                 AbstractComplexType nestedType = ((AbstractComplexType) fieldType);
                 nestedType.setTypeName(getTypeName() + "_" + fieldNames[i]);
                 nestedType.generateNestedDerivedTypeNames();
@@ -256,7 +261,7 @@ public class ARecordType extends AbstractComplexType {
             return false;
         }
         ARecordType rt = (ARecordType) obj;
-        return isOpen == rt.isOpen && Arrays.deepEquals(fieldNames, rt.fieldNames)
+        return (isOpen == rt.isOpen) && Arrays.deepEquals(fieldNames, rt.fieldNames)
                 && Arrays.deepEquals(fieldTypes, rt.fieldTypes);
     }
 
@@ -264,10 +269,10 @@ public class ARecordType extends AbstractComplexType {
     public int hash() {
         int h = 0;
         for (int i = 0; i < fieldNames.length; i++) {
-            h += 31 * h + fieldNames[i].hashCode();
+            h += (31 * h) + fieldNames[i].hashCode();
         }
         for (int i = 0; i < fieldTypes.length; i++) {
-            h += 31 * h + fieldTypes[i].hashCode();
+            h += (31 * h) + fieldTypes[i].hashCode();
         }
         return h;
     }
@@ -298,4 +303,11 @@ public class ARecordType extends AbstractComplexType {
         return NonTaggedFormatUtil.hasNullableField(rt) ? (int) Math.ceil(rt.getFieldNames().length / 8.0) : 0;
     }
 
+    public List<IAType> getFieldTypes(List<List<String>> fields) throws AlgebricksException {
+        List<IAType> typeList = new ArrayList<>();
+        for (List<String> field : fields) {
+            typeList.add(getSubFieldType(field));
+        }
+        return typeList;
+    }
 }
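
The getSubFieldType() logic above resolves a nested field path by descending through every element except the last (each intermediate step must itself be a record type), then looking up the leaf. A self-contained sketch of the same walk, with nested Maps standing in for ARecordType:

    import java.util.List;
    import java.util.Map;

    // Sketch only: nested Maps model nested record types.
    public class SubFieldSketch {
        @SuppressWarnings("unchecked")
        static Object subFieldType(List<String> path, Map<String, Object> root) {
            Map<String, Object> current = root;
            // walk every path element except the last; each step must be a record
            for (int i = 0; i < path.size() - 1; i++) {
                current = (Map<String, Object>) current.get(path.get(i));
            }
            // the last element names the field whose type is returned
            return current.get(path.get(path.size() - 1));
        }

        public static void main(String[] args) {
            Map<String, Object> userType =
                    Map.of("name", Map.of("first", "string", "last", "string"),
                           "id", "int64");
            System.out.println(subFieldType(List.of("name", "first"), userType)); // string
        }
    }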

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java
index 97683d5..83c6e34 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMPrimaryUpsertOperatorNodePushable.java
@@ -27,6 +27,7 @@ import org.apache.asterix.common.dataflow.AsterixLSMIndexUtil;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.om.pointables.nonvisitor.ARecordPointable;
 import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
 import org.apache.hyracks.api.comm.VSizeFrame;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.INullWriter;
@@ -44,29 +45,30 @@ import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
 import org.apache.hyracks.storage.am.btree.util.BTreeUtils;
 import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback.Operation;
+import org.apache.hyracks.storage.am.common.api.ITreeIndex;
+import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-import org.apache.hyracks.storage.am.lsm.btree.impls.LSMBTree;
 import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMIndexInsertUpdateDeleteOperatorNodePushable;
+import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex;
 import org.apache.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor;
 
 public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertUpdateDeleteOperatorNodePushable {
 
-    private PermutingFrameTupleReference key;
+    private final PermutingFrameTupleReference key;
     private MultiComparator keySearchCmp;
     private ArrayTupleBuilder nullTupleBuilder;
-    private INullWriter nullWriter;
+    private final INullWriter nullWriter;
     private ArrayTupleBuilder tb;
     private DataOutput dos;
-    private LSMBTree lsmIndex;
     private RangePredicate searchPred;
     private IIndexCursor cursor;
     private ITupleReference prevTuple;
-    private int numOfPrimaryKeys;
+    private final int numOfPrimaryKeys;
     boolean isFiltered = false;
-    private ArrayTupleReference prevTupleWithFilter = new ArrayTupleReference();
+    private final ArrayTupleReference prevTupleWithFilter = new ArrayTupleReference();
     private ArrayTupleBuilder prevRecWithPKWithFilterValue;
     private ARecordType recordType;
     private int presetFieldIndex = -1;
@@ -87,7 +89,7 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
         }
         key.setFieldPermutation(searchKeyPermutations);
         this.numOfPrimaryKeys = numOfPrimaryKeys;
-        if (fieldPermutation.length > numOfPrimaryKeys + 1) {
+        if (filterFieldIndex >= 0) {
             isFiltered = true;
             this.recordType = recordType;
             this.presetFieldIndex = filterFieldIndex;
@@ -109,7 +111,7 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
         writeBuffer = new VSizeFrame(ctx);
         writer.open();
         indexHelper.open();
-        lsmIndex = (LSMBTree) indexHelper.getIndexInstance();
+        index = indexHelper.getIndexInstance();
 
         try {
             nullTupleBuilder = new ArrayTupleBuilder(1);
@@ -126,15 +128,16 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
             appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
             modCallback = opDesc.getModificationOpCallbackFactory().createModificationOperationCallback(
                     indexHelper.getResourcePath(), indexHelper.getResourceID(), indexHelper.getResourcePartition(),
-                    lsmIndex, ctx);
+                    index, ctx);
 
-            indexAccessor = lsmIndex.createAccessor(modCallback, opDesc.getSearchOpCallbackFactory()
+            indexAccessor = index.createAccessor(modCallback, opDesc.getSearchOpCallbackFactory()
                     .createSearchOperationCallback(indexHelper.getResourceID(), ctx));
-            cursor = createCursor();
+            cursor = indexAccessor.createSearchCursor(false);
             frameTuple = new FrameTupleReference();
             IAsterixAppRuntimeContext runtimeCtx = (IAsterixAppRuntimeContext) ctx.getJobletContext()
                     .getApplicationContext().getApplicationObject();
-            AsterixLSMIndexUtil.checkAndSetFirstLSN(lsmIndex, runtimeCtx.getTransactionSubsystem().getLogManager());
+            AsterixLSMIndexUtil.checkAndSetFirstLSN((AbstractLSMIndex) index,
+                    runtimeCtx.getTransactionSubsystem().getLogManager());
         } catch (Exception e) {
             indexHelper.close();
             throw new HyracksDataException(e);
@@ -143,16 +146,18 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
 
     private void resetSearchPredicate(int tupleIndex) {
         key.reset(accessor, tupleIndex);
+        searchPred.reset(key, key, true, true, keySearchCmp, keySearchCmp);
     }
 
-    protected void writeOutput(int tupleIndex) throws Exception {
+    private void writeOutput(int tupleIndex, boolean recordWasInserted) throws IOException {
+        boolean recordWasDeleted = prevTuple != null;
         tb.reset();
         frameTuple.reset(accessor, tupleIndex);
         for (int i = 0; i < frameTuple.getFieldCount(); i++) {
             dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
             tb.addFieldEndOffset();
         }
-        if (prevTuple != null) {
+        if (recordWasDeleted) {
             dos.write(prevTuple.getFieldData(numOfPrimaryKeys), prevTuple.getFieldStart(numOfPrimaryKeys),
                     prevTuple.getFieldLength(numOfPrimaryKeys));
             tb.addFieldEndOffset();
@@ -169,7 +174,13 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
                 addNullField();
             }
         }
-        FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+        if (recordWasInserted || recordWasDeleted) {
+            FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+        }
+    }
+
+    public static boolean isNull(ITupleReference t1, int field) {
+        return t1.getFieldData(field)[t1.getFieldStart(field)] == ATypeTag.SERIALIZED_NULL_TYPE_TAG;
     }
 
     private void addNullField() throws IOException {
@@ -183,12 +194,12 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
         accessor.reset(buffer);
         LSMTreeIndexAccessor lsmAccessor = (LSMTreeIndexAccessor) indexAccessor;
         int tupleCount = accessor.getTupleCount();
-
+        int i = 0;
         try {
-            for (int i = 0; i < tupleCount; i++) {
+            while (i < tupleCount) {
+                boolean recordWasInserted = false;
                 tuple.reset(accessor, i);
                 resetSearchPredicate(i);
-                cursor.reset();
                 lsmAccessor.search(cursor, searchPred);
                 if (cursor.hasNext()) {
                     cursor.next();
@@ -205,21 +216,25 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
                     }
                 } else {
                     prevTuple = null;
+                    cursor.reset();
                 }
-                modCallback.setOp(Operation.INSERT);
-                if (prevTuple == null && i == 0) {
-                    lsmAccessor.insert(tuple);
-                } else {
-                    lsmAccessor.forceInsert(tuple);
+                if (!isNull(tuple, numOfPrimaryKeys)) {
+                    modCallback.setOp(Operation.INSERT);
+                    if ((prevTuple == null) && (i == 0)) {
+                        lsmAccessor.insert(tuple);
+                    } else {
+                        lsmAccessor.forceInsert(tuple);
+                    }
+                    recordWasInserted = true;
                 }
-                writeOutput(i);
+                writeOutput(i, recordWasInserted);
+                i++;
             }
             if (tupleCount > 0) {
                // All tuples have to move forward to maintain the correctness of the transaction pipeline
                 appender.write(writer, true);
             }
-        } catch (Exception e) {
-            e.printStackTrace();
+        } catch (IndexException | IOException | AsterixException e) {
             throw new HyracksDataException(e);
         }
     }
@@ -233,7 +248,8 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
         recPointable.set(prevTuple.getFieldData(numOfPrimaryKeys), prevTuple.getFieldStart(numOfPrimaryKeys),
                 prevTuple.getFieldLength(numOfPrimaryKeys));
         // copy the field data from prevTuple
-        prevDos.write(recPointable.getClosedFieldType(recordType, presetFieldIndex).getTypeTag().serialize());
+        byte tag = recPointable.getClosedFieldType(recordType, presetFieldIndex).getTypeTag().serialize();
+        prevDos.write(tag);
         prevDos.write(recPointable.getByteArray(), recPointable.getClosedFieldOffset(recordType, presetFieldIndex),
                 recPointable.getClosedFieldSize(recordType, presetFieldIndex));
         prevRecWithPKWithFilterValue.addFieldEndOffset();
@@ -244,14 +260,10 @@ public class AsterixLSMPrimaryUpsertOperatorNodePushable extends LSMIndexInsertU
     }
 
     private RangePredicate createSearchPredicate() {
-        keySearchCmp = BTreeUtils.getSearchMultiComparator(lsmIndex.getComparatorFactories(), key);
+        keySearchCmp = BTreeUtils.getSearchMultiComparator(((ITreeIndex) index).getComparatorFactories(), key);
         return new RangePredicate(key, key, true, true, keySearchCmp, keySearchCmp, null, null);
     }
 
-    protected IIndexCursor createCursor() {
-        return indexAccessor.createSearchCursor(false);
-    }
-
     @Override
     public void close() throws HyracksDataException {
         try {

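Summarizing the reworked frame-processing loop above: each tuple triggers a primary-index search; a previous record, if found, is remembered; the new record is inserted unless its payload field is null (the change-feed delete case detected by isNull()); and writeOutput() emits a result only when a record was actually inserted or deleted. A minimal sketch of that control flow, with a plain Map standing in for the LSM index accessor:

    import java.util.HashMap;
    import java.util.Map;

    // Minimal sketch: a Map replaces the LSM B-tree. A null new value
    // models the change-feed delete the operator now detects with isNull().
    public class UpsertFlowSketch {
        static void upsert(Map<String, String> index, String key, String newValue) {
            String prev = index.remove(key);  // search, then drop the previous record
            boolean inserted = false;
            if (newValue != null) {           // null payload: delete-only operation
                index.put(key, newValue);
                inserted = true;
            }
            if (inserted || prev != null) {
                // only actual changes are handed to the secondary-index pipeline
                System.out.println(key + ": prev=" + prev + ", new=" + newValue);
            }
        }

        public static void main(String[] args) {
            Map<String, String> index = new HashMap<>();
            upsert(index, "k1", "v1");  // plain insert
            upsert(index, "k1", "v2");  // upsert: reports the previous record
            upsert(index, "k1", null);  // change-feed delete
            upsert(index, "k2", null);  // no-op: nothing inserted or deleted, no output
        }
    }

The last call produces no output, matching the new recordWasInserted/recordWasDeleted guard in writeOutput().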
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java
index 65dc83f..05b633d 100644
--- a/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java
+++ b/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/AsterixLSMSecondaryUpsertOperatorNodePushable.java
@@ -20,7 +20,6 @@ package org.apache.asterix.runtime.operators;
 
 import java.nio.ByteBuffer;
 
-import org.apache.asterix.om.types.ATypeTag;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -88,10 +87,6 @@ public class AsterixLSMSecondaryUpsertOperatorNodePushable extends LSMIndexInser
         return true;
     }
 
-    private boolean isNull(PermutingFrameTupleReference t1) {
-        return t1.getFieldData(0)[t1.getFieldStart(0)] == ATypeTag.SERIALIZED_NULL_TYPE_TAG;
-    }
-
     @Override
     public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
         accessor.reset(buffer);
@@ -102,8 +97,8 @@ public class AsterixLSMSecondaryUpsertOperatorNodePushable extends LSMIndexInser
                 // if both previous value and new value are null, then we skip
                 tuple.reset(accessor, i);
                 prevValueTuple.reset(accessor, i);
-                isNewNull = isNull(tuple);
-                isPrevValueNull = isNull(prevValueTuple);
+                isNewNull = AsterixLSMPrimaryUpsertOperatorNodePushable.isNull(tuple, 0);
+                isPrevValueNull = AsterixLSMPrimaryUpsertOperatorNodePushable.isNull(prevValueTuple, 0);
                 if (isNewNull && isPrevValueNull) {
                     continue;
                 }
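
Both upsert operators now share one isNull() check that reads the first byte of a serialized field, which in ADM's tagged layout is the type tag. A sketch of the check; the tag constants below are illustrative values, not necessarily the bytes AsterixDB serializes:

    // Sketch only: tag values are assumptions for illustration.
    public class NullTagSketch {
        static final byte SERIALIZED_NULL_TYPE_TAG = 31;
        static final byte SERIALIZED_INT64_TYPE_TAG = 13;

        static boolean isNull(byte[] fieldData, int fieldStart) {
            // the first byte of a serialized field is its type tag
            return fieldData[fieldStart] == SERIALIZED_NULL_TYPE_TAG;
        }

        public static void main(String[] args) {
            byte[] frame = { SERIALIZED_INT64_TYPE_TAG, 0, 0, 0, 0, 0, 0, 0, 7,
                    SERIALIZED_NULL_TYPE_TAG };
            System.out.println(isNull(frame, 0)); // false: tagged as int64
            System.out.println(isNull(frame, 9)); // true: tagged as null
        }
    }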


[06/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
index 50d8ac0..c9fc5fc 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
@@ -19,20 +19,21 @@
 package org.apache.asterix.external.util;
 
 import java.io.File;
-import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.asterix.common.cluster.ClusterPartition;
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint.PartitionConstraintType;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.comm.FrameHelper;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
@@ -57,9 +58,9 @@ public class FeedUtils {
     }
 
     public static FileSplit[] splitsForAdapter(String dataverseName, String feedName,
-            AlgebricksPartitionConstraint partitionConstraints) throws Exception {
+            AlgebricksPartitionConstraint partitionConstraints) throws AsterixException {
         if (partitionConstraints.getPartitionConstraintType() == PartitionConstraintType.COUNT) {
-            throw new AlgebricksException("Can't create file splits for adapter with count partitioning constraints");
+            throw new AsterixException("Can't create file splits for adapter with count partitioning constraints");
         }
         File relPathFile = new File(prepareDataverseFeedName(dataverseName, feedName));
         String[] locations = null;
@@ -86,7 +87,7 @@ public class FeedUtils {
     }
 
     public static FeedLogManager getFeedLogManager(IHyracksTaskContext ctx, int partition,
-            FileSplit[] feedLogFileSplits) throws IOException {
+            FileSplit[] feedLogFileSplits) throws HyracksDataException {
         return new FeedLogManager(
                 FeedUtils.getAbsoluteFileRef(feedLogFileSplits[partition].getLocalFile().getFile().getPath(),
                         feedLogFileSplits[partition].getIODeviceId(), ctx.getIOManager()).getFile());
@@ -103,4 +104,13 @@ public class FeedUtils {
         message.flip();
         IntSerDeUtils.putInt(input.array(), FrameHelper.getTupleCountOffset(input.capacity()), tc);
     }
+
+    public static int getNumOfFields(Map<String, String> configuration) {
+        return 1;
+    }
+
+    public static String getFeedMetaTypeName(Map<String, String> configuration) {
+        return configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME);
+
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
index 2e3b8ec..386a8cf 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
@@ -41,7 +41,7 @@ import org.apache.log4j.Logger;
 
 public class FileSystemWatcher {
 
-    private static Logger LOGGER = Logger.getLogger(FileSystemWatcher.class.getName());
+    private static final Logger LOGGER = Logger.getLogger(FileSystemWatcher.class.getName());
     private final WatchService watcher;
     private final HashMap<WatchKey, Path> keys;
     private final LinkedList<File> files = new LinkedList<File>();
@@ -54,28 +54,36 @@ public class FileSystemWatcher {
     private File current;
     private AbstractFeedDataFlowController controller;
 
-    public FileSystemWatcher(Path inputResource, String expression, boolean isFeed) throws IOException {
-        this.watcher = isFeed ? FileSystems.getDefault().newWatchService() : null;
-        this.keys = isFeed ? new HashMap<WatchKey, Path>() : null;
-        this.expression = expression;
-        this.path = inputResource;
-        this.isFeed = isFeed;
+    public FileSystemWatcher(Path inputResource, String expression, boolean isFeed) throws HyracksDataException {
+        try {
+            this.watcher = isFeed ? FileSystems.getDefault().newWatchService() : null;
+            this.keys = isFeed ? new HashMap<WatchKey, Path>() : null;
+            this.expression = expression;
+            this.path = inputResource;
+            this.isFeed = isFeed;
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     public void setFeedLogManager(FeedLogManager feedLogManager) {
         this.logManager = feedLogManager;
     }
 
-    public void init() throws IOException {
-        LinkedList<Path> dirs = null;
-        dirs = new LinkedList<Path>();
-        LocalFileSystemUtils.traverse(files, path.toFile(), expression, dirs);
-        it = files.iterator();
-        if (isFeed) {
-            for (Path path : dirs) {
-                register(path);
+    public void init() throws HyracksDataException {
+        try {
+            LinkedList<Path> dirs = null;
+            dirs = new LinkedList<Path>();
+            LocalFileSystemUtils.traverse(files, path.toFile(), expression, dirs);
+            it = files.iterator();
+            if (isFeed) {
+                for (Path path : dirs) {
+                    register(path);
+                }
+                resume();
             }
-            resume();
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
         }
     }
 
@@ -178,7 +186,7 @@ public class FileSystemWatcher {
     }
 
     public File next() throws IOException {
-        if (current != null && logManager != null) {
+        if ((current != null) && (logManager != null)) {
             logManager.startPartition(current.getAbsolutePath());
             logManager.endPartition();
         }
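
A pattern that recurs through this commit, visible in the FileSystemWatcher constructor and init() above, is catching checked IOExceptions where they arise and rethrowing them wrapped in HyracksDataException, so callers deal with a single engine-level failure type. A sketch of the pattern, with DataflowException as a stand-in for HyracksDataException:

    import java.io.IOException;

    // Sketch only: DataflowException stands in for HyracksDataException.
    public class ExceptionBoundarySketch {
        static class DataflowException extends Exception {
            DataflowException(Throwable cause) { super(cause); }
        }

        static void init(boolean fail) throws DataflowException {
            try {
                if (fail) {
                    throw new IOException("watch service unavailable");
                }
            } catch (IOException e) {
                throw new DataflowException(e); // original cause preserved
            }
        }

        public static void main(String[] args) {
            try {
                init(true);
            } catch (DataflowException e) {
                System.out.println("wrapped: " + e.getCause().getMessage());
            }
        }
    }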

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
index 9a72135..84ccc35 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
@@ -178,7 +178,7 @@ public class HDFSUtils {
         }
     }
 
-    public static JobConf configureHDFSJobConf(Map<String, String> configuration) throws Exception {
+    public static JobConf configureHDFSJobConf(Map<String, String> configuration) {
         JobConf conf = new JobConf();
 
         String localShortCircuitSocketPath = configuration.get(ExternalDataConstants.KEY_LOCAL_SOCKET_PATH);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java
index 3670368..b86c170 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/TweetGenerator.java
@@ -32,8 +32,7 @@ import org.apache.asterix.external.util.DataGenerator.TweetMessage;
 import org.apache.asterix.external.util.DataGenerator.TweetMessageIterator;
 
 public class TweetGenerator {
-
-    private static Logger LOGGER = Logger.getLogger(TweetGenerator.class.getName());
+    private static final Logger LOGGER = Logger.getLogger(TweetGenerator.class.getName());
 
     public static final String KEY_DURATION = "duration";
     public static final String KEY_TPS = "tps";
@@ -43,20 +42,20 @@ public class TweetGenerator {
 
     private static final int DEFAULT_DURATION = INFINITY;
 
-    private int duration;
+    private final int duration;
     private TweetMessageIterator tweetIterator = null;
-    private int partition;
+    private final int partition;
     private long tweetCount = 0;
     private int frameTweetCount = 0;
     private int numFlushedTweets = 0;
     private DataGenerator dataGenerator = null;
-    private ByteBuffer outputBuffer = ByteBuffer.allocate(32 * 1024);
-    private String[] fields;
+    private final ByteBuffer outputBuffer = ByteBuffer.allocate(32 * 1024);
+    private final String[] fields;
     private final List<OutputStream> subscribers;
     private final Object lock = new Object();
     private final List<OutputStream> subscribersForRemoval = new ArrayList<OutputStream>();
 
-    public TweetGenerator(Map<String, String> configuration, int partition) throws Exception {
+    public TweetGenerator(Map<String, String> configuration, int partition) {
         this.partition = partition;
         String value = configuration.get(KEY_DURATION);
         this.duration = value != null ? Integer.parseInt(value) : DEFAULT_DURATION;
@@ -70,7 +69,7 @@ public class TweetGenerator {
         String tweet = tweetMessage.getAdmEquivalent(fields) + "\n";
         tweetCount++;
         byte[] b = tweet.getBytes();
-        if (outputBuffer.position() + b.length > outputBuffer.limit()) {
+        if ((outputBuffer.position() + b.length) > outputBuffer.limit()) {
             flush();
             numFlushedTweets += frameTweetCount;
             frameTweetCount = 0;
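
The overflow check above flushes the output buffer whenever the next tweet's bytes would no longer fit. A runnable sketch of the same append-or-flush logic; the 12-byte buffer is deliberately tiny so the second write forces a flush:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    // Sketch of flush-before-overflow buffering.
    public class BufferFlushSketch {
        private final ByteBuffer out = ByteBuffer.allocate(12); // tiny on purpose

        void write(String record) {
            byte[] b = (record + "\n").getBytes(StandardCharsets.UTF_8);
            if (out.position() + b.length > out.limit()) {
                flush(); // next payload would overflow, so drain first
            }
            out.put(b);
        }

        void flush() {
            out.flip();
            System.out.print("flushed: " + StandardCharsets.UTF_8.decode(out));
            out.clear();
        }

        public static void main(String[] args) {
            BufferFlushSketch g = new BufferFlushSketch();
            g.write("tweet-1");
            g.write("tweet-2"); // does not fit: tweet-1 is flushed first
            g.flush();
        }
    }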

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java
index 97ebd6c..1eca7e1 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java
@@ -535,7 +535,8 @@ public class BuiltinClassAdFunctions {
                 } else if (time_arg.isRelativeTimeValue()) {
                     result.setErrorValue();
                 } else if (time_arg.isAbsoluteTimeValue(time_components)) {
-                } else if (!time_arg.isClassAdValue(splitClassAd) /*doSplitTime(time_arg, splitClassAd)*/) {
+                } else if (!time_arg
+                        .isClassAdValue(splitClassAd) /* doSplitTime(time_arg, splitClassAd) */) {
                     result.setErrorValue();
                 } else {
                     if (!splitClassAd.evaluateAttrInt("Seconds", int64)) {
@@ -1863,7 +1864,7 @@ public class BuiltinClassAdFunctions {
                 result.setErrorValue();
                 return false;
             }
-            ClassAdParser parser = new ClassAdParser();
+            ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
             ExprTreeHolder expr = new ExprTreeHolder();
             try {
                 if (!parser.parseExpression(s.toString(), expr, true) || (expr.getInnerTree() == null)) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
index 431fa0c..df6ae0d 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
@@ -33,7 +33,6 @@ import org.apache.asterix.om.base.AMutableDouble;
 import org.apache.asterix.om.base.AMutableInt32;
 import org.apache.asterix.om.base.AMutableInt64;
 import org.apache.asterix.om.base.AMutableString;
-import org.apache.asterix.om.types.ARecordType;
 import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -117,17 +116,13 @@ public class ClassAd extends ExprTree {
     private ClassAd newAd = null;
 
     /*
-     *  Constructors
+     * Constructors
      */
     public ClassAd() {
         chainedParentAd = null;
         alternateScope = null;
         newAd = new ClassAd(false, false);
-        parser = new ClassAdParser();
-    }
-
-    public void configure(Map<String, String> configuration, ARecordType recordType) throws IOException {
-        parser.configure(configuration, recordType);
+        parser = new ClassAdParser(null, false, true, false, null, null, null);
     }
 
     public ClassAd(boolean initializeParser, boolean initializeNewAd) {
@@ -137,7 +132,7 @@ public class ClassAd extends ExprTree {
             newAd = new ClassAd(false, false);
         }
         if (initializeParser) {
-            parser = new ClassAdParser();
+            parser = new ClassAdParser(null, false, true, false, null, null, null);
         }
     }
 
@@ -194,15 +189,18 @@ public class ClassAd extends ExprTree {
 
     //public TreeSet<CaseInsensitiveString> dirtyAttrList = new TreeSet<CaseInsensitiveString>();
 
-    /* Reference is an ordered set of Strings <The ordering uses less than ignore case>. Example below
-     *  TreeSet<String> references = new TreeSet<String>(
-     *        new Comparator<String>(){
-     *            public int compare(String o1, String o2) {
-     *    return o1.compareToIgnoreCase(o2);
-     *    }
-     *            });
+    /*
+     * Reference is an ordered set of Strings <The ordering uses less than ignore case>. Example
+     * below
+     * TreeSet<String> references = new TreeSet<String>(
+     * new Comparator<String>(){
+     * public int compare(String o1, String o2) {
+     * return o1.compareToIgnoreCase(o2);
+     * }
+     * });
      *
-     // PortReferences is a Map<ClassAd,OrderedSet<Strings>> */
+     * // PortReferences is a Map<ClassAd,OrderedSet<Strings>>
+     */
 
     public boolean copyFrom(ClassAd ad) throws HyracksDataException {
 
@@ -555,10 +553,12 @@ public class ClassAd extends ExprTree {
     }
 
     public ExprTree lookup(CaseInsensitiveString name) {
-        /*System.out.println("Lookup Printing all attributes with their values:");
-        for (Entry<String, ExprTree> entry : attrList.entrySet()) {
-            System.out.println(entry.getKey() + ":" + entry.getValue().getKind());
-        }*/
+        /*
+         * System.out.println("Lookup Printing all attributes with their values:");
+         * for (Entry<String, ExprTree> entry : attrList.entrySet()) {
+         * System.out.println(entry.getKey() + ":" + entry.getValue().getKind());
+         * }
+         */
         ExprTree attr = attrList.get(name);
         if (attr != null) {
             return attr;
@@ -861,7 +861,7 @@ public class ClassAd extends ExprTree {
     public boolean evaluateExpr(String buf, Value result) throws HyracksDataException {
         boolean successfully_evaluated;
         ExprTreeHolder tree = new ExprTreeHolder();
-        ClassAdParser parser = new ClassAdParser();
+        ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
 
         try {
             if (parser.parseExpression(buf, tree)) {
@@ -927,15 +927,18 @@ public class ClassAd extends ExprTree {
         return (evaluateAttr(attr, val) && val.isBooleanValueEquiv(b));
     }
 
-    /* Reference is an ordered set of Strings <The ordering uses less than ignore case>. Example below
-     *  TreeSet<String> references = new TreeSet<String>(
-     *        new Comparator<String>(){
-     *            public int compare(String o1, String o2) {
-     *    return o1.compareToIgnoreCase(o2);
-     *    }
-     *            });
+    /*
+     * Reference is an ordered set of Strings <The ordering uses less than ignore case>. Example
+     * below
+     * TreeSet<String> references = new TreeSet<String>(
+     * new Comparator<String>(){
+     * public int compare(String o1, String o2) {
+     * return o1.compareToIgnoreCase(o2);
+     * }
+     * });
      *
-     // PortReferences is a Map<ClassAd,OrderedSet<Strings>> */
+     * // PortReferences is a Map<ClassAd,OrderedSet<Strings>>
+     */
 
     public boolean getExternalReferences(ExprTree tree, TreeSet<String> refs, boolean fullNames)
             throws HyracksDataException {
@@ -1256,15 +1259,18 @@ public class ClassAd extends ExprTree {
         }
     }
 
-    /* Reference is an ordered set of Strings <The ordering uses less than ignore case>. Example below
-     *  TreeSet<String> references = new TreeSet<String>(
-     *        new Comparator<String>(){
-     *            public int compare(String o1, String o2) {
-     *    return o1.compareToIgnoreCase(o2);
-     *    }
-     *            });
+    /*
+     * Reference is an ordered set of Strings <The ordering uses less than ignore case>. Example
+     * below
+     * TreeSet<String> references = new TreeSet<String>(
+     * new Comparator<String>(){
+     * public int compare(String o1, String o2) {
+     * return o1.compareToIgnoreCase(o2);
+     * }
+     * });
      *
-     // PortReferences is a Map<ClassAd,OrderedSet<Strings>> */
+     * // PortReferences is a Map<ClassAd,OrderedSet<Strings>>
+     */
     public boolean getInternalReferences(ExprTree tree, TreeSet<String> refs, boolean fullNames)
             throws HyracksDataException {
         EvalState state = new EvalState();
@@ -1560,6 +1566,6 @@ public class ClassAd extends ExprTree {
     }
 
     public void createParser() {
-        parser = new ClassAdParser();
+        parser = new ClassAdParser(null, false, true, false, null, null, null);
     }
 }
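
The reflowed comments above describe the references container as an ordered set of strings with case-insensitive ordering, but the snippet inside them is fragmentary. A compilable version of the construction they describe:

    import java.util.TreeSet;

    // The ordered set from the ClassAd comments, made compilable.
    public class ReferencesSketch {
        public static void main(String[] args) {
            TreeSet<String> references = new TreeSet<>(String::compareToIgnoreCase);
            references.add("Memory");
            references.add("cpus");
            references.add("MEMORY"); // duplicate under case-insensitive ordering
            System.out.println(references); // [cpus, Memory]
        }
    }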

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
index b0e0925..1c135f5 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
@@ -50,7 +50,7 @@ public class ClassAdParserTest extends TestCase {
     /**
      *
      */
-    public void testApp() {
+    public void test() {
         try {
             // test here
             ClassAd pAd = new ClassAd();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
index 926c40e..57b0dcd 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
@@ -19,7 +19,6 @@
 package org.apache.asterix.external.classad.test;
 
 import java.nio.file.Paths;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -32,7 +31,6 @@ import org.apache.asterix.external.classad.Value;
 import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReader;
 import org.apache.asterix.external.input.stream.LocalFileSystemInputStream;
 import org.apache.asterix.external.library.ClassAdParser;
-import org.apache.asterix.external.util.ExternalDataConstants;
 
 import junit.framework.Test;
 import junit.framework.TestCase;
@@ -59,27 +57,19 @@ public class ClassAdToADMTest extends TestCase {
     /**
      *
      */
-    public void testApp() {
+    public void test() {
         try {
             // test here
             ClassAd pAd = new ClassAd();
             String[] files = new String[] { "/jobads.txt" };
-            ClassAdParser parser = new ClassAdParser();
+            ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
             CharArrayLexerSource lexerSource = new CharArrayLexerSource();
             for (String path : files) {
-                SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader();
-                HashMap<String, String> configuration = new HashMap<String, String>();
-                configuration.put(ExternalDataConstants.KEY_RECORD_START, "[");
-                configuration.put(ExternalDataConstants.KEY_RECORD_END, "]");
-                recordReader.configure(configuration);
                 LocalFileSystemInputStream in = new LocalFileSystemInputStream(
                         Paths.get(getClass().getResource(path).toURI()), null, false);
-                in.configure(configuration);
-                recordReader.setInputStream(in);
+                SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, null, "[", "]");
                 Value val = new Value();
-                int i = 0;
                 while (recordReader.hasNext()) {
-                    i++;
                     val.clear();
                     IRawRecord<char[]> record = recordReader.next();
                     lexerSource.setNewSource(record.get());
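
The test above now hands the record delimiters ("[" and "]") straight to the SemiStructuredRecordReader constructor instead of going through a configuration map. As a rough illustration of what such a reader does, here is a simplified scanner for bracket-delimited records that handles nesting; it is a sketch under the assumption of single-character delimiters and well-formed input, not the actual reader:

    import java.util.ArrayList;
    import java.util.List;

    // Sketch only: scans a string for top-level bracketed records.
    public class BracketRecordSketch {
        static List<String> records(String input, char start, char end) {
            List<String> out = new ArrayList<>();
            int depth = 0;
            int recordStart = -1;
            for (int i = 0; i < input.length(); i++) {
                char c = input.charAt(i);
                if (c == start && depth++ == 0) {
                    recordStart = i;                      // new top-level record begins
                } else if (c == end && --depth == 0) {
                    out.add(input.substring(recordStart, i + 1));
                }
            }
            return out;
        }

        public static void main(String[] args) {
            String ads = "[Cpus = 4;] noise [Memory = 2048; Env = [x = 1;]]";
            records(ads, '[', ']').forEach(System.out::println);
        }
    }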

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java
index f972f45..b9f32f5 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java
@@ -48,7 +48,7 @@ public class ClassAdUnitTest extends TestCase {
     /**
      * Rigorous Test :-)
      */
-    public void testApp() {
+    public void test() {
         String[] args = { "", "-d", "-vv", "-all" };
         try {
             ClassAdUnitTester.test(args.length, args);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
index 5b1e0b5..cec9299 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
@@ -273,7 +273,7 @@ public class ClassAdUnitTester {
      * @throws IOException
      *********************************************************************/
     public static void testParsing(Parameters parameters, Results results) throws IOException {
-        ClassAdParser parser = new ClassAdParser();
+        ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
         ExprTree tree;
 
         // My goal is to ensure that these expressions don't crash
@@ -316,7 +316,7 @@ public class ClassAdUnitTester {
      * @throws IOException
      *********************************************************************/
     public static void testClassad(Parameters parameters, Results results) throws IOException {
-        ClassAdParser parser = new ClassAdParser();
+        ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
         boolean haveAttribute;
         boolean success;
 
@@ -574,7 +574,7 @@ public class ClassAdUnitTester {
 
         /* ----- Test an ExprList bug that Nate Mueller found ----- */
         ClassAd classad;
-        ClassAdParser parser = new ClassAdParser();
+        ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
         MutableBoolean b = new MutableBoolean();
         boolean haveAttribute;
         boolean canEvaluate;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java
index f4cf9db..c9ce47c 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java
@@ -909,7 +909,7 @@ public class FunctionalTester {
     public static ExprTree get_expr(AMutableString line, State state, Parameters parameters) throws IOException {
         int offset;
         ExprTree tree;
-        ClassAdParser parser = new ClassAdParser();
+        ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
         StringLexerSource lexer_source = new StringLexerSource(line.getStringValue());
 
         tree = parser.parseExpression(lexer_source, false);
@@ -932,7 +932,7 @@ public class FunctionalTester {
     public static void get_two_exprs(AMutableString line, ExprTreeHolder tree1, ExprTreeHolder tree2, State state,
             Parameters parameters) throws IOException {
         int offset;
-        ClassAdParser parser = new ClassAdParser();
+        ClassAdParser parser = new ClassAdParser(null, false, true, false, null, null, null);
         StringLexerSource lexer_source = new StringLexerSource(line.getStringValue());
 
         tree1.setInnerTree(parser.parseExpression(lexer_source, false));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/generator/test/DCPGeneratorTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/generator/test/DCPGeneratorTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/generator/test/DCPGeneratorTest.java
new file mode 100644
index 0000000..14b6ed1
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/generator/test/DCPGeneratorTest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.generator.test;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+import org.apache.asterix.external.input.record.converter.DCPRequestToRecordWithMetadataAndPKConverter;
+import org.apache.asterix.external.input.record.reader.kv.KVTestReader;
+import org.apache.hyracks.data.std.primitive.UTF8StringPointable;
+import org.junit.Test;
+
+import com.couchbase.client.core.message.dcp.DCPRequest;
+
+public class DCPGeneratorTest {
+
+    @Test
+    public void runTest() throws Exception {
+        try (KVTestReader cbreader = new KVTestReader(0, "TestBucket",
+                new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 }, 150, 0, 0)) {
+            final UTF8StringPointable pointable = new UTF8StringPointable();
+            final DCPRequestToRecordWithMetadataAndPKConverter converter = new DCPRequestToRecordWithMetadataAndPKConverter();
+            while (cbreader.hasNext()) {
+                final IRawRecord<DCPRequest> dcp = cbreader.next();
+                final RecordWithMetadataAndPK<char[]> record = converter.convert(dcp);
+                if (record.getRecord().size() == 0) {
+                    pointable.set(record.getMetadata(0).getByteArray(), 1, record.getMetadata(0).getLength());
+                } else {
+                    pointable.set(record.getMetadata(0).getByteArray(), 1, record.getMetadata(0).getLength());
+                }
+            }
+        } catch (final Throwable th) {
+            System.err.println("TEST FAILED");
+            th.printStackTrace();
+            throw th;
+        }
+        System.err.println("TEST PASSED.");
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
index 2882083..d6db6c2 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
@@ -35,7 +35,6 @@ import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilderFactory;
 import org.apache.asterix.builders.UnorderedListBuilder;
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.classad.AMutableCharArrayString;
@@ -70,8 +69,6 @@ import org.apache.asterix.external.classad.object.pool.OperationPool;
 import org.apache.asterix.external.classad.object.pool.TokenValuePool;
 import org.apache.asterix.external.classad.object.pool.ValuePool;
 import org.apache.asterix.external.parser.AbstractDataParser;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.om.base.ABoolean;
 import org.apache.asterix.om.base.AMutableInt32;
 import org.apache.asterix.om.types.AOrderedListType;
@@ -124,14 +121,20 @@ public class ClassAdParser extends AbstractDataParser implements IRecordDataPars
     private boolean oldFormat = false;
     private StringLexerSource stringLexerSource = new StringLexerSource("");
 
-    public ClassAdParser(ARecordType recordType) {
+    public ClassAdParser(ARecordType recordType, boolean oldFormat, boolean evaluateExpr, boolean keepBoth,
+            String exprPrefix, String exprSuffix, String exprFieldNameSuffix) {
         this.recordType = recordType;
         this.currentSource = new CharArrayLexerSource();
-    }
-
-    public ClassAdParser() {
-        this.recordType = null;
-        this.currentSource = new CharArrayLexerSource();
+        this.recordType = recordType;
+        this.oldFormat = oldFormat;
+        if (oldFormat) {
+            rootAd.createParser();
+        }
+        this.keepBoth = keepBoth;
+        this.evaluateExpr = evaluateExpr;
+        this.exprPrefix = exprPrefix;
+        this.exprSuffix = exprSuffix;
+        this.exprFieldNameSuffix = exprFieldNameSuffix;
     }
 
     /***********************************
@@ -565,7 +568,6 @@ public class ClassAdParser extends AbstractDataParser implements IRecordDataPars
     }
 
     private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-    private Map<String, String> configuration;
 
     private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType) throws IOException {
         return getTargetTypeTag(expectedTypeTag, aObjectType) != null;
@@ -1695,55 +1697,6 @@ public class ClassAdParser extends AbstractDataParser implements IRecordDataPars
     }
 
     @Override
-    public DataSourceType getDataSourceType() {
-        return ExternalDataUtils.isDataSourceStreamProvider(configuration) ? DataSourceType.STREAM
-                : DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType recordType) throws IOException {
-        this.recordType = recordType;
-        this.configuration = configuration;
-        String parserConfig = configuration.get(ClassAdParserFactory.KEY_OLD_FORMAT);
-        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.TRUE)) {
-            oldFormat = true;
-            rootAd.createParser();
-        }
-        parserConfig = configuration.get(ExternalDataConstants.KEY_READER);
-        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.READER_LINE_SEPARATED)) {
-            oldFormat = true;
-            rootAd.createParser();
-        }
-
-        parserConfig = configuration.get(ClassAdParserFactory.KEY_EVALUATE);
-        if (parserConfig != null && parserConfig.equalsIgnoreCase("false")) {
-            evaluateExpr = false;
-            keepBoth = false;
-        }
-        parserConfig = configuration.get(ClassAdParserFactory.KEY_KEEP_EXPR);
-        if (parserConfig != null && parserConfig.equalsIgnoreCase("false")) {
-            keepBoth = false;
-            evaluateExpr = true;
-        }
-        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_PREFIX);
-        if (parserConfig != null && parserConfig.trim().length() > 0) {
-            exprPrefix = parserConfig;
-        }
-        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_SUFFIX);
-        if (parserConfig != null && parserConfig.trim().length() > 0) {
-            exprSuffix = parserConfig;
-        }
-        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_NAME_SUFFIX);
-        if (parserConfig != null && parserConfig.trim().length() > 0) {
-            exprFieldNameSuffix = parserConfig;
-        }
-        if (!oldFormat) {
-            configuration.put(ExternalDataConstants.KEY_RECORD_START, "[");
-            configuration.put(ExternalDataConstants.KEY_RECORD_END, "]");
-        }
-    }
-
-    @Override
     public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws IOException {
         try {
             if (oldFormat) {
@@ -1775,9 +1728,4 @@ public class ClassAdParser extends AbstractDataParser implements IRecordDataPars
             throw new HyracksDataException(e);
         }
     }
-
-    @Override
-    public Class<? extends char[]> getRecordClass() {
-        return char[].class;
-    }
 }
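
The net effect of deleting configure() and getDataSourceType() here, together with the ClassAdParserFactory change below, is that option parsing happens once in the factory and each parser is fully initialized through its constructor. A sketch of that factory/constructor split, with hypothetical names and configuration keys:

    import java.util.Map;

    // Sketch only: all names and keys are illustrative.
    public class ParserFactorySketch {
        static class Parser {
            final boolean oldFormat;
            final boolean evaluateExpr;

            Parser(boolean oldFormat, boolean evaluateExpr) {
                this.oldFormat = oldFormat;
                this.evaluateExpr = evaluateExpr;
            }
        }

        static class Factory {
            private boolean oldFormat;
            private boolean evaluateExpr = true;

            void configure(Map<String, String> configuration) {
                oldFormat = "true".equalsIgnoreCase(configuration.get("old-format"));
                if ("false".equalsIgnoreCase(configuration.get("evaluate"))) {
                    evaluateExpr = false;
                }
            }

            Parser createParser() {
                return new Parser(oldFormat, evaluateExpr); // no post-construction configure()
            }
        }

        public static void main(String[] args) {
            Factory f = new Factory();
            f.configure(Map.of("old-format", "true"));
            Parser p = f.createParser();
            System.out.println(p.oldFormat + " " + p.evaluateExpr);
        }
    }

Constructing parsers fully initialized makes them effectively immutable, which avoids the half-configured states the old configure() path allowed.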

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java
index 97982df..5064992 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java
@@ -19,10 +19,10 @@
 package org.apache.asterix.external.library;
 
 import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
@@ -39,37 +39,36 @@ public class ClassAdParserFactory implements IRecordDataParserFactory<char[]> {
     public static final String KEY_EXPR_PREFIX = "expr-prefix";
     public static final String KEY_EXPR_SUFFIX = "expr-suffix";
     public static final String KEY_EXPR_NAME_SUFFIX = "expr-name-suffix";
-
     private ARecordType recordType;
     private Map<String, String> configuration;
     private boolean oldFormat = false;
+    private boolean evaluateExpr = true;
+    private boolean keepBoth;
+    private String exprPrefix;
+    private String exprSuffix;
+    private String exprFieldNameSuffix;
 
-    private void writeObject(java.io.ObjectOutputStream stream) throws IOException {
+    private void writeObject(ObjectOutputStream stream) throws IOException {
         stream.writeObject(recordType);
         stream.writeObject(configuration);
     }
 
     @SuppressWarnings("unchecked")
-    private void readObject(java.io.ObjectInputStream stream) throws IOException, ClassNotFoundException {
+    private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
         recordType = (ARecordType) stream.readObject();
         configuration = (Map<String, String>) stream.readObject();
     }
 
     @Override
-    public DataSourceType getDataSourceType() throws AsterixException {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
+    public void configure(Map<String, String> configuration) {
         this.configuration = configuration;
         // is old format?
         String parserConfig = configuration.get(KEY_OLD_FORMAT);
-        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.TRUE)) {
+        if ((parserConfig != null) && parserConfig.equalsIgnoreCase(ExternalDataConstants.TRUE)) {
             oldFormat = true;
         }
-        parserConfig = configuration.get(ExternalDataConstants.KEY_READER);
-        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.READER_LINE_SEPARATED)) {
+        parserConfig = configuration.get(ExternalDataConstants.KEY_FORMAT);
+        if ((parserConfig != null) && parserConfig.equalsIgnoreCase(ExternalDataConstants.FORMAT_LINE_SEPARATED)) {
             oldFormat = true;
         }
         if (!oldFormat) {
@@ -77,6 +76,30 @@ public class ClassAdParserFactory implements IRecordDataParserFactory<char[]> {
             configuration.put(ExternalDataConstants.KEY_RECORD_END, "]");
         }
 
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EVALUATE);
+        if ((parserConfig != null) && parserConfig.equalsIgnoreCase("false")) {
+            evaluateExpr = false;
+            keepBoth = false;
+        }
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_KEEP_EXPR);
+        if ((parserConfig != null) && parserConfig.equalsIgnoreCase("false")) {
+            keepBoth = false;
+            evaluateExpr = true;
+        }
+
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_PREFIX);
+        if ((parserConfig != null) && (parserConfig.trim().length() > 0)) {
+            exprPrefix = parserConfig;
+        }
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_SUFFIX);
+        if ((parserConfig != null) && (parserConfig.trim().length() > 0)) {
+            exprSuffix = parserConfig;
+        }
+
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_NAME_SUFFIX);
+        if ((parserConfig != null) && (parserConfig.trim().length() > 0)) {
+            exprFieldNameSuffix = parserConfig;
+        }
     }
 
     @Override
@@ -85,11 +108,9 @@ public class ClassAdParserFactory implements IRecordDataParserFactory<char[]> {
     }
 
     @Override
-    public IRecordDataParser<char[]> createRecordParser(IHyracksTaskContext ctx)
-            throws HyracksDataException, AsterixException, IOException {
-        ClassAdParser parser = new ClassAdParser(recordType);
-        parser.configure(configuration, recordType);
-        return parser;
+    public IRecordDataParser<char[]> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException {
+        return new ClassAdParser(recordType, oldFormat, evaluateExpr, keepBoth, exprPrefix, exprSuffix,
+                exprFieldNameSuffix);
     }
 
     @Override
@@ -97,4 +118,8 @@ public class ClassAdParserFactory implements IRecordDataParserFactory<char[]> {
         return char[].class;
     }
 
+    @Override
+    public void setMetaType(ARecordType metaType) {
+    }
+
 }
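
The net effect of this refactoring is that option parsing happens exactly once, in the serializable factory's configure(), and each ClassAdParser is then constructed with final settings instead of re-reading the configuration map per instance. A minimal sketch of that configure-once, construct-immutable pattern, using hypothetical names rather than the real AsterixDB types:

    import java.util.Map;

    // Factory parses the option map once; parsers receive final fields.
    class OptionParsingFactory {
        private boolean oldFormat;
        private String exprPrefix; // stays null when unset

        void configure(Map<String, String> config) {
            oldFormat = "true".equalsIgnoreCase(config.get("old-format"));
            String prefix = config.get("expr-prefix");
            if (prefix != null && !prefix.trim().isEmpty()) {
                exprPrefix = prefix;
            }
        }

        ImmutableParser createParser() {
            // settings travel through the constructor, as in the commit
            return new ImmutableParser(oldFormat, exprPrefix);
        }
    }

    class ImmutableParser {
        private final boolean oldFormat;
        private final String exprPrefix;

        ImmutableParser(boolean oldFormat, String exprPrefix) {
            this.oldFormat = oldFormat;
            this.exprPrefix = exprPrefix;
        }
    }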

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
index 2fc289b..c362969 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
@@ -28,15 +28,16 @@ import java.util.concurrent.Executors;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.file.ITupleParser;
 import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
 
-public class TestTypedAdapter implements IFeedAdapter {
+public class TestTypedAdapter extends FeedAdapter {
 
     private static final long serialVersionUID = 1L;
 
@@ -56,6 +57,7 @@ public class TestTypedAdapter implements IFeedAdapter {
 
     public TestTypedAdapter(ITupleParserFactory parserFactory, ARecordType sourceDatatype, IHyracksTaskContext ctx,
             Map<String, String> configuration, int partition) throws IOException {
+        super(null);
         pos = new PipedOutputStream();
         pis = new PipedInputStream(pos);
         this.configuration = configuration;
@@ -64,7 +66,7 @@ public class TestTypedAdapter implements IFeedAdapter {
     }
 
     @Override
-    public void start(int partition, IFrameWriter writer) throws Exception {
+    public void start(int partition, IFrameWriter writer) throws HyracksDataException {
         generator = new DummyGenerator(configuration, pos);
         ExecutorService executor = Executors.newSingleThreadExecutor();
         executor.execute(generator);
@@ -137,7 +139,7 @@ public class TestTypedAdapter implements IFeedAdapter {
     }
 
     @Override
-    public boolean stop() throws Exception {
+    public boolean stop() {
         generator.stop();
         return true;
     }
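
TestTypedAdapter hands generated data to the parser through a pipe: a generator thread writes into a PipedOutputStream while the tuple parser consumes the connected PipedInputStream. A self-contained sketch of that handoff, using only JDK types and illustrative names:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.io.OutputStream;
    import java.io.PipedInputStream;
    import java.io.PipedOutputStream;
    import java.io.UncheckedIOException;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class PipedHandoffDemo {
        public static void main(String[] args) throws IOException {
            PipedOutputStream pos = new PipedOutputStream();
            PipedInputStream pis = new PipedInputStream(pos);
            ExecutorService executor = Executors.newSingleThreadExecutor();
            // generator thread, like DummyGenerator in the test adapter
            executor.execute(() -> {
                try (OutputStream out = pos) {
                    out.write("{\"id\": 1}\n".getBytes(StandardCharsets.UTF_8));
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            });
            // consumer side, standing in for the tuple parser
            try (BufferedReader reader =
                    new BufferedReader(new InputStreamReader(pis, StandardCharsets.UTF_8))) {
                System.out.println(reader.readLine()); // prints {"id": 1}
            }
            executor.shutdown();
        }
    }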

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index 6b11d21..9082a72 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@ -18,9 +18,11 @@
  */
 package org.apache.asterix.external.library.adapter;
 
+import java.io.IOException;
 import java.io.InputStream;
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
@@ -56,29 +58,28 @@ public class TestTypedAdapterFactory implements IAdapterFactory {
     }
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
         clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, 1);
         return clusterLocations;
     }
 
     @Override
-    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
         final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
-        ITupleParserFactory tupleParserFactory = new ITupleParserFactory() {
+        final ITupleParserFactory tupleParserFactory = new ITupleParserFactory() {
             private static final long serialVersionUID = 1L;
 
             @Override
-            public ITupleParser createTupleParser(final IHyracksTaskContext ctx) throws HyracksDataException {
+            public ITupleParser createTupleParser(IHyracksTaskContext ctx) throws HyracksDataException {
                 ADMDataParser parser;
                 ITupleForwarder forwarder;
                 ArrayTupleBuilder tb;
                 try {
-                    parser = new ADMDataParser();
+                    parser = new ADMDataParser(outputType, true);
                     forwarder = DataflowUtils.getTupleForwarder(configuration,
                             FeedUtils.getFeedLogManager(ctx, partition,
                                     FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
                                             ExternalDataUtils.getFeedName(configuration), nodeId, partition)));
-                    forwarder.configure(configuration);
                     tb = new ArrayTupleBuilder(1);
                 } catch (Exception e) {
                     throw new HyracksDataException(e);
@@ -88,7 +89,6 @@ public class TestTypedAdapterFactory implements IAdapterFactory {
                     @Override
                     public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
                         try {
-                            parser.configure(configuration, outputType);
                             parser.setInputStream(in);
                             forwarder.initialize(ctx, writer);
                             while (true) {
@@ -107,7 +107,11 @@ public class TestTypedAdapterFactory implements IAdapterFactory {
                 };
             }
         };
-        return new TestTypedAdapter(tupleParserFactory, outputType, ctx, configuration, partition);
+        try {
+            return new TestTypedAdapter(tupleParserFactory, outputType, ctx, configuration, partition);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override
@@ -116,9 +120,8 @@ public class TestTypedAdapterFactory implements IAdapterFactory {
     }
 
     @Override
-    public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
+    public void configure(Map<String, String> configuration, ARecordType outputType, ARecordType metaType) {
         this.configuration = configuration;
         this.outputType = outputType;
     }
-
 }
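
The createAdapter() change narrows the declared exception type and wraps the checked IOException at the API boundary. A small stand-alone sketch of the same wrap-at-the-boundary pattern; UncheckedIOException stands in here for HyracksDataException:

    import java.io.IOException;
    import java.io.UncheckedIOException;

    public class BoundaryWrapDemo {
        static Object createAdapter() {
            try {
                return mayFail(); // constructor that declares IOException
            } catch (IOException e) {
                // the commit wraps into HyracksDataException at this point
                throw new UncheckedIOException(e);
            }
        }

        static Object mayFail() throws IOException {
            return new Object();
        }

        public static void main(String[] args) {
            System.out.println(createAdapter() != null); // true
        }
    }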

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
index 4303442..6f36eab 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.parser.test;
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutput;
 import java.io.DataOutputStream;
+import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.asterix.external.parser.ADMDataParser;
@@ -35,7 +36,7 @@ import junit.extensions.PA;
 public class ADMDataParserTest {
 
     @Test
-    public void test() {
+    public void test() throws IOException {
         String[] dates = { "-9537-08-04", "9656-06-03", "-9537-04-04", "9656-06-04", "-9537-10-04", "9626-09-05" };
         AMutableDate[] parsedDates = new AMutableDate[] { new AMutableDate(-4202630), new AMutableDate(2807408),
                 new AMutableDate(-4202752), new AMutableDate(2807409), new AMutableDate(-4202569),
@@ -58,7 +59,7 @@ public class ADMDataParserTest {
         AtomicInteger errorCount = new AtomicInteger(0);
         for (int i = 0; i < threads.length; ++i) {
             threads[i] = new Thread(new Runnable() {
-                ADMDataParser parser = new ADMDataParser();
+                ADMDataParser parser = new ADMDataParser(null, true);
                 ByteArrayOutputStream bos = new ByteArrayOutputStream();
                 DataOutput dos = new DataOutputStream(bos);
 
@@ -69,16 +70,16 @@ public class ADMDataParserTest {
                         while (round++ < 10000) {
                             // Test parseDate.
                             for (int index = 0; index < dates.length; ++index) {
-                                PA.invokeMethod(parser, "parseDate(java.lang.String, java.io.DataOutput)",
-                                        dates[index], dos);
+                                PA.invokeMethod(parser, "parseDate(java.lang.String, java.io.DataOutput)", dates[index],
+                                        dos);
                                 AMutableDate aDate = (AMutableDate) PA.getValue(parser, "aDate");
                                 Assert.assertTrue(aDate.equals(parsedDates[index]));
                             }
 
                             // Tests parseTime.
                             for (int index = 0; index < times.length; ++index) {
-                                PA.invokeMethod(parser, "parseTime(java.lang.String, java.io.DataOutput)",
-                                        times[index], dos);
+                                PA.invokeMethod(parser, "parseTime(java.lang.String, java.io.DataOutput)", times[index],
+                                        dos);
                                 AMutableTime aTime = (AMutableTime) PA.getValue(parser, "aTime");
                                 Assert.assertTrue(aTime.equals(parsedTimes[index]));
                             }
@@ -112,5 +113,4 @@ public class ADMDataParserTest {
         // Asserts no failure.
         Assert.assertTrue(errorCount.get() == 0);
     }
-
 }
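
The test's concurrency pattern is: each worker thread owns its own parser instance, any failure bumps a shared AtomicInteger, and the main thread joins all workers before asserting the count is zero. A compact, JDK-only analogue (the per-thread StringBuilder stands in for the per-thread ADMDataParser):

    import java.util.concurrent.atomic.AtomicInteger;

    public class ParallelCheckDemo {
        public static void main(String[] args) throws InterruptedException {
            AtomicInteger errorCount = new AtomicInteger(0);
            Thread[] threads = new Thread[16];
            for (int i = 0; i < threads.length; i++) {
                threads[i] = new Thread(() -> {
                    // per-thread state, like the per-thread ADMDataParser
                    StringBuilder local = new StringBuilder();
                    try {
                        for (int round = 0; round < 10_000; round++) {
                            local.setLength(0);
                            local.append(round);
                        }
                    } catch (Exception e) {
                        errorCount.incrementAndGet();
                    }
                });
                threads[i].start();
            }
            for (Thread t : threads) {
                t.join(); // wait before asserting, as the test does
            }
            System.out.println(errorCount.get() == 0 ? "no failures" : "failed");
        }
    }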

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java
new file mode 100644
index 0000000..521cee0
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.parser.test;
+
+import java.io.File;
+import java.io.PrintStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.input.record.converter.CSVToRecordWithMetadataAndPKConverter;
+import org.apache.asterix.external.input.record.reader.stream.QuotedLineRecordReader;
+import org.apache.asterix.external.input.stream.LocalFileSystemInputStream;
+import org.apache.asterix.external.parser.ADMDataParser;
+import org.apache.asterix.external.parser.RecordWithMetadataParser;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.formats.nontagged.AqlADMPrinterFactoryProvider;
+import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.commons.io.FileUtils;
+import org.apache.hyracks.algebricks.data.IPrinter;
+import org.apache.hyracks.algebricks.data.IPrinterFactory;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class RecordWithMetaTest {
+    private static ARecordType recordType;
+
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    @Test
+    public void runTest() throws Exception {
+        File file = new File("target/beer.adm");
+        File expected = new File(getClass().getResource("/results/beer.txt").toURI().getPath());
+        try {
+            FileUtils.deleteQuietly(file);
+            PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
+            // create key type
+            IAType[] keyTypes = { BuiltinType.ASTRING };
+            String keyName = "id";
+            List<String> keyNameAsList = new ArrayList<>(1);
+            keyNameAsList.add(keyName);
+            // create record type
+            String[] recordFieldNames = {};
+            IAType[] recordFieldTypes = {};
+            recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
+            // create the meta type
+            String[] metaFieldNames = { keyName, "flags", "expiration", "cas", "rev", "vbid", "dtype" };
+            IAType[] metaFieldTypes = { BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT64,
+                    BuiltinType.AINT32, BuiltinType.AINT32, BuiltinType.AINT32 };
+            ARecordType metaType = new ARecordType("meta", metaFieldNames, metaFieldTypes, true);
+            int valueIndex = 4;
+            char delimiter = ',';
+            int numOfTupleFields = 3;
+            int[] pkIndexes = { 0 };
+            int[] pkIndicators = { 1 };
+
+            // create input stream
+            LocalFileSystemInputStream inputStream = new LocalFileSystemInputStream(
+                    Paths.get(getClass().getResource("/beer.csv").toURI()).toAbsolutePath(), null, false);
+
+            // create the quoted-line record reader
+            QuotedLineRecordReader lineReader = new QuotedLineRecordReader(true, inputStream, null,
+                    ExternalDataConstants.DEFAULT_QUOTE);
+            // create the CSV-to-record-with-metadata-and-PK converter
+            CSVToRecordWithMetadataAndPKConverter recordConverter = new CSVToRecordWithMetadataAndPKConverter(
+                    valueIndex, delimiter, metaType, recordType, pkIndicators, pkIndexes, keyTypes);
+            // create the value parser (ADM in this case)
+            ADMDataParser valueParser = new ADMDataParser(recordType, false);
+            // create parser.
+            RecordWithMetadataParser parser = new RecordWithMetadataParser(metaType, valueParser, recordConverter);
+
+            // create serializer deserializer and printer factories
+            ISerializerDeserializer[] serdes = new ISerializerDeserializer[keyTypes.length + 2];
+            IPrinterFactory[] printerFactories = new IPrinterFactory[keyTypes.length + 2];
+            for (int i = 0; i < keyTypes.length; i++) {
+                serdes[i + 2] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(keyTypes[i]);
+                printerFactories[i + 2] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(keyTypes[i]);
+            }
+            serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
+            serdes[1] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType);
+            printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
+            printerFactories[1] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(metaType);
+            // create the printers that render each tuple field
+            IPrinter[] printers = new IPrinter[printerFactories.length];
+
+            for (int i = 0; i < printerFactories.length; i++) {
+                printers[i] = printerFactories[i].createPrinter();
+            }
+
+            ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
+            while (lineReader.hasNext()) {
+                IRawRecord<char[]> record = lineReader.next();
+                tb.reset();
+                parser.parse(record, tb.getDataOutput());
+                tb.addFieldEndOffset();
+                parser.parseMeta(tb.getDataOutput());
+                tb.addFieldEndOffset();
+                parser.appendPK(tb);
+                // print the assembled tuple
+                printTuple(tb, printers, printStream);
+
+            }
+            lineReader.close();
+            printStream.close();
+            Assert.assertTrue(FileUtils.contentEquals(file, expected));
+        } catch (Throwable th) {
+            System.err.println("TEST FAILED");
+            th.printStackTrace();
+            throw th;
+        } finally {
+            FileUtils.deleteQuietly(file);
+        }
+        System.err.println("TEST PASSED.");
+    }
+
+    private void printTuple(ArrayTupleBuilder tb, IPrinter[] printers, PrintStream printStream)
+            throws HyracksDataException {
+        int[] offsets = tb.getFieldEndOffsets();
+        for (int i = 0; i < printers.length; i++) {
+            int offset = i == 0 ? 0 : offsets[i - 1];
+            int length = i == 0 ? offsets[0] : offsets[i] - offsets[i - 1];
+            printers[i].print(tb.getByteArray(), offset, length, printStream);
+            printStream.println();
+        }
+    }
+}
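
printTuple() above relies on ArrayTupleBuilder recording cumulative field-end offsets: field i starts at offsets[i-1] (0 for the first field) and its length is the delta to offsets[i]. A tiny sketch of that arithmetic, with made-up offsets for the value | meta | PK layout:

    public class FieldOffsetDemo {
        public static void main(String[] args) {
            // made-up cumulative end offsets for the value | meta | PK fields
            int[] offsets = { 12, 40, 49 };
            for (int i = 0; i < offsets.length; i++) {
                int offset = i == 0 ? 0 : offsets[i - 1];
                int length = i == 0 ? offsets[0] : offsets[i] - offsets[i - 1];
                System.out.println("field " + i + ": offset=" + offset + ", length=" + length);
            }
        }
    }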


[10/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordToRecordWithMetadataAndPKConverter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordToRecordWithMetadataAndPKConverter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordToRecordWithMetadataAndPKConverter.java
new file mode 100644
index 0000000..50fb3cf
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordToRecordWithMetadataAndPKConverter.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.converter;
+
+import org.apache.asterix.external.api.IRecordConverter;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+
+public interface IRecordToRecordWithMetadataAndPKConverter<T, O>
+        extends IRecordConverter<T, RecordWithMetadataAndPK<O>> {
+}
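
The new interface simply pins one type parameter of the more general IRecordConverter. A minimal analogue of that specialization, with hypothetical names:

    // General converter, analogous to IRecordConverter<I, O>.
    interface Converter<I, O> {
        O convert(I input);
    }

    // Specialization: the output type is always Wrapped<O>, just as the new
    // interface fixes the output to RecordWithMetadataAndPK<O>.
    interface ToWrappedConverter<I, O> extends Converter<I, Wrapped<O>> {
    }

    class Wrapped<O> {
        final O payload;

        Wrapped(O payload) {
            this.payload = payload;
        }
    }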

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/EmptyRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/EmptyRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/EmptyRecordReader.java
deleted file mode 100644
index e742b1e..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/EmptyRecordReader.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record.reader;
-
-import java.io.IOException;
-
-import org.apache.hadoop.mapred.RecordReader;
-
-public class EmptyRecordReader<K, V> implements RecordReader<K, V> {
-
-    @Override
-    public boolean next(K key, V value) throws IOException {
-        return false;
-    }
-
-    @Override
-    public K createKey() {
-        return null;
-    }
-
-    @Override
-    public V createValue() {
-        return null;
-    }
-
-    @Override
-    public long getPos() throws IOException {
-        return 0;
-    }
-
-    @Override
-    public void close() throws IOException {
-    }
-
-    @Override
-    public float getProgress() throws IOException {
-        return 0;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
new file mode 100644
index 0000000..87c187a
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader;
+
+import java.util.Map;
+
+import org.apache.asterix.external.api.IExternalDataSourceFactory;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.IRecordReaderFactory;
+import org.apache.asterix.external.input.record.RecordWithPK;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class RecordWithPKTestReaderFactory implements IRecordReaderFactory<RecordWithPK<char[]>> {
+
+    private static final long serialVersionUID = 1L;
+    private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
+        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, 1);
+        return clusterLocations;
+    }
+
+    @Override
+    public void configure(final Map<String, String> configuration) {
+    }
+
+    @Override
+    public IRecordReader<? extends RecordWithPK<char[]>> createRecordReader(final IHyracksTaskContext ctx,
+            final int partition) {
+        return new TestAsterixMembersReader();
+    }
+
+    @Override
+    public Class<?> getRecordClass() {
+        return RecordWithPK.class;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java
new file mode 100644
index 0000000..916904f
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IDataFlowController;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.input.record.CharArrayRecord;
+import org.apache.asterix.external.input.record.GenericRecord;
+import org.apache.asterix.external.input.record.RecordWithPK;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+
+public class TestAsterixMembersReader implements IRecordReader<RecordWithPK<char[]>> {
+
+    private final CharArrayRecord rawRecord;
+    private final GenericRecord<RecordWithPK<char[]>> record;
+    private final ArrayBackedValueStorage[] pkFieldValueBuffers;
+    private int counter = 0;
+    private final int numOfRecords = 10;
+    private final StringBuilder builder = new StringBuilder();
+    private static final String[] names = { "Abdullah", "Michael", "Till", "Yingyi", "Ildar", "Taewoo", "Young-Seok",
+            "Murtadha", "Ian", "Steven" };
+
+    public TestAsterixMembersReader() {
+        rawRecord = new CharArrayRecord();
+        pkFieldValueBuffers = new ArrayBackedValueStorage[1];
+        pkFieldValueBuffers[0] = new ArrayBackedValueStorage();
+        record = new GenericRecord<RecordWithPK<char[]>>(new RecordWithPK<char[]>(rawRecord, pkFieldValueBuffers));
+    }
+
+    @Override
+    public void close() throws IOException {
+    }
+
+    @Override
+    public boolean hasNext() throws Exception {
+        return counter < numOfRecords;
+    }
+
+    @Override
+    public IRawRecord<RecordWithPK<char[]>> next() throws IOException, InterruptedException {
+        if (counter < numOfRecords) {
+            record.get().reset();
+            builder.setLength(0);
+            builder.append("{\"id\":" + counter + ",\"name\":\"" + names[counter % names.length] + "\"}");
+            rawRecord.set(builder);
+            rawRecord.endRecord();
+            pkFieldValueBuffers[0].getDataOutput().writeByte(ATypeTag.SERIALIZED_INT64_TYPE_TAG);
+            pkFieldValueBuffers[0].getDataOutput().writeLong(counter);
+            counter++;
+            return record;
+        }
+        return null;
+    }
+
+    @Override
+    public boolean stop() {
+        return false;
+    }
+
+    @Override
+    public void setController(final IDataFlowController controller) {
+    }
+
+    @Override
+    public void setFeedLogManager(final FeedLogManager feedLogManager) {
+    }
+}
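
The reader above encodes its primary key as one serialized type-tag byte followed by the big-endian long payload. A sketch of that tagged encoding; the tag constant below is a stand-in, since the real value comes from ATypeTag.SERIALIZED_INT64_TYPE_TAG:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class TaggedPKDemo {
        // stand-in value; AsterixDB uses ATypeTag.SERIALIZED_INT64_TYPE_TAG
        private static final byte INT64_TAG = 4;

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(bos);
            dos.writeByte(INT64_TAG); // type tag first
            dos.writeLong(7L);        // then the key payload, big-endian
            System.out.println("encoded length: " + bos.size() + " bytes"); // 9
        }
    }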

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java
deleted file mode 100644
index fe59aad..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record.reader.couchbase;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.CharBuffer;
-import java.nio.charset.CharsetDecoder;
-import java.nio.charset.StandardCharsets;
-import java.util.Map;
-import java.util.concurrent.ArrayBlockingQueue;
-
-import org.apache.asterix.external.api.IDataFlowController;
-import org.apache.asterix.external.api.IRawRecord;
-import org.apache.asterix.external.api.IRecordReader;
-import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
-import org.apache.asterix.external.input.record.CharArrayRecord;
-import org.apache.asterix.external.input.record.GenericRecord;
-import org.apache.asterix.external.input.record.RecordWithMetadata;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.log4j.Logger;
-
-import com.couchbase.client.core.CouchbaseCore;
-import com.couchbase.client.core.dcp.BucketStreamAggregator;
-import com.couchbase.client.core.dcp.BucketStreamAggregatorState;
-import com.couchbase.client.core.dcp.BucketStreamState;
-import com.couchbase.client.core.dcp.BucketStreamStateUpdatedEvent;
-import com.couchbase.client.core.env.DefaultCoreEnvironment;
-import com.couchbase.client.core.env.DefaultCoreEnvironment.Builder;
-import com.couchbase.client.core.message.cluster.CloseBucketRequest;
-import com.couchbase.client.core.message.cluster.OpenBucketRequest;
-import com.couchbase.client.core.message.cluster.SeedNodesRequest;
-import com.couchbase.client.core.message.dcp.DCPRequest;
-import com.couchbase.client.core.message.dcp.MutationMessage;
-import com.couchbase.client.core.message.dcp.RemoveMessage;
-import com.couchbase.client.core.message.dcp.SnapshotMarkerMessage;
-import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
-
-import rx.functions.Action1;
-
-public class CouchbaseReader implements IRecordReader<RecordWithMetadata<char[]>> {
-
-    private static final MutationMessage POISON_PILL = new MutationMessage((short) 0, null, null, 0, 0L, 0L, 0, 0, 0L,
-            null);
-    private final String feedName;
-    private final short[] vbuckets;
-    private final String bucket;
-    private final String password;
-    private final String[] couchbaseNodes;
-    private AbstractFeedDataFlowController controller;
-    private Builder builder;
-    private BucketStreamAggregator bucketStreamAggregator;
-    private CouchbaseCore core;
-    private DefaultCoreEnvironment env;
-    private Thread pushThread;
-    private ArrayBlockingQueue<MutationMessage> messages;
-    private GenericRecord<RecordWithMetadata<char[]>> record;
-    private RecordWithMetadata<char[]> recordWithMetadata;
-    private boolean done = false;
-    private CharArrayRecord value;
-    private CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
-    private ByteBuffer bytes = ByteBuffer.allocateDirect(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
-    private CharBuffer chars = CharBuffer.allocate(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
-    // metaTypes = {key(string), bucket(string), vbucket(int32), seq(long), cas(long),
-    // creationTime(long),expiration(int32),flags(int32),revSeqNumber(long),lockTime(int32)}
-    private static final IAType[] metaTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING,
-            BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT64, BuiltinType.AINT64, BuiltinType.AINT32,
-            BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT32 };
-    private static final Logger LOGGER = Logger.getLogger(CouchbaseReader.class);
-
-    public CouchbaseReader(String feedName, String bucket, String password, String[] couchbaseNodes, short[] vbuckets,
-            int queueSize) throws HyracksDataException {
-        this.feedName = feedName;
-        this.bucket = bucket;
-        this.password = password;
-        this.couchbaseNodes = couchbaseNodes;
-        this.vbuckets = vbuckets;
-        this.recordWithMetadata = new RecordWithMetadata<char[]>(metaTypes, char[].class);
-        this.messages = new ArrayBlockingQueue<MutationMessage>(queueSize);
-        this.value = new CharArrayRecord();
-        recordWithMetadata.setRecord(value);
-        this.record = new GenericRecord<RecordWithMetadata<char[]>>(recordWithMetadata);
-    }
-
-    @Override
-    public void close() {
-        if (!done) {
-            done = true;
-        }
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        this.builder = DefaultCoreEnvironment.builder().dcpEnabled(CouchbaseReaderFactory.DCP_ENABLED)
-                .autoreleaseAfter(CouchbaseReaderFactory.AUTO_RELEASE_AFTER_MILLISECONDS);
-        this.env = builder.build();
-        this.core = new CouchbaseCore(env);
-        this.bucketStreamAggregator = new BucketStreamAggregator(feedName, core, bucket);
-        connect();
-    }
-
-    private void connect() {
-        core.send(new SeedNodesRequest(couchbaseNodes))
-                .timeout(CouchbaseReaderFactory.TIMEOUT, CouchbaseReaderFactory.TIME_UNIT).toBlocking().single();
-        core.send(new OpenBucketRequest(bucket, password))
-                .timeout(CouchbaseReaderFactory.TIMEOUT, CouchbaseReaderFactory.TIME_UNIT).toBlocking().single();
-        this.pushThread = new Thread(new Runnable() {
-            @Override
-            public void run() {
-                CouchbaseReader.this.run(bucketStreamAggregator);
-            }
-        }, feedName);
-        pushThread.start();
-    }
-
-    private void run(BucketStreamAggregator bucketStreamAggregator) {
-        BucketStreamAggregatorState state = new BucketStreamAggregatorState();
-        for (int i = 0; i < vbuckets.length; i++) {
-            state.put(new BucketStreamState(vbuckets[i], 0, 0, 0xffffffff, 0, 0xffffffff));
-        }
-        state.updates().subscribe(new Action1<BucketStreamStateUpdatedEvent>() {
-            @Override
-            public void call(BucketStreamStateUpdatedEvent event) {
-                if (event.partialUpdate()) {
-                } else {
-                }
-            }
-        });
-        try {
-            bucketStreamAggregator.feed(state).toBlocking().forEach(new Action1<DCPRequest>() {
-                @Override
-                public void call(final DCPRequest dcpRequest) {
-                    try {
-                        if (dcpRequest instanceof SnapshotMarkerMessage) {
-                            SnapshotMarkerMessage message = (SnapshotMarkerMessage) dcpRequest;
-                            final BucketStreamState oldState = state.get(message.partition());
-                            state.put(new BucketStreamState(message.partition(), oldState.vbucketUUID(),
-                                    message.endSequenceNumber(), oldState.endSequenceNumber(),
-                                    message.endSequenceNumber(), oldState.snapshotEndSequenceNumber()));
-                        } else if (dcpRequest instanceof MutationMessage) {
-
-                            messages.put((MutationMessage) dcpRequest);
-                        } else if (dcpRequest instanceof RemoveMessage) {
-                            RemoveMessage message = (RemoveMessage) dcpRequest;
-                            LOGGER.info(message.key() + " was deleted.");
-                        }
-                    } catch (Throwable th) {
-                        LOGGER.error(th);
-                    }
-                }
-            });
-        } catch (Throwable th) {
-            if (th.getCause() instanceof InterruptedException) {
-                LOGGER.warn("dcp thread was interrupted", th);
-                synchronized (this) {
-                    CouchbaseReader.this.close();
-                    notifyAll();
-                }
-            }
-            throw th;
-        }
-    }
-
-    @Override
-    public boolean hasNext() throws Exception {
-        return !done;
-    }
-
-    @Override
-    public IRawRecord<RecordWithMetadata<char[]>> next() throws IOException, InterruptedException {
-        if (messages.isEmpty()) {
-            controller.flush();
-        }
-        MutationMessage message = messages.take();
-        if (message == POISON_PILL) {
-            return null;
-        }
-        String key = message.key();
-        int vbucket = message.partition();
-        long seq = message.bySequenceNumber();
-        String bucket = message.bucket();
-        long cas = message.cas();
-        long creationTime = message.creationTime();
-        int expiration = message.expiration();
-        int flags = message.flags();
-        long revSeqNumber = message.revisionSequenceNumber();
-        int lockTime = message.lockTime();
-        recordWithMetadata.reset();
-        recordWithMetadata.setMetadata(0, key);
-        recordWithMetadata.setMetadata(1, bucket);
-        recordWithMetadata.setMetadata(2, vbucket);
-        recordWithMetadata.setMetadata(3, seq);
-        recordWithMetadata.setMetadata(4, cas);
-        recordWithMetadata.setMetadata(5, creationTime);
-        recordWithMetadata.setMetadata(6, expiration);
-        recordWithMetadata.setMetadata(7, flags);
-        recordWithMetadata.setMetadata(8, revSeqNumber);
-        recordWithMetadata.setMetadata(9, lockTime);
-        CouchbaseReader.set(message.content(), decoder, bytes, chars, value);
-        return record;
-    }
-
-    @Override
-    public boolean stop() {
-        done = true;
-        core.send(new CloseBucketRequest(bucket)).toBlocking();
-        try {
-            messages.put(CouchbaseReader.POISON_PILL);
-        } catch (InterruptedException e) {
-            LOGGER.warn(e);
-            return false;
-        }
-        return true;
-    }
-
-    @Override
-    public void setController(IDataFlowController controller) {
-        this.controller = (AbstractFeedDataFlowController) controller;
-    }
-
-    public static void set(ByteBuf content, CharsetDecoder decoder, ByteBuffer bytes, CharBuffer chars,
-            CharArrayRecord record) {
-        int position = content.readerIndex();
-        int limit = content.writerIndex();
-        int contentSize = content.capacity();
-        while (position < limit) {
-            bytes.clear();
-            chars.clear();
-            if (contentSize - position < bytes.capacity()) {
-                bytes.limit(contentSize - position);
-            }
-            content.getBytes(position, bytes);
-            position += bytes.position();
-            bytes.flip();
-            decoder.decode(bytes, chars, false);
-            chars.flip();
-            record.append(chars);
-        }
-        record.endRecord();
-    }
-
-    @Override
-    public void setFeedLogManager(FeedLogManager feedLogManager) {
-    }
-}
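
The removed CouchbaseReader.set() decoded each mutation body in fixed-size chunks through one reusable ByteBuffer/CharBuffer pair rather than allocating per record. A JDK-only sketch of that chunked decode (it assumes, as the ASCII demo payload guarantees, that no multi-byte UTF-8 sequence straddles a chunk boundary):

    import java.nio.ByteBuffer;
    import java.nio.CharBuffer;
    import java.nio.charset.CharsetDecoder;
    import java.nio.charset.StandardCharsets;

    public class ChunkedDecodeDemo {
        public static void main(String[] args) {
            byte[] payload = "a reasonably long JSON document".getBytes(StandardCharsets.UTF_8);
            CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
            ByteBuffer bytes = ByteBuffer.allocate(8); // deliberately tiny
            CharBuffer chars = CharBuffer.allocate(8);
            StringBuilder record = new StringBuilder();
            int position = 0;
            while (position < payload.length) {
                bytes.clear();
                chars.clear();
                int n = Math.min(bytes.capacity(), payload.length - position);
                bytes.put(payload, position, n);
                bytes.flip();
                position += n;
                decoder.decode(bytes, chars, position == payload.length);
                chars.flip();
                record.append(chars); // reuse buffers, no per-record garbage
            }
            System.out.println(record);
        }
    }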

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReaderFactory.java
deleted file mode 100644
index b715a26..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReaderFactory.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record.reader.couchbase;
-
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IRecordReader;
-import org.apache.asterix.external.api.IRecordReaderFactory;
-import org.apache.asterix.external.input.record.RecordWithMetadata;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.external.util.ExternalDataUtils;
-import org.apache.asterix.om.util.AsterixClusterProperties;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-
-import com.couchbase.client.core.CouchbaseCore;
-import com.couchbase.client.core.config.CouchbaseBucketConfig;
-import com.couchbase.client.core.env.DefaultCoreEnvironment;
-import com.couchbase.client.core.env.DefaultCoreEnvironment.Builder;
-import com.couchbase.client.core.message.cluster.CloseBucketRequest;
-import com.couchbase.client.core.message.cluster.GetClusterConfigRequest;
-import com.couchbase.client.core.message.cluster.GetClusterConfigResponse;
-import com.couchbase.client.core.message.cluster.OpenBucketRequest;
-import com.couchbase.client.core.message.cluster.SeedNodesRequest;
-
-import rx.functions.Func1;
-
-public class CouchbaseReaderFactory implements IRecordReaderFactory<RecordWithMetadata<char[]>> {
-
-    private static final long serialVersionUID = 1L;
-    // Constant fields
-    public static final boolean DCP_ENABLED = true;
-    public static final long AUTO_RELEASE_AFTER_MILLISECONDS = 5000L;
-    public static final int TIMEOUT = 5;
-    public static final TimeUnit TIME_UNIT = TimeUnit.SECONDS;
-    // Dynamic fields
-    private Map<String, String> configuration;
-    private String bucket;
-    private String password = "";
-    private String[] couchbaseNodes;
-    private int numOfVBuckets;
-    private int[] schedule;
-    private String feedName;
-    // Transient fields
-    private transient CouchbaseCore core;
-    private transient Builder builder;
-    private transient DefaultCoreEnvironment env;
-
-    @Override
-    public DataSourceType getDataSourceType() {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
-        return AsterixClusterProperties.INSTANCE.getClusterLocations();
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        // validate first
-        if (!configuration.containsKey(ExternalDataConstants.KEY_BUCKET)) {
-            throw new AsterixException("Unspecified bucket");
-        }
-        if (!configuration.containsKey(ExternalDataConstants.KEY_NODES)) {
-            throw new AsterixException("Unspecified Couchbase nodes");
-        }
-        if (configuration.containsKey(ExternalDataConstants.KEY_PASSWORD)) {
-            password = configuration.get(ExternalDataConstants.KEY_PASSWORD);
-        }
-        this.configuration = configuration;
-        bucket = configuration.get(ExternalDataConstants.KEY_BUCKET);
-        couchbaseNodes = configuration.get(ExternalDataConstants.KEY_NODES).split(",");
-        feedName = configuration.get(ExternalDataConstants.KEY_FEED_NAME);
-        builder = DefaultCoreEnvironment.builder().dcpEnabled(DCP_ENABLED)
-                .autoreleaseAfter(AUTO_RELEASE_AFTER_MILLISECONDS);
-        env = builder.build();
-        core = new CouchbaseCore(env);
-        getNumberOfVbuckets();
-        schedule();
-    }
-
-    /*
-     * We distribute the work of streaming vbuckets between all the partitions in a round robin
-     * fashion.
-     */
-    private void schedule() {
-        schedule = new int[numOfVBuckets];
-        String[] locations = AsterixClusterProperties.INSTANCE.getClusterLocations().getLocations();
-        for (int i = 0; i < numOfVBuckets; i++) {
-            schedule[i] = i % locations.length;
-        }
-    }
-
-    private void getNumberOfVbuckets() {
-        core.send(new SeedNodesRequest(couchbaseNodes)).timeout(TIMEOUT, TIME_UNIT).toBlocking().single();
-        core.send(new OpenBucketRequest(bucket, password)).timeout(TIMEOUT, TIME_UNIT).toBlocking().single();
-        numOfVBuckets = core.<GetClusterConfigResponse> send(new GetClusterConfigRequest())
-                .map(new Func1<GetClusterConfigResponse, Integer>() {
-                    @Override
-                    public Integer call(GetClusterConfigResponse response) {
-                        CouchbaseBucketConfig config = (CouchbaseBucketConfig) response.config().bucketConfig(bucket);
-                        return config.numberOfPartitions();
-                    }
-                }).timeout(TIMEOUT, TIME_UNIT).toBlocking().single();
-        core.send(new CloseBucketRequest(bucket)).toBlocking();
-    }
-
-    @Override
-    public IRecordReader<? extends RecordWithMetadata<char[]>> createRecordReader(IHyracksTaskContext ctx,
-            int partition) throws Exception {
-        String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
-        ArrayList<Short> listOfAssignedVBuckets = new ArrayList<Short>();
-        for (int i = 0; i < schedule.length; i++) {
-            if (schedule[i] == partition) {
-                listOfAssignedVBuckets.add((short) i);
-            }
-        }
-        short[] vbuckets = new short[listOfAssignedVBuckets.size()];
-        for (int i = 0; i < vbuckets.length; i++) {
-            vbuckets[i] = listOfAssignedVBuckets.get(i);
-        }
-        CouchbaseReader reader = new CouchbaseReader(feedName + ":" + nodeName + ":" + partition, bucket, password,
-                couchbaseNodes, vbuckets, ExternalDataUtils.getQueueSize(configuration));
-        reader.configure(configuration);
-        return reader;
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public Class<? extends RecordWithMetadata<char[]>> getRecordClass() {
-        RecordWithMetadata<char[]> record = new RecordWithMetadata<char[]>(char[].class);
-        return (Class<? extends RecordWithMetadata<char[]>>) record.getClass();
-    }
-}
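
The scheduling comment in the removed factory describes a round-robin split: vbucket i is streamed by partition i % numPartitions, and each reader then collects the vbuckets mapped to its own partition. Restated as a stand-alone sketch:

    import java.util.ArrayList;
    import java.util.List;

    public class VBucketScheduleDemo {
        public static void main(String[] args) {
            int numOfVBuckets = 8;
            int numPartitions = 3;
            int[] schedule = new int[numOfVBuckets];
            for (int i = 0; i < numOfVBuckets; i++) {
                schedule[i] = i % numPartitions; // round robin
            }
            int partition = 1; // this reader's partition
            List<Short> assigned = new ArrayList<>();
            for (int i = 0; i < schedule.length; i++) {
                if (schedule[i] == partition) {
                    assigned.add((short) i);
                }
            }
            // partition 1 streams vbuckets [1, 4, 7]
            System.out.println("partition " + partition + " streams " + assigned);
        }
    }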

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/EmptyRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/EmptyRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/EmptyRecordReader.java
new file mode 100644
index 0000000..00e5e71
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/EmptyRecordReader.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.hdfs;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.RecordReader;
+
+public class EmptyRecordReader<K, V> implements RecordReader<K, V> {
+
+    @Override
+    public boolean next(K key, V value) throws IOException {
+        return false;
+    }
+
+    @Override
+    public K createKey() {
+        return null;
+    }
+
+    @Override
+    public V createValue() {
+        return null;
+    }
+
+    @Override
+    public long getPos() throws IOException {
+        return 0;
+    }
+
+    @Override
+    public void close() throws IOException {
+    }
+
+    @Override
+    public float getProgress() throws IOException {
+        return 0;
+    }
+
+}
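
EmptyRecordReader is a null-object placeholder: HDFSRecordReader (below) initializes its reader field with it so callers can poll next() without null checks until a real split reader is opened. A minimal sketch of that usage, assuming the new class is importable; the demo class itself is illustrative, not part of the patch:

    import java.io.IOException;

    import org.apache.asterix.external.input.record.reader.hdfs.EmptyRecordReader;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.RecordReader;

    public class EmptyReaderDemo {
        static int drain(RecordReader<LongWritable, Text> reader) throws IOException {
            int count = 0;
            LongWritable key = reader.createKey();   // null for the empty reader; never dereferenced
            Text value = reader.createValue();
            while (reader.next(key, value)) {        // EmptyRecordReader always answers false
                count++;
            }
            return count;
        }

        public static void main(String[] args) throws IOException {
            System.out.println(drain(new EmptyRecordReader<LongWritable, Text>())); // prints 0
        }
    }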

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
index 22488f7..bfcacd8 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.input.record.reader.hdfs;
 
+import java.io.IOException;
 import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
@@ -30,14 +31,15 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.hdfs.dataflow.ConfFactory;
 
 public class HDFSLookupReaderFactory<T> implements ILookupReaderFactory<T> {
 
-    protected static final long serialVersionUID = 1L;
-    protected transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+    private static final long serialVersionUID = 1L;
     protected ConfFactory confFactory;
     protected Map<String, String> configuration;
+    protected transient AlgebricksAbsolutePartitionConstraint clusterLocations;
 
     public HDFSLookupReaderFactory() {
     }
@@ -48,16 +50,20 @@ public class HDFSLookupReaderFactory<T> implements ILookupReaderFactory<T> {
     }
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
         clusterLocations = HDFSUtils.getPartitionConstraints(clusterLocations);
         return clusterLocations;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
+    public void configure(Map<String, String> configuration) throws AsterixException {
         this.configuration = configuration;
         JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
-        confFactory = new ConfFactory(conf);
+        try {
+            confFactory = new ConfFactory(conf);
+        } catch (HyracksDataException e) {
+            throw new AsterixException(e);
+        }
 
     }
 
@@ -69,10 +75,15 @@ public class HDFSLookupReaderFactory<T> implements ILookupReaderFactory<T> {
     @SuppressWarnings("unchecked")
     @Override
     public ILookupRecordReader<? extends T> createRecordReader(IHyracksTaskContext ctx, int partition,
-            ExternalFileIndexAccessor snapshotAccessor) throws Exception {
+            ExternalFileIndexAccessor snapshotAccessor) throws HyracksDataException {
         String inputFormatParameter = configuration.get(ExternalDataConstants.KEY_INPUT_FORMAT).trim();
         JobConf conf = confFactory.getConf();
-        FileSystem fs = FileSystem.get(conf);
+        FileSystem fs;
+        try {
+            fs = FileSystem.get(conf);
+        } catch (IOException e) {
+            throw new HyracksDataException("Unable to get filesystem object", e);
+        }
         switch (inputFormatParameter) {
             case ExternalDataConstants.INPUT_FORMAT_TEXT:
                 return (ILookupRecordReader<? extends T>) new TextLookupReader(snapshotAccessor, fs, conf);
@@ -81,7 +92,7 @@ public class HDFSLookupReaderFactory<T> implements ILookupReaderFactory<T> {
             case ExternalDataConstants.INPUT_FORMAT_RC:
                 return (ILookupRecordReader<? extends T>) new RCLookupReader(snapshotAccessor, fs, conf);
             default:
-                throw new AsterixException("Unrecognised input format: " + inputFormatParameter);
+                throw new HyracksDataException("Unrecognised input format: " + inputFormatParameter);
         }
     }
 }
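
The notable pattern in this hunk is narrowing throws Exception to specific types and wrapping checked lower-layer exceptions so the cause chain survives. A self-contained sketch of the same pattern (DataException is a stand-in, not the real HyracksDataException):

    import java.io.IOException;

    public class WrapExceptionDemo {
        static class DataException extends Exception {   // stand-in for HyracksDataException
            DataException(String msg, Throwable cause) {
                super(msg, cause);
            }
        }

        static String open(String name) throws DataException {
            try {
                return riskyIo(name);
            } catch (IOException e) {
                throw new DataException("Unable to get filesystem object", e); // preserve the cause
            }
        }

        private static String riskyIo(String name) throws IOException {
            if (name.isEmpty()) {
                throw new IOException("empty name");
            }
            return "handle:" + name;
        }

        public static void main(String[] args) throws DataException {
            System.out.println(open("hdfs://example"));
        }
    }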

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
index b162a02..5ed6dc5 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
@@ -20,7 +20,6 @@ package org.apache.asterix.external.input.record.reader.hdfs;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IExternalIndexer;
@@ -29,7 +28,6 @@ import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.input.record.GenericRecord;
-import org.apache.asterix.external.input.record.reader.EmptyRecordReader;
 import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -57,12 +55,12 @@ public class HDFSRecordReader<K, V extends Writable> implements IRecordReader<Wr
     protected JobConf conf;
     protected GenericRecord<Writable> record;
     // Indexing variables
-    protected IExternalIndexer indexer;
-    protected List<ExternalFile> snapshot;
-    protected FileSystem hdfs;
+    protected final IExternalIndexer indexer;
+    protected final List<ExternalFile> snapshot;
+    protected final FileSystem hdfs;
 
     public HDFSRecordReader(boolean read[], InputSplit[] inputSplits, String[] readSchedule, String nodeName,
-            JobConf conf) {
+            JobConf conf, List<ExternalFile> snapshot, IExternalIndexer indexer) throws IOException {
         this.read = read;
         this.inputSplits = inputSplits;
         this.readSchedule = readSchedule;
@@ -70,6 +68,11 @@ public class HDFSRecordReader<K, V extends Writable> implements IRecordReader<Wr
         this.conf = conf;
         this.inputFormat = conf.getInputFormat();
         this.reader = new EmptyRecordReader<K, Writable>();
+        this.record = new GenericRecord<Writable>();
+        this.indexer = indexer;
+        this.snapshot = snapshot;
+        this.hdfs = FileSystem.get(conf);
+        nextInputSplit();
     }
 
     @Override
@@ -78,12 +81,6 @@ public class HDFSRecordReader<K, V extends Writable> implements IRecordReader<Wr
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        record = new GenericRecord<Writable>();
-        nextInputSplit();
-    }
-
-    @Override
     public boolean hasNext() throws Exception {
         if (reader.next(key, value)) {
             return true;
@@ -163,20 +160,10 @@ public class HDFSRecordReader<K, V extends Writable> implements IRecordReader<Wr
         return indexer;
     }
 
-    @Override
-    public void setIndexer(IExternalIndexer indexer) {
-        this.indexer = indexer;
-    }
-
     public List<ExternalFile> getSnapshot() {
         return snapshot;
     }
 
-    public void setSnapshot(List<ExternalFile> snapshot) throws IOException {
-        this.snapshot = snapshot;
-        hdfs = FileSystem.get(conf);
-    }
-
     public int getCurrentSplitIndex() {
         return currentSplitIndex;
     }
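
This hunk replaces setter/configure() initialization with constructor injection: snapshot, indexer, and hdfs become final, and the reader is usable the moment it is constructed. A tiny sketch of the idea (all names hypothetical):

    public class ConstructorInjectionDemo {
        static final class Reader {
            private final String snapshot;   // previously set later via setSnapshot()
            private final String indexer;    // previously set later via setIndexer()

            Reader(String snapshot, String indexer) {
                this.snapshot = snapshot;    // no partially-initialized state possible
                this.indexer = indexer;
            }

            @Override
            public String toString() {
                return "Reader(" + snapshot + ", " + indexer + ")";
            }
        }

        public static void main(String[] args) {
            System.out.println(new Reader("files-v1", "adm-indexer"));
        }
    }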

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReader.java
new file mode 100644
index 0000000..4e41357
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReader.java
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.kv;
+
+import java.io.IOException;
+import java.util.concurrent.ArrayBlockingQueue;
+
+import org.apache.asterix.external.api.IDataFlowController;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.input.record.GenericRecord;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.log4j.Logger;
+
+import com.couchbase.client.core.CouchbaseCore;
+import com.couchbase.client.core.dcp.BucketStreamAggregator;
+import com.couchbase.client.core.dcp.BucketStreamAggregatorState;
+import com.couchbase.client.core.dcp.BucketStreamState;
+import com.couchbase.client.core.dcp.BucketStreamStateUpdatedEvent;
+import com.couchbase.client.core.env.DefaultCoreEnvironment;
+import com.couchbase.client.core.env.DefaultCoreEnvironment.Builder;
+import com.couchbase.client.core.message.cluster.CloseBucketRequest;
+import com.couchbase.client.core.message.cluster.OpenBucketRequest;
+import com.couchbase.client.core.message.cluster.SeedNodesRequest;
+import com.couchbase.client.core.message.dcp.DCPRequest;
+import com.couchbase.client.core.message.dcp.MutationMessage;
+import com.couchbase.client.core.message.dcp.RemoveMessage;
+import com.couchbase.client.core.message.dcp.SnapshotMarkerMessage;
+
+import rx.functions.Action1;
+
+public class KVReader implements IRecordReader<DCPRequest> {
+
+    private static final Logger LOGGER = Logger.getLogger(KVReader.class);
+    private static final MutationMessage POISON_PILL = new MutationMessage((short) 0, null, null, 0, 0L, 0L, 0, 0, 0L,
+            null);
+    private final String feedName;
+    private final short[] vbuckets;
+    private final String bucket;
+    private final String password;
+    private final String[] sourceNodes;
+    private final Builder builder;
+    private final BucketStreamAggregator bucketStreamAggregator;
+    private final CouchbaseCore core;
+    private final DefaultCoreEnvironment env;
+    private final GenericRecord<DCPRequest> record;
+    private final ArrayBlockingQueue<DCPRequest> messages;
+    private AbstractFeedDataFlowController controller;
+    private Thread pushThread;
+    private boolean done = false;
+
+    public KVReader(String feedName, String bucket, String password, String[] sourceNodes, short[] vbuckets,
+            int queueSize) throws HyracksDataException {
+        this.feedName = feedName;
+        this.bucket = bucket;
+        this.password = password;
+        this.sourceNodes = sourceNodes;
+        this.vbuckets = vbuckets;
+        this.messages = new ArrayBlockingQueue<DCPRequest>(queueSize);
+        this.builder = DefaultCoreEnvironment.builder().dcpEnabled(KVReaderFactory.DCP_ENABLED)
+                .autoreleaseAfter(KVReaderFactory.AUTO_RELEASE_AFTER_MILLISECONDS);
+        this.env = builder.build();
+        this.core = new CouchbaseCore(env);
+        this.bucketStreamAggregator = new BucketStreamAggregator(feedName, core, bucket);
+        this.record = new GenericRecord<>();
+        connect();
+    }
+
+    @Override
+    public void close() {
+        if (!done) {
+            done = true;
+        }
+    }
+
+    private void connect() {
+        core.send(new SeedNodesRequest(sourceNodes))
+                .timeout(KVReaderFactory.TIMEOUT, KVReaderFactory.TIME_UNIT).toBlocking().single();
+        core.send(new OpenBucketRequest(bucket, password))
+                .timeout(KVReaderFactory.TIMEOUT, KVReaderFactory.TIME_UNIT).toBlocking().single();
+        this.pushThread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                KVReader.this.run(bucketStreamAggregator);
+            }
+        }, feedName);
+        pushThread.start();
+    }
+
+    private void run(BucketStreamAggregator bucketStreamAggregator) {
+        BucketStreamAggregatorState state = new BucketStreamAggregatorState();
+        for (int i = 0; i < vbuckets.length; i++) {
+            state.put(new BucketStreamState(vbuckets[i], 0, 0, 0xffffffff, 0, 0xffffffff));
+        }
+        state.updates().subscribe(new Action1<BucketStreamStateUpdatedEvent>() {
+            @Override
+            public void call(BucketStreamStateUpdatedEvent event) {
+                if (event.partialUpdate()) {
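+                    // partial stream-state update: intentionally ignored for now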
+                } else {
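+                    // full stream-state update: intentionally ignored for now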
+                }
+            }
+        });
+        try {
+            bucketStreamAggregator.feed(state).toBlocking().forEach(new Action1<DCPRequest>() {
+                @Override
+                public void call(DCPRequest dcpRequest) {
+                    try {
+                        if (dcpRequest instanceof SnapshotMarkerMessage) {
+                            SnapshotMarkerMessage message = (SnapshotMarkerMessage) dcpRequest;
+                            BucketStreamState oldState = state.get(message.partition());
+                            state.put(new BucketStreamState(message.partition(), oldState.vbucketUUID(),
+                                    message.endSequenceNumber(), oldState.endSequenceNumber(),
+                                    message.endSequenceNumber(), oldState.snapshotEndSequenceNumber()));
+                        } else if ((dcpRequest instanceof MutationMessage) || (dcpRequest instanceof RemoveMessage)) {
+                            messages.put(dcpRequest);
+                        } else {
+                            LOGGER.warn("Unknown type of DCP messages: " + dcpRequest);
+                        }
+                    } catch (Throwable th) {
+                        LOGGER.error(th);
+                    }
+                }
+            });
+        } catch (Throwable th) {
+            if (th.getCause() instanceof InterruptedException) {
+                LOGGER.warn("dcp thread was interrupted", th);
+                synchronized (this) {
+                    KVReader.this.close();
+                    notifyAll();
+                }
+            }
+            throw th;
+        }
+    }
+
+    @Override
+    public boolean hasNext() throws Exception {
+        return !done;
+    }
+
+    @Override
+    public IRawRecord<DCPRequest> next() throws IOException, InterruptedException {
+        if (messages.isEmpty()) {
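+            // flush buffered frames downstream before potentially blocking on an empty queue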
+            controller.flush();
+        }
+        DCPRequest dcpRequest = messages.take();
+        if (dcpRequest == POISON_PILL) {
+            return null;
+        }
+        record.set(dcpRequest);
+        return record;
+    }
+
+    @Override
+    public boolean stop() {
+        done = true;
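+        // fire-and-forget: toBlocking() alone does not wait for the close response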
+        core.send(new CloseBucketRequest(bucket)).toBlocking();
+        try {
+            messages.put(KVReader.POISON_PILL);
+        } catch (InterruptedException e) {
+            LOGGER.warn(e);
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public void setController(IDataFlowController controller) {
+        this.controller = (AbstractFeedDataFlowController) controller;
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
+}
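
KVReader decouples the DCP stream from the feed pipeline with a bounded blocking queue: a background thread produces messages, next() consumes them, and stop() enqueues POISON_PILL so a consumer blocked in take() wakes up without being interrupted. A minimal, self-contained sketch of that hand-off (names hypothetical):

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.BlockingQueue;

    public class PoisonPillDemo {
        private static final String POISON_PILL = new String("POISON"); // sentinel, compared by identity

        public static void main(String[] args) throws InterruptedException {
            final BlockingQueue<String> queue = new ArrayBlockingQueue<String>(8);
            Thread producer = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        queue.put("mutation-1");
                        queue.put("mutation-2");
                        queue.put(POISON_PILL); // wakes a blocked consumer on shutdown
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            });
            producer.start();
            while (true) {
                String msg = queue.take();    // blocks until a record or the pill arrives
                if (msg == POISON_PILL) {     // identity check, mirroring dcpRequest == POISON_PILL
                    break;
                }
                System.out.println("consumed " + msg);
            }
            producer.join();
        }
    }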

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReaderFactory.java
new file mode 100644
index 0000000..bc2a980
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVReaderFactory.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.kv;
+
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.IRecordReaderFactory;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+import com.couchbase.client.core.CouchbaseCore;
+import com.couchbase.client.core.config.CouchbaseBucketConfig;
+import com.couchbase.client.core.env.DefaultCoreEnvironment;
+import com.couchbase.client.core.env.DefaultCoreEnvironment.Builder;
+import com.couchbase.client.core.message.cluster.CloseBucketRequest;
+import com.couchbase.client.core.message.cluster.GetClusterConfigRequest;
+import com.couchbase.client.core.message.cluster.GetClusterConfigResponse;
+import com.couchbase.client.core.message.cluster.OpenBucketRequest;
+import com.couchbase.client.core.message.cluster.SeedNodesRequest;
+import com.couchbase.client.core.message.dcp.DCPRequest;
+
+import rx.functions.Func1;
+
+public class KVReaderFactory implements IRecordReaderFactory<DCPRequest> {
+
+    private static final long serialVersionUID = 1L;
+    // Constant fields
+    public static final boolean DCP_ENABLED = true;
+    public static final long AUTO_RELEASE_AFTER_MILLISECONDS = 5000L;
+    public static final int TIMEOUT = 5;
+    public static final TimeUnit TIME_UNIT = TimeUnit.SECONDS;
+    // Dynamic fields
+    private Map<String, String> configuration;
+    private String bucket;
+    private String password = "";
+    private String[] couchbaseNodes;
+    private int numOfVBuckets;
+    private int[] schedule;
+    private String feedName;
+    // Transient fields
+    private transient CouchbaseCore core;
+    private transient Builder builder;
+    private transient DefaultCoreEnvironment env;
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
+        return AsterixClusterProperties.INSTANCE.getClusterLocations();
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) throws AsterixException {
+        // validate first
+        if (!configuration.containsKey(ExternalDataConstants.KEY_BUCKET)) {
+            throw new AsterixException("Unspecified bucket");
+        }
+        if (!configuration.containsKey(ExternalDataConstants.KEY_NODES)) {
+            throw new AsterixException("Unspecified Couchbase nodes");
+        }
+        if (configuration.containsKey(ExternalDataConstants.KEY_PASSWORD)) {
+            password = configuration.get(ExternalDataConstants.KEY_PASSWORD);
+        }
+        this.configuration = configuration;
+        ExternalDataUtils.setNumberOfKeys(configuration, 1);
+        ExternalDataUtils.setChangeFeed(configuration, ExternalDataConstants.TRUE);
+        ExternalDataUtils.setRecordWithMeta(configuration, ExternalDataConstants.TRUE);
+        bucket = configuration.get(ExternalDataConstants.KEY_BUCKET);
+        couchbaseNodes = configuration.get(ExternalDataConstants.KEY_NODES).split(",");
+        feedName = configuration.get(ExternalDataConstants.KEY_FEED_NAME);
+        builder = DefaultCoreEnvironment.builder().dcpEnabled(DCP_ENABLED)
+                .autoreleaseAfter(AUTO_RELEASE_AFTER_MILLISECONDS);
+        env = builder.build();
+        core = new CouchbaseCore(env);
+        getNumberOfVbuckets();
+        schedule();
+    }
+
+    /*
+     * We distribute the work of streaming vbuckets between all the partitions in a round robin
+     * fashion.
+     */
+    private void schedule() {
+        schedule = new int[numOfVBuckets];
+        String[] locations = AsterixClusterProperties.INSTANCE.getClusterLocations().getLocations();
+        for (int i = 0; i < numOfVBuckets; i++) {
+            schedule[i] = i % locations.length;
+        }
+    }
+
+    private void getNumberOfVbuckets() {
+        core.send(new SeedNodesRequest(couchbaseNodes)).timeout(TIMEOUT, TIME_UNIT).toBlocking().single();
+        core.send(new OpenBucketRequest(bucket, password)).timeout(TIMEOUT, TIME_UNIT).toBlocking().single();
+        numOfVBuckets = core.<GetClusterConfigResponse> send(new GetClusterConfigRequest())
+                .map(new Func1<GetClusterConfigResponse, Integer>() {
+                    @Override
+                    public Integer call(GetClusterConfigResponse response) {
+                        CouchbaseBucketConfig config = (CouchbaseBucketConfig) response.config().bucketConfig(bucket);
+                        return config.numberOfPartitions();
+                    }
+                }).timeout(TIMEOUT, TIME_UNIT).toBlocking().single();
+        core.send(new CloseBucketRequest(bucket)).toBlocking();
+    }
+
+    @Override
+    public IRecordReader<? extends DCPRequest> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+        ArrayList<Short> listOfAssignedVBuckets = new ArrayList<Short>();
+        for (int i = 0; i < schedule.length; i++) {
+            if (schedule[i] == partition) {
+                listOfAssignedVBuckets.add((short) i);
+            }
+        }
+        short[] vbuckets = new short[listOfAssignedVBuckets.size()];
+        for (int i = 0; i < vbuckets.length; i++) {
+            vbuckets[i] = listOfAssignedVBuckets.get(i);
+        }
+        return new KVReader(feedName + ":" + nodeName + ":" + partition, bucket, password, couchbaseNodes,
+                vbuckets, ExternalDataUtils.getQueueSize(configuration));
+    }
+
+    @Override
+    public Class<?> getRecordClass() {
+        return DCPRequest.class;
+    }
+}
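
The schedule() method above assigns vbuckets to cluster partitions round-robin, and createRecordReader() inverts that mapping to collect the vbuckets owned by one partition. A self-contained sketch of both steps with small, made-up sizes:

    public class VBucketScheduleDemo {
        public static void main(String[] args) {
            final int numOfVBuckets = 8;   // 1024 on a real Couchbase bucket
            final int numPartitions = 3;   // hypothetical cluster size
            final int[] schedule = new int[numOfVBuckets];
            for (int i = 0; i < numOfVBuckets; i++) {
                schedule[i] = i % numPartitions;        // round-robin assignment
            }
            for (int partition = 0; partition < numPartitions; partition++) {
                final StringBuilder assigned = new StringBuilder();
                for (int i = 0; i < schedule.length; i++) {
                    if (schedule[i] == partition) {     // same scan as createRecordReader
                        assigned.append(i).append(' ');
                    }
                }
                System.out.println("partition " + partition + " -> vbuckets " + assigned);
            }
        }
    }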

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java
new file mode 100644
index 0000000..9e797e3
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReader.java
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.kv;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+
+import org.apache.asterix.external.api.IDataFlowController;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.input.record.GenericRecord;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.log4j.Logger;
+
+import com.couchbase.client.core.message.dcp.DCPRequest;
+import com.couchbase.client.core.message.dcp.MutationMessage;
+import com.couchbase.client.core.message.dcp.RemoveMessage;
+import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
+import com.couchbase.client.deps.io.netty.buffer.ByteBufAllocator;
+
+public class KVTestReader implements IRecordReader<DCPRequest> {
+
+    private final GenericRecord<DCPRequest> record;
+    private static final Logger LOGGER = Logger.getLogger(KVTestReader.class);
+    // Test variables
+    private final String bucket;
+    private final ArrayList<Short> assigned;
+    private final int numberOfMutations;
+    private int counter = 0;
+    private boolean stopped = false;
+    // for deterministic data generation
+    private int expiration = 7999;
+    private long seq = 16L;
+    private int lockTime = 158;
+    private long cas = 0L;
+    private int deleteCycle;
+    private int upsertCycle;
+    private String nextDeleteKey;
+    private short nextDeletePartition;
+    private String nextUpsertKey;
+    private short nextUpsertPartition;
+    private final ByteBuf byteBuff;
+    private final StringBuilder strBuilder = new StringBuilder();
+    private int upsertCounter = 0;
+    private final String[] names = { "Michael Carey", "Till Westmann", "Michael Blow", "Chris Hillary", "Yingyi Bu",
+            "Ian Maxon", "Abdullah Alamoudi" };
+
+    public KVTestReader(final int partition, final String bucket, final int[] schedule,
+            final int numberOfMutations, final int deleteCycle, final int upsertCycle) {
+        this.bucket = bucket;
+        this.numberOfMutations = numberOfMutations;
+        this.assigned = new ArrayList<>();
+        this.deleteCycle = deleteCycle;
+        this.upsertCycle = upsertCycle;
+        if ((deleteCycle < 5) || (upsertCycle < 5)) {
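+            // enforce minimum cycle lengths so a key is generated before it is deleted or upserted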
+            this.deleteCycle = 5;
+            this.upsertCycle = 6;
+        }
+        for (int i = 0; i < schedule.length; i++) {
+            if (schedule[i] == partition) {
+                assigned.add((short) i);
+            }
+        }
+        this.byteBuff = ByteBufAllocator.DEFAULT.buffer(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
+        byteBuff.retain();
+        this.record = new GenericRecord<DCPRequest>();
+    }
+
+    private String generateKey() {
+        final short vbucket = assigned.get(counter % assigned.size());
+        final String next = vbucket + "-" + counter;
+        counter++;
+        if ((counter % deleteCycle) == 0) {
+            nextDeleteKey = next;
+            nextDeletePartition = vbucket;
+        }
+        if ((counter % upsertCycle) == 3) {
+            nextUpsertKey = next;
+            nextUpsertPartition = vbucket;
+        }
+        return next;
+    }
+
+    @Override
+    public void close() throws IOException {
+        stop();
+    }
+
+    @Override
+    public boolean hasNext() throws Exception {
+        return !stopped;
+    }
+
+    @Override
+    public IRawRecord<DCPRequest> next() throws IOException, InterruptedException {
+        if (stopped) {
+            return null;
+        }
+        try {
+            final DCPRequest dcpRequest = generateNextDCPMessage();
+            record.set(dcpRequest);
+            if (counter >= numberOfMutations) {
+                stop();
+            }
+        } catch (final Throwable th) {
+            LOGGER.error(th.getMessage(), th);
+        }
+        return record;
+    }
+
+    private DCPRequest generateNextDCPMessage() {
+        if ((counter % deleteCycle) == (deleteCycle - 1)) {
+            if (nextDeleteKey != null) {
+                final String key = nextDeleteKey;
+                nextDeleteKey = null;
+                return new RemoveMessage(nextDeletePartition, key, cas++, seq++, 0L, bucket);
+            }
+        }
+        generateNextDocument();
+        if ((counter % upsertCycle) == (upsertCycle - 1)) {
+            if (nextUpsertKey != null) {
+                final String key = nextUpsertKey;
+                nextUpsertKey = null;
+                upsertCounter++;
+                return new MutationMessage(nextUpsertPartition, key, byteBuff, expiration++, seq++, 0, 0, lockTime++,
+                        cas++, bucket);
+            }
+        }
+        return new MutationMessage(assigned.get(counter % assigned.size()), generateKey(), byteBuff, expiration++,
+                seq++, 0, 0, lockTime++, cas++, bucket);
+    }
+
+    private void generateNextDocument() {
+        // reset the string
+        strBuilder.setLength(0);
+        strBuilder.append("{\"id\":" + (counter + upsertCounter) + ",\"name\":\""
+                + names[(counter + upsertCounter) % names.length] + "\",\"exp\":" + ((counter + upsertCounter) * 3)
+                + "}");
+        byteBuff.clear();
+        byteBuff.writeBytes(strBuilder.toString().getBytes(StandardCharsets.UTF_8));
+    }
+
+    @Override
+    public boolean stop() {
+        if (!stopped) {
+            stopped = true;
+            byteBuff.release();
+        }
+        return stopped;
+    }
+
+    @Override
+    public void setController(final IDataFlowController controller) {
+    }
+
+    @Override
+    public void setFeedLogManager(final FeedLogManager feedLogManager) {
+    }
+
+}
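
KVTestReader interleaves deletes and upserts into the mutation stream at fixed cycles, remembering earlier keys so each later delete or upsert targets a key that already exists. A simplified sketch of that operation mix (the reader's exact bookkeeping differs slightly):

    public class MutationMixDemo {
        public static void main(String[] args) {
            final int deleteCycle = 5;
            final int upsertCycle = 6;
            String pendingDelete = null;
            String pendingUpsert = null;
            for (int counter = 0; counter < 12; counter++) {
                String op = "INSERT key-" + counter;
                if (((counter % deleteCycle) == (deleteCycle - 1)) && (pendingDelete != null)) {
                    op = "DELETE " + pendingDelete;     // remove a key generated earlier
                    pendingDelete = null;
                } else if (((counter % upsertCycle) == (upsertCycle - 1)) && (pendingUpsert != null)) {
                    op = "UPSERT " + pendingUpsert;     // re-mutate a key generated earlier
                    pendingUpsert = null;
                }
                if ((counter % deleteCycle) == 0) {
                    pendingDelete = "key-" + counter;   // remember for a later delete
                }
                if ((counter % upsertCycle) == 3) {
                    pendingUpsert = "key-" + counter;   // remember for a later upsert
                }
                System.out.println(op);
            }
        }
    }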

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
new file mode 100644
index 0000000..8242554
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.kv;
+
+import java.util.Map;
+
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.IRecordReaderFactory;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+import com.couchbase.client.core.message.dcp.DCPRequest;
+
+public class KVTestReaderFactory implements IRecordReaderFactory<DCPRequest> {
+
+    private static final long serialVersionUID = 1L;
+    private final String bucket = "TestBucket";
+    private final int numOfVBuckets = 1024;
+    private final int[] schedule = new int[numOfVBuckets];
+    private int numOfRecords = 1000; // default = 1000 records
+    private int deleteCycle = 0;
+    private int upsertCycle = 0;
+    private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
+        clusterLocations = AsterixClusterProperties.INSTANCE.getClusterLocations();
+        return clusterLocations;
+    }
+
+    @Override
+    public void configure(final Map<String, String> configuration) {
+        if (configuration.containsKey("num-of-records")) {
+            numOfRecords = Integer.parseInt(configuration.get("num-of-records"));
+        }
+        final int numOfReaders = getPartitionConstraint().getLocations().length;
+        for (int i = 0; i < numOfVBuckets; i++) {
+            schedule[i] = i % numOfReaders;
+        }
+
+        if (configuration.containsKey("delete-cycle")) {
+            deleteCycle = Integer.parseInt(configuration.get("delete-cycle"));
+        }
+
+        if (configuration.containsKey("upsert-cycle")) {
+            upsertCycle = Integer.parseInt(configuration.get("upsert-cycle"));
+        }
+    }
+
+    @Override
+    public IRecordReader<? extends DCPRequest> createRecordReader(final IHyracksTaskContext ctx, final int partition) {
+        return new KVTestReader(partition, bucket, schedule,
+                (int) Math.ceil((double) numOfRecords / (double) getPartitionConstraint().getLocations().length),
+                deleteCycle, upsertCycle);
+    }
+
+    @Override
+    public Class<?> getRecordClass() {
+        return DCPRequest.class;
+    }
+}
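
Note the ceiling division when sizing each test reader: it guarantees the readers collectively produce at least numOfRecords even when the count does not divide evenly across partitions. For example:

    public class RecordSplitDemo {
        public static void main(String[] args) {
            int totalRecords = 1000;   // numOfRecords
            int readers = 3;           // cluster partitions
            int perReader = (int) Math.ceil((double) totalRecords / (double) readers);
            System.out.println("each reader generates up to " + perReader + " records"); // 334
        }
    }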

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
index 1af8695..a78f780 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
@@ -23,7 +23,6 @@ import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 import java.util.Queue;
 
 import org.apache.asterix.external.api.IDataFlowController;
@@ -74,10 +73,6 @@ public class RSSRecordReader implements IRecordReader<SyndEntryImpl> {
     }
 
     @Override
-    public void configure(Map<String, String> configurations) throws Exception {
-    }
-
-    @Override
     public boolean hasNext() throws Exception {
         return !done;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
index beceea8..f9eedd1 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.input.record.reader.rss;
 
+import java.net.MalformedURLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -28,14 +29,14 @@ import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 import com.sun.syndication.feed.synd.SyndEntryImpl;
 
 public class RSSRecordReaderFactory implements IRecordReaderFactory<SyndEntryImpl> {
 
     private static final long serialVersionUID = 1L;
-    private Map<String, String> configuration;
-    private List<String> urls = new ArrayList<String>();
+    private final List<String> urls = new ArrayList<String>();
     private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
 
     @Override
@@ -44,15 +45,14 @@ public class RSSRecordReaderFactory implements IRecordReaderFactory<SyndEntryImp
     }
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
         int count = urls.size();
         clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, count);
         return clusterLocations;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        this.configuration = configuration;
+    public void configure(Map<String, String> configuration) {
         String url = configuration.get(ExternalDataConstants.KEY_RSS_URL);
         if (url == null) {
             throw new IllegalArgumentException("no RSS URL provided");
@@ -75,10 +75,12 @@ public class RSSRecordReaderFactory implements IRecordReaderFactory<SyndEntryImp
 
     @Override
     public IRecordReader<? extends SyndEntryImpl> createRecordReader(IHyracksTaskContext ctx, int partition)
-            throws Exception {
-        RSSRecordReader reader = new RSSRecordReader(urls.get(partition));
-        reader.configure(configuration);
-        return reader;
+            throws HyracksDataException {
+        try {
+            return new RSSRecordReader(urls.get(partition));
+        } catch (MalformedURLException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
index 2d6d8ea..6a5d776 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
@@ -19,7 +19,6 @@
 package org.apache.asterix.external.input.record.reader.stream;
 
 import java.io.IOException;
-import java.util.Map;
 
 import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IExternalIndexer;
@@ -34,15 +33,22 @@ import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.FeedLogManager;
 
 public abstract class AbstractStreamRecordReader implements IRecordReader<char[]>, IIndexingDatasource {
-    protected AInputStreamReader reader;
+    protected final AInputStreamReader reader;
     protected CharArrayRecord record;
     protected char[] inputBuffer;
     protected int bufferLength = 0;
     protected int bufferPosn = 0;
-    protected IExternalIndexer indexer;
+    protected final IExternalIndexer indexer;
     protected boolean done = false;
     protected FeedLogManager feedLogManager;
 
+    public AbstractStreamRecordReader(AInputStream inputStream, IExternalIndexer indexer) {
+        this.reader = new AInputStreamReader(inputStream);
+        this.indexer = indexer;
+        record = new CharArrayRecord();
+        inputBuffer = new char[ExternalDataConstants.DEFAULT_BUFFER_SIZE];
+    }
+
     @Override
     public IRawRecord<char[]> next() throws IOException {
         return record;
@@ -56,27 +62,12 @@ public abstract class AbstractStreamRecordReader implements IRecordReader<char[]
         done = true;
     }
 
-    public void setInputStream(AInputStream inputStream) throws IOException {
-        this.reader = new AInputStreamReader(inputStream);
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        record = new CharArrayRecord();
-        inputBuffer = new char[ExternalDataConstants.DEFAULT_BUFFER_SIZE];
-    }
-
     @Override
     public IExternalIndexer getIndexer() {
         return indexer;
     }
 
     @Override
-    public void setIndexer(IExternalIndexer indexer) {
-        this.indexer = indexer;
-    }
-
-    @Override
     public boolean stop() {
         try {
             reader.stop();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReaderFactory.java
index d02de03..12c0229 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReaderFactory.java
@@ -21,16 +21,19 @@ package org.apache.asterix.external.input.record.reader.stream;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IExternalIndexer;
 import org.apache.asterix.external.api.IIndexibleExternalDataSource;
 import org.apache.asterix.external.api.IIndexingDatasource;
 import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
-import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.indexing.ExternalFile;
+import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public abstract class AbstractStreamRecordReaderFactory<T>
         implements IRecordReaderFactory<T>, IIndexibleExternalDataSource {
@@ -51,26 +54,23 @@ public abstract class AbstractStreamRecordReaderFactory<T>
     }
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
         return inputStreamFactory.getPartitionConstraint();
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
+    public void configure(Map<String, String> configuration) throws AsterixException {
         this.configuration = configuration;
         inputStreamFactory.configure(configuration);
-        configureStreamReaderFactory(configuration);
     }
 
-    protected abstract void configureStreamReaderFactory(Map<String, String> configuration) throws Exception;
-
     @Override
     public boolean isIndexible() {
         return inputStreamFactory.isIndexible();
     }
 
     @Override
-    public void setSnapshot(List<ExternalFile> files, boolean indexingOp) throws Exception {
+    public void setSnapshot(List<ExternalFile> files, boolean indexingOp) {
         ((IIndexibleExternalDataSource) inputStreamFactory).setSnapshot(files, indexingOp);
     }
 
@@ -82,8 +82,8 @@ public abstract class AbstractStreamRecordReaderFactory<T>
         return false;
     }
 
-    protected IRecordReader<char[]> configureReader(AbstractStreamRecordReader recordReader, IHyracksTaskContext ctx,
-            int partition) throws Exception {
+    protected Pair<AInputStream, IExternalIndexer> getStreamAndIndexer(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
         IInputStreamProvider inputStreamProvider = inputStreamFactory.createInputStreamProvider(ctx, partition);
         IExternalIndexer indexer = null;
         if (inputStreamFactory.isIndexible()) {
@@ -91,9 +91,6 @@ public abstract class AbstractStreamRecordReaderFactory<T>
                 indexer = ((IIndexingDatasource) inputStreamProvider).getIndexer();
             }
         }
-        recordReader.setInputStream(inputStreamProvider.getInputStream());
-        recordReader.setIndexer(indexer);
-        recordReader.configure(configuration);
-        return recordReader;
+        return new Pair<AInputStream, IExternalIndexer>(inputStreamProvider.getInputStream(), indexer);
     }
 }
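
Taken together with the HDFSRecordReader change above, this hunk completes the same refactoring at the factory level: instead of mutating a half-built reader through setInputStream()/setIndexer()/configure(), the factory hands back the stream and indexer as a Pair so each concrete reader can accept them as constructor arguments and keep its fields final.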

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
index ad2d90d..fb56062 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
@@ -20,14 +20,19 @@ package org.apache.asterix.external.input.record.reader.stream;
 
 import java.io.IOException;
 
+import org.apache.asterix.external.api.IExternalIndexer;
+import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
 
 public class EmptyLineSeparatedRecordReader extends AbstractStreamRecordReader {
 
+    public EmptyLineSeparatedRecordReader(AInputStream inputStream, IExternalIndexer indexer) {
+        super(inputStream, indexer);
+    }
+
     private boolean prevCharCR;
     private boolean prevCharLF;
     private int newlineLength;
-    private int recordNumber = 0;
     private int readLength;
 
     @Override
@@ -53,7 +58,6 @@ public class EmptyLineSeparatedRecordReader extends AbstractStreamRecordReader {
                 if (bufferLength <= 0) {
                     if (readLength > 0) {
                         record.endRecord();
-                        recordNumber++;
                         return true;
                     }
                     close();
@@ -93,7 +97,6 @@ public class EmptyLineSeparatedRecordReader extends AbstractStreamRecordReader {
                 record.append(inputBuffer, startPosn, readLength);
             }
         } while (newlineLength < 2);
-        recordNumber++;
         record.endRecord();
         return true;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java
index a1e8f31..75d16c5 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java
@@ -18,28 +18,26 @@
  */
 package org.apache.asterix.external.input.record.reader.stream;
 
-import java.util.Map;
-
+import org.apache.asterix.external.api.IExternalIndexer;
 import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class EmptyLineSeparatedRecordReaderFactory extends AbstractStreamRecordReaderFactory<char[]> {
 
     private static final long serialVersionUID = 1L;
 
     @Override
-    public IRecordReader<char[]> createRecordReader(IHyracksTaskContext ctx, int partition) throws Exception {
-        EmptyLineSeparatedRecordReader recordReader = new EmptyLineSeparatedRecordReader();
-        return configureReader(recordReader, ctx, partition);
+    public IRecordReader<char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        final Pair<AInputStream, IExternalIndexer> streamAndIndexer = getStreamAndIndexer(ctx, partition);
+        return new EmptyLineSeparatedRecordReader(streamAndIndexer.first, streamAndIndexer.second);
     }
 
     @Override
     public Class<? extends char[]> getRecordClass() {
         return char[].class;
     }
-
-    @Override
-    protected void configureStreamReaderFactory(Map<String, String> configuration) throws Exception {
-    }
-
 }


[14/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/results/feeds/change-feed/change-feed.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/change-feed/change-feed.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/change-feed/change-feed.1.adm
new file mode 100644
index 0000000..95c3f9a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/change-feed/change-feed.1.adm
@@ -0,0 +1,10 @@
+{ "id": 0, "name": "Abdullah" }
+{ "id": 1, "name": "Michael" }
+{ "id": 2, "name": "Till" }
+{ "id": 3, "name": "Yingyi" }
+{ "id": 4, "name": "Ildar" }
+{ "id": 5, "name": "Taewoo" }
+{ "id": 6, "name": "Young-Seok" }
+{ "id": 7, "name": "Murtadha" }
+{ "id": 8, "name": "Ian" }
+{ "id": 9, "name": "Steven" }
\ No newline at end of file


[09/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
index d61dc5c..d55ac87 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
@@ -19,34 +19,50 @@
 package org.apache.asterix.external.input.record.reader.stream;
 
 import java.io.IOException;
-import java.util.Map;
 
+import org.apache.asterix.external.api.IExternalIndexer;
+import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class LineRecordReader extends AbstractStreamRecordReader {
 
     protected boolean prevCharCR;
     protected int newlineLength;
     protected int recordNumber = 0;
-    private boolean configured = false;
+
+    public LineRecordReader(final boolean hasHeader, final AInputStream stream, final IExternalIndexer indexer)
+            throws HyracksDataException {
+        super(stream, indexer);
+        try {
+            if (hasHeader) {
+                if (hasNext()) {
+                    next();
+                }
+            }
+        } catch (final IOException e) {
+            throw new HyracksDataException(e);
+        }
+
+    }
 
     @Override
     public boolean hasNext() throws IOException {
         if (done) {
             return false;
         }
-        /* We're reading data from in, but the head of the stream may be
+        /*
+         * We're reading data from in, but the head of the stream may be
          * already buffered in buffer, so we have several cases:
          * 1. No newline characters are in the buffer, so we need to copy
-         *    everything and read another buffer from the stream.
+         *    everything and read another buffer from the stream.
          * 2. An unambiguously terminated line is in buffer, so we just
          *    copy to record.
          * 3. Ambiguously terminated line is in buffer, i.e. buffer ends
-         *    in CR.  In this case we copy everything up to CR to record, but
-         *    we also need to see what follows CR: if it's LF, then we
-         *    need consume LF as well, so next call to readLine will read
-         *    from after that.
+         *    in CR. In this case we copy everything up to CR to record, but
+         *    we also need to see what follows CR: if it's LF, then we
+         *    need to consume LF as well, so the next call to readLine will
+         *    read from after that.
          * We use a flag prevCharCR to signal if previous character was CR
          * and, if it happens to be at the end of the buffer, delay
          * consuming it until we have a chance to look at the char that
@@ -95,17 +111,4 @@ public class LineRecordReader extends AbstractStreamRecordReader {
         recordNumber++;
         return true;
     }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        if (!configured) {
-            super.configure(configuration);
-            if (ExternalDataUtils.hasHeader(configuration)) {
-                if (hasNext()) {
-                    next();
-                }
-            }
-        }
-        configured = true;
-    }
 }
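
With this change the reader is fully wired at construction time; the old two-step
new-then-configure(Map) protocol is gone. A minimal sketch of the new usage, assuming a
stream and indexer already obtained from the enclosing stream reader factory (both are
placeholders here, not real initializations):

    // hypothetical wiring; stream/indexer normally come from
    // AbstractStreamRecordReaderFactory.getStreamAndIndexer(ctx, partition)
    AInputStream stream = /* e.g. a LocalFileSystemInputStream */ null;
    IExternalIndexer indexer = /* may be null for non-indexing feeds */ null;
    LineRecordReader reader = new LineRecordReader(true /* hasHeader: skip first line */, stream, indexer);
    while (reader.hasNext()) {
        reader.next(); // consume one line record
    }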

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReaderFactory.java
index f0867d3..68f10f6 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReaderFactory.java
@@ -18,26 +18,30 @@
  */
 package org.apache.asterix.external.input.record.reader.stream;
 
-import java.util.Map;
-
+import org.apache.asterix.external.api.IExternalIndexer;
 import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class LineRecordReaderFactory extends AbstractStreamRecordReaderFactory<char[]> {
 
     private static final long serialVersionUID = 1L;
 
     @Override
-    public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition) throws Exception {
+    public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
         String quoteString = configuration.get(ExternalDataConstants.KEY_QUOTE);
-        LineRecordReader recordReader;
+        boolean hasHeader = ExternalDataUtils.hasHeader(configuration);
+        Pair<AInputStream, IExternalIndexer> streamAndIndexer = getStreamAndIndexer(ctx, partition);
         if (quoteString != null) {
-            recordReader = new QuotedLineRecordReader();
+            return new QuotedLineRecordReader(hasHeader, streamAndIndexer.first, streamAndIndexer.second, quoteString);
         } else {
-            recordReader = new LineRecordReader();
+            return new LineRecordReader(hasHeader, streamAndIndexer.first, streamAndIndexer.second);
         }
-        return configureReader(recordReader, ctx, partition);
     }
 
     @Override
@@ -45,8 +49,4 @@ public class LineRecordReaderFactory extends AbstractStreamRecordReaderFactory<c
         return char[].class;
     }
 
-    @Override
-    protected void configureStreamReaderFactory(Map<String, String> configuration) throws Exception {
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
index a8eb07b..6266aa2 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
@@ -19,24 +19,24 @@
 package org.apache.asterix.external.input.record.reader.stream;
 
 import java.io.IOException;
-import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalIndexer;
+import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataExceptionUtils;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class QuotedLineRecordReader extends LineRecordReader {
 
-    private char quote;
+    private final char quote;
     private boolean prevCharEscape;
     private boolean inQuote;
 
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        super.configure(configuration);
-        String quoteString = configuration.get(ExternalDataConstants.KEY_QUOTE);
-        if (quoteString == null || quoteString.length() != 1) {
-            throw new AsterixException(ExternalDataExceptionUtils.incorrectParameterMessage(
+    public QuotedLineRecordReader(final boolean hasHeader, final AInputStream stream, final IExternalIndexer indexer,
+            final String quoteString) throws HyracksDataException {
+        super(hasHeader, stream, indexer);
+        if ((quoteString == null) || (quoteString.length() != 1)) {
+            throw new HyracksDataException(ExternalDataExceptionUtils.incorrectParameterMessage(
                     ExternalDataConstants.KEY_QUOTE, ExternalDataConstants.PARAMETER_OF_SIZE_ONE, quoteString));
         }
         this.quote = quoteString.charAt(0);
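
The quote option is likewise validated at construction time: anything other than a
single-character string now fails fast with a HyracksDataException instead of surfacing
later during configure(). A sketch, with stream and indexer placeholders as above:

    new QuotedLineRecordReader(false, stream, indexer, "\"");  // ok: one-char quote
    new QuotedLineRecordReader(false, stream, indexer, "<<");  // throws HyracksDataException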

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
index f41486e..678dd03 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
@@ -19,9 +19,10 @@
 package org.apache.asterix.external.input.record.reader.stream;
 
 import java.io.IOException;
-import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalIndexer;
+import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataExceptionUtils;
 
@@ -34,15 +35,10 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
     private char recordEnd;
     private int recordNumber = 0;
 
-    public int getRecordNumber() {
-        return recordNumber;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        super.configure(configuration);
-        String recStartString = configuration.get(ExternalDataConstants.KEY_RECORD_START);
-        String recEndString = configuration.get(ExternalDataConstants.KEY_RECORD_END);
+    public SemiStructuredRecordReader(AInputStream stream, IExternalIndexer indexer, String recStartString,
+            String recEndString) throws AsterixException {
+        super(stream, indexer);
+        // set record opening char
         if (recStartString != null) {
             if (recStartString.length() != 1) {
                 throw new AsterixException(
@@ -53,6 +49,7 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
         } else {
             recordStart = ExternalDataConstants.DEFAULT_RECORD_START;
         }
+        // set record ending char
         if (recEndString != null) {
             if (recEndString.length() != 1) {
                 throw new AsterixException(
@@ -65,6 +62,10 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
         }
     }
 
+    public int getRecordNumber() {
+        return recordNumber;
+    }
+
     @Override
     public boolean hasNext() throws Exception {
         if (done) {
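
As with the line readers, the record delimiters are fixed at construction; passing null
for either argument falls back to ExternalDataConstants.DEFAULT_RECORD_START /
DEFAULT_RECORD_END. A sketch, assuming '{' and '}' are the ADM-style defaults:

    // explicit one-character delimiters; null/null would use the defaults
    SemiStructuredRecordReader reader =
            new SemiStructuredRecordReader(stream, indexer, "{", "}");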

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReaderFactory.java
index ec8eac9..206ae50 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReaderFactory.java
@@ -18,27 +18,34 @@
  */
 package org.apache.asterix.external.input.record.reader.stream;
 
-import java.util.Map;
-
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalIndexer;
 import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class SemiStructuredRecordReaderFactory extends AbstractStreamRecordReaderFactory<char[]> {
 
     private static final long serialVersionUID = 1L;
 
     @Override
-    public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition) throws Exception {
-        SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader();
-        return configureReader(recordReader, ctx, partition);
+    public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        Pair<AInputStream, IExternalIndexer> streamAndIndexer = getStreamAndIndexer(ctx, partition);
+        try {
+            return new SemiStructuredRecordReader(streamAndIndexer.first, streamAndIndexer.second,
+                    configuration.get(ExternalDataConstants.KEY_RECORD_START),
+                    configuration.get(ExternalDataConstants.KEY_RECORD_END));
+        } catch (AsterixException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override
     public Class<? extends char[]> getRecordClass() {
         return char[].class;
     }
-
-    @Override
-    protected void configureStreamReaderFactory(Map<String, String> configuration) throws Exception {
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
index 617bc39..be9ce06 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
@@ -20,15 +20,12 @@ package org.apache.asterix.external.input.record.reader.twitter;
 
 import java.io.IOException;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.input.record.GenericRecord;
 import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.asterix.external.util.TwitterUtil;
-import org.apache.asterix.external.util.TwitterUtil.SearchAPIConstants;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 import twitter4j.Query;
@@ -39,7 +36,6 @@ import twitter4j.TwitterException;
 
 public class TwitterPullRecordReader implements IRecordReader<Status> {
 
-    private String keywords;
     private Query query;
     private Twitter twitter;
     private int requestInterval = 5; // seconds
@@ -48,7 +44,12 @@ public class TwitterPullRecordReader implements IRecordReader<Status> {
     private long lastTweetIdReceived = 0;
     private GenericRecord<Status> record;
 
-    public TwitterPullRecordReader() {
+    public TwitterPullRecordReader(Twitter twitter, String keywords, int requestInterval) {
+        this.twitter = twitter;
+        this.requestInterval = requestInterval;
+        this.query = new Query(keywords);
+        this.query.setCount(100);
+        this.record = new GenericRecord<Status>();
     }
 
     @Override
@@ -56,16 +57,6 @@ public class TwitterPullRecordReader implements IRecordReader<Status> {
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        twitter = TwitterUtil.getTwitterService(configuration);
-        keywords = configuration.get(SearchAPIConstants.QUERY);
-        requestInterval = Integer.parseInt(configuration.get(SearchAPIConstants.INTERVAL));
-        query = new Query(keywords);
-        query.setCount(100);
-        record = new GenericRecord<Status>();
-    }
-
-    @Override
     public boolean hasNext() throws Exception {
         return true;
     }
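
The pull reader now receives a ready twitter4j client, the query keywords, and the
polling interval directly. A sketch, with the keyword and interval values purely
illustrative:

    // TwitterUtil.getTwitterService(configuration) is how the factory
    // (below) obtains the client; "asterixdb" and 5 are example values
    Twitter twitter = TwitterUtil.getTwitterService(configuration);
    IRecordReader<Status> reader = new TwitterPullRecordReader(twitter, "asterixdb", 5);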

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
index 19f156c..64695b5 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
@@ -19,7 +19,6 @@
 package org.apache.asterix.external.input.record.reader.twitter;
 
 import java.io.IOException;
-import java.util.Map;
 import java.util.concurrent.LinkedBlockingQueue;
 
 import org.apache.asterix.external.api.IDataFlowController;
@@ -27,7 +26,6 @@ import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.input.record.GenericRecord;
 import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.asterix.external.util.TwitterUtil;
 
 import twitter4j.FilterQuery;
 import twitter4j.StallWarning;
@@ -42,6 +40,22 @@ public class TwitterPushRecordReader implements IRecordReader<Status> {
     private GenericRecord<Status> record;
     private boolean closed = false;
 
+    public TwitterPushRecordReader(TwitterStream twitterStream, FilterQuery query) {
+        record = new GenericRecord<Status>();
+        inputQ = new LinkedBlockingQueue<Status>();
+        this.twitterStream = twitterStream;
+        this.twitterStream.addListener(new TweetListener(inputQ));
+        this.twitterStream.filter(query);
+    }
+
+    public TwitterPushRecordReader(TwitterStream twitterStream) {
+        record = new GenericRecord<Status>();
+        inputQ = new LinkedBlockingQueue<Status>();
+        this.twitterStream = twitterStream;
+        this.twitterStream.addListener(new TweetListener(inputQ));
+        twitterStream.sample();
+    }
+
     @Override
     public void close() throws IOException {
         if (!closed) {
@@ -53,20 +67,6 @@ public class TwitterPushRecordReader implements IRecordReader<Status> {
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        record = new GenericRecord<Status>();
-        inputQ = new LinkedBlockingQueue<Status>();
-        twitterStream = TwitterUtil.getTwitterStream(configuration);
-        twitterStream.addListener(new TweetListener(inputQ));
-        FilterQuery query = TwitterUtil.getFilterQuery(configuration);
-        if (query != null) {
-            twitterStream.filter(query);
-        } else {
-            twitterStream.sample();
-        }
-    }
-
-    @Override
     public boolean hasNext() throws Exception {
         return !closed;
     }
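
The two constructors correspond to twitter4j's two streaming modes: filter(query) when
a FilterQuery is available, sample() otherwise. A sketch building a filtered reader
directly with twitter4j, with the tracked keywords purely illustrative:

    // TwitterUtil.getFilterQuery(configuration) builds an equivalent query
    // from the adapter configuration; this constructs one by hand instead
    FilterQuery query = new FilterQuery().track(new String[] { "asterixdb", "hyracks" });
    TwitterPushRecordReader reader = new TwitterPushRecordReader(twitterStream, query);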

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
index a2a4742..7ca185f 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
@@ -33,7 +33,9 @@ import org.apache.asterix.external.util.TwitterUtil.AuthenticationConstants;
 import org.apache.asterix.external.util.TwitterUtil.SearchAPIConstants;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
+import twitter4j.FilterQuery;
 import twitter4j.Status;
 
 public class TwitterRecordReaderFactory implements IRecordReaderFactory<Status> {
@@ -54,13 +56,13 @@ public class TwitterRecordReaderFactory implements IRecordReaderFactory<Status>
     }
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
         clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, INTAKE_CARDINALITY);
         return clusterLocations;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
+    public void configure(Map<String, String> configuration) throws AsterixException {
         this.configuration = configuration;
         TwitterUtil.initializeConfigurationWithAuthInfo(configuration);
         if (!validateConfiguration(configuration)) {
@@ -70,7 +72,7 @@ public class TwitterRecordReaderFactory implements IRecordReaderFactory<Status>
             builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
             builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
             builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
-            throw new Exception(builder.toString());
+            throw new AsterixException(builder.toString());
         }
         if (ExternalDataUtils.isPull(configuration)) {
             pull = true;
@@ -107,15 +109,22 @@ public class TwitterRecordReaderFactory implements IRecordReaderFactory<Status>
     }
 
     @Override
-    public IRecordReader<? extends Status> createRecordReader(IHyracksTaskContext ctx, int partition) throws Exception {
-        IRecordReader<Status> reader;
+    public IRecordReader<? extends Status> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
         if (pull) {
-            reader = new TwitterPullRecordReader();
+            return new TwitterPullRecordReader(TwitterUtil.getTwitterService(configuration),
+                    configuration.get(SearchAPIConstants.QUERY),
+                    Integer.parseInt(configuration.get(SearchAPIConstants.INTERVAL)));
         } else {
-            reader = new TwitterPushRecordReader();
+            FilterQuery query;
+            try {
+                query = TwitterUtil.getFilterQuery(configuration);
+                return (query == null) ? new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration))
+                        : new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration), query);
+            } catch (AsterixException e) {
+                throw new HyracksDataException(e);
+            }
         }
-        reader.configure(configuration);
-        return reader;
     }
 
     @Override
@@ -128,7 +137,7 @@ public class TwitterRecordReaderFactory implements IRecordReaderFactory<Status>
         String consumerSecret = configuration.get(AuthenticationConstants.OAUTH_CONSUMER_SECRET);
         String accessToken = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN);
         String tokenSecret = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
-        if (consumerKey == null || consumerSecret == null || accessToken == null || tokenSecret == null) {
+        if ((consumerKey == null) || (consumerSecret == null) || (accessToken == null) || (tokenSecret == null)) {
             return false;
         }
         return true;
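
For reference, the OAuth entries the factory checks are the four tested in
validateConfiguration(). A hedged sketch of a minimal configuration map, with
placeholder values rather than real credentials:

    // key names are the constants from TwitterUtil.AuthenticationConstants
    Map<String, String> config = new HashMap<>();
    config.put(AuthenticationConstants.OAUTH_CONSUMER_KEY, "consumer-key");
    config.put(AuthenticationConstants.OAUTH_CONSUMER_SECRET, "consumer-secret");
    config.put(AuthenticationConstants.OAUTH_ACCESS_TOKEN, "access-token");
    config.put(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET, "token-secret");
    factory.configure(config); // throws AsterixException if any of the four is missing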

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
index 469e866..b78f96d 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
@@ -18,9 +18,7 @@
  */
 package org.apache.asterix.external.input.stream;
 
-import java.io.IOException;
 import java.io.InputStream;
-import java.util.Map;
 
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.util.FeedLogManager;
@@ -30,8 +28,6 @@ public abstract class AInputStream extends InputStream {
 
     public abstract boolean stop() throws Exception;
 
-    public abstract void configure(Map<String, String> configuration) throws IOException;
-
     // TODO: Find a better way to send notifications
     public abstract void setController(AbstractFeedDataFlowController controller);
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
index 89008aa..bf85330 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
@@ -19,7 +19,7 @@
 package org.apache.asterix.external.input.stream;
 
 import java.io.IOException;
-import java.io.InputStreamReader;
+import java.io.Reader;
 import java.nio.ByteBuffer;
 import java.nio.CharBuffer;
 import java.nio.charset.CharsetDecoder;
@@ -29,7 +29,7 @@ import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.FeedLogManager;
 
-public class AInputStreamReader extends InputStreamReader {
+public class AInputStreamReader extends Reader {
     private AInputStream in;
     private byte[] bytes = new byte[ExternalDataConstants.DEFAULT_BUFFER_SIZE];
     private ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
@@ -38,7 +38,6 @@ public class AInputStreamReader extends InputStreamReader {
     private boolean done = false;
 
     public AInputStreamReader(AInputStream in) {
-        super(in);
         this.in = in;
         this.decoder = StandardCharsets.UTF_8.newDecoder();
         this.byteBuffer.flip();
@@ -74,22 +73,42 @@ public class AInputStreamReader extends InputStreamReader {
         if (done) {
             return -1;
         }
+        int len = 0;
         charBuffer.clear();
-        if (byteBuffer.hasRemaining()) {
+        while (charBuffer.position() == 0) {
+            if (byteBuffer.hasRemaining()) {
+                decoder.decode(byteBuffer, charBuffer, false);
+                System.arraycopy(charBuffer.array(), 0, cbuf, offset, charBuffer.position());
+                if (charBuffer.position() > 0) {
+                    return charBuffer.position();
+                } else {
+                    // need to read more data
+                    System.arraycopy(bytes, byteBuffer.position(), bytes, 0, byteBuffer.remaining());
+                    byteBuffer.position(byteBuffer.remaining());
+                    while (len == 0) {
+                        len = in.read(bytes, byteBuffer.position(), bytes.length - byteBuffer.position());
+                    }
+                }
+            } else {
+                byteBuffer.clear();
+                while (len == 0) {
+                    len = in.read(bytes, 0, bytes.length);
+                }
+            }
+            if (len == -1) {
+                done = true;
+                return len;
+            }
+            byteBuffer.position(len);
+            byteBuffer.flip();
             decoder.decode(byteBuffer, charBuffer, false);
             System.arraycopy(charBuffer.array(), 0, cbuf, offset, charBuffer.position());
-            return charBuffer.position();
         }
-        int len = in.read(bytes, 0, bytes.length);
-        if (len == -1) {
-            done = true;
-            return len;
-        }
-        byteBuffer.clear();
-        byteBuffer.position(len);
-        byteBuffer.flip();
-        decoder.decode(byteBuffer, charBuffer, false);
-        System.arraycopy(charBuffer.array(), 0, cbuf, offset, charBuffer.position());
         return charBuffer.position();
     }
+
+    @Override
+    public void close() throws IOException {
+        in.close();
+    }
 }
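
The rewritten read() loops until the UTF-8 decoder actually produces a character: a
multi-byte sequence can be split across two reads of the underlying stream, in which
case decode() consumes nothing and the trailing bytes must be carried in front of the
next chunk (the System.arraycopy compaction above). A self-contained demonstration of
that CharsetDecoder behavior, independent of the Asterix classes:

    import java.nio.ByteBuffer;
    import java.nio.CharBuffer;
    import java.nio.charset.CharsetDecoder;
    import java.nio.charset.StandardCharsets;

    public class SplitUtf8Demo {
        public static void main(String[] args) {
            CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
            byte[] utf8 = "é".getBytes(StandardCharsets.UTF_8); // two bytes: 0xC3, 0xA9
            CharBuffer out = CharBuffer.allocate(8);

            // first "read" delivers only the leading byte: no character is
            // produced, and the byte is left unconsumed in the input buffer
            ByteBuffer first = ByteBuffer.wrap(utf8, 0, 1);
            decoder.decode(first, out, false);
            System.out.println(out.position() + " chars, " + first.remaining() + " byte left"); // 0 chars, 1 byte left

            // carry the leftover byte in front of the next chunk, as the
            // reader does before refilling its byte buffer
            ByteBuffer second = ByteBuffer.allocate(2);
            second.put(first).put(utf8[1]);
            second.flip();
            decoder.decode(second, out, false);
            out.flip();
            System.out.println(out); // prints é
        }
    }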

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
index 5b654eb..176f5f4 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
@@ -20,7 +20,6 @@ package org.apache.asterix.external.input.stream;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.Map;
 
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.util.FeedLogManager;
@@ -89,10 +88,6 @@ public class BasicInputStream extends AInputStream {
     }
 
     @Override
-    public void configure(Map<String, String> configuration) {
-    }
-
-    @Override
     public void setFeedLogManager(FeedLogManager logManager) {
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
index 8dcd5b6..dc6a130 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
@@ -21,7 +21,6 @@ package org.apache.asterix.external.input.stream;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.file.Path;
-import java.util.Map;
 
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.util.ExternalDataConstants;
@@ -34,8 +33,10 @@ public class LocalFileSystemInputStream extends AInputStream {
     private FileInputStream in;
     private byte lastByte;
 
-    public LocalFileSystemInputStream(Path inputResource, String expression, boolean isFeed) throws IOException {
+    public LocalFileSystemInputStream(Path inputResource, String expression, boolean isFeed)
+            throws HyracksDataException {
         this.watcher = new FileSystemWatcher(inputResource, expression, isFeed);
+        watcher.init();
     }
 
     @Override
@@ -105,10 +106,10 @@ public class LocalFileSystemInputStream extends AInputStream {
             }
         }
         int result = in.read(b, off, len);
-        while (result < 0 && advance()) {
+        while ((result < 0) && advance()) {
             // return a new line at the end of every file <--Might create problems for some cases
             // depending on the parser implementation-->
-            if (lastByte != ExternalDataConstants.BYTE_LF && lastByte != ExternalDataConstants.BYTE_LF) {
+            if ((lastByte != ExternalDataConstants.BYTE_LF) && (lastByte != ExternalDataConstants.BYTE_CR)) {
                 lastByte = ExternalDataConstants.BYTE_LF;
                 b[off] = ExternalDataConstants.BYTE_LF;
                 return 1;
@@ -117,7 +118,7 @@ public class LocalFileSystemInputStream extends AInputStream {
             result = in.read(b, off, len);
         }
         if (result > 0) {
-            lastByte = b[off + result - 1];
+            lastByte = b[(off + result) - 1];
         }
         return result;
     }
@@ -133,9 +134,4 @@ public class LocalFileSystemInputStream extends AInputStream {
         watcher.close();
         return true;
     }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws IOException {
-        watcher.init();
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java
deleted file mode 100644
index 67c4493..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.stream;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.util.Map;
-
-import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
-import org.apache.asterix.external.util.ExternalDataExceptionUtils;
-import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public class SocketInputStream extends AInputStream {
-    private ServerSocket server;
-    private Socket socket;
-    private InputStream connectionStream;
-    private AbstractFeedDataFlowController controller;
-
-    public SocketInputStream(ServerSocket server) throws IOException {
-        this.server = server;
-        socket = new Socket();
-        connectionStream = new InputStream() {
-            @Override
-            public int read() throws IOException {
-                return -1;
-            }
-        };
-    }
-
-    @Override
-    public int read() throws IOException {
-        int read = connectionStream.read();
-        while (read < 0) {
-            accept();
-            read = connectionStream.read();
-        }
-        return read;
-    }
-
-    @Override
-    public boolean skipError() throws Exception {
-        accept();
-        return true;
-    }
-
-    @Override
-    public int read(byte b[]) throws IOException {
-        return read(b, 0, b.length);
-    }
-
-    @Override
-    public int read(byte b[], int off, int len) throws IOException {
-        if (server == null) {
-            return -1;
-        }
-        int read = -1;
-        try {
-            if (connectionStream.available() < 1) {
-                controller.flush();
-            }
-            read = connectionStream.read(b, off, len);
-        } catch (IOException e) {
-            e.printStackTrace();
-            read = -1;
-        }
-        while (read < 0) {
-            if (!accept()) {
-                return -1;
-            }
-            try {
-                read = connectionStream.read(b, off, len);
-            } catch (IOException e) {
-                e.printStackTrace();
-                read = -1;
-            }
-        }
-        return read;
-    }
-
-    @Override
-    public long skip(long n) throws IOException {
-        return 0;
-    }
-
-    @Override
-    public int available() throws IOException {
-        return 1;
-    }
-
-    @Override
-    public synchronized void close() throws IOException {
-        HyracksDataException hde = null;
-        try {
-            if (connectionStream != null) {
-                connectionStream.close();
-            }
-            connectionStream = null;
-        } catch (IOException e) {
-            hde = new HyracksDataException(e);
-        }
-        try {
-            if (socket != null) {
-                socket.close();
-            }
-            socket = null;
-        } catch (IOException e) {
-            hde = ExternalDataExceptionUtils.suppress(hde, e);
-        }
-        try {
-            if (server != null) {
-                server.close();
-            }
-        } catch (IOException e) {
-            hde = ExternalDataExceptionUtils.suppress(hde, e);
-        } finally {
-            server = null;
-        }
-        if (hde != null) {
-            throw hde;
-        }
-    }
-
-    private boolean accept() throws IOException {
-        try {
-            connectionStream.close();
-            connectionStream = null;
-            socket.close();
-            socket = null;
-            socket = server.accept();
-            connectionStream = socket.getInputStream();
-            return true;
-        } catch (Exception e) {
-            close();
-            return false;
-        }
-    }
-
-    @Override
-    public boolean stop() throws Exception {
-        close();
-        return true;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) {
-    }
-
-    @Override
-    public void setFeedLogManager(FeedLogManager logManager) {
-    }
-
-    @Override
-    public void setController(AbstractFeedDataFlowController controller) {
-        this.controller = controller;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java
new file mode 100644
index 0000000..1c33709
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketServerInputStream.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.stream;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.Socket;
+
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.util.ExternalDataExceptionUtils;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class SocketServerInputStream extends AInputStream {
+    private ServerSocket server;
+    private Socket socket;
+    private InputStream connectionStream;
+    private AbstractFeedDataFlowController controller;
+
+    public SocketServerInputStream(ServerSocket server) {
+        this.server = server;
+        socket = new Socket();
+        connectionStream = new InputStream() {
+            @Override
+            public int read() throws IOException {
+                return -1;
+            }
+        };
+    }
+
+    @Override
+    public int read() throws IOException {
+        int read = connectionStream.read();
+        while (read < 0) {
+            accept();
+            read = connectionStream.read();
+        }
+        return read;
+    }
+
+    @Override
+    public boolean skipError() throws Exception {
+        accept();
+        return true;
+    }
+
+    @Override
+    public int read(byte b[]) throws IOException {
+        return read(b, 0, b.length);
+    }
+
+    @Override
+    public int read(byte b[], int off, int len) throws IOException {
+        if (server == null) {
+            return -1;
+        }
+        int read = -1;
+        try {
+            if (connectionStream.available() < 1) {
+                controller.flush();
+            }
+            read = connectionStream.read(b, off, len);
+        } catch (IOException e) {
+            e.printStackTrace();
+            read = -1;
+        }
+        while (read < 0) {
+            if (!accept()) {
+                return -1;
+            }
+            try {
+                read = connectionStream.read(b, off, len);
+            } catch (IOException e) {
+                e.printStackTrace();
+                read = -1;
+            }
+        }
+        return read;
+    }
+
+    @Override
+    public long skip(long n) throws IOException {
+        return 0;
+    }
+
+    @Override
+    public int available() throws IOException {
+        return 1;
+    }
+
+    @Override
+    public synchronized void close() throws IOException {
+        HyracksDataException hde = null;
+        try {
+            if (connectionStream != null) {
+                connectionStream.close();
+            }
+            connectionStream = null;
+        } catch (IOException e) {
+            hde = new HyracksDataException(e);
+        }
+        try {
+            if (socket != null) {
+                socket.close();
+            }
+            socket = null;
+        } catch (IOException e) {
+            hde = ExternalDataExceptionUtils.suppress(hde, e);
+        }
+        try {
+            if (server != null) {
+                server.close();
+            }
+        } catch (IOException e) {
+            hde = ExternalDataExceptionUtils.suppress(hde, e);
+        } finally {
+            server = null;
+        }
+        if (hde != null) {
+            throw hde;
+        }
+    }
+
+    private boolean accept() throws IOException {
+        try {
+            connectionStream.close();
+            connectionStream = null;
+            socket.close();
+            socket = null;
+            socket = server.accept();
+            connectionStream = socket.getInputStream();
+            return true;
+        } catch (Exception e) {
+            close();
+            return false;
+        }
+    }
+
+    @Override
+    public boolean stop() throws Exception {
+        close();
+        return true;
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager logManager) {
+    }
+
+    @Override
+    public void setController(AbstractFeedDataFlowController controller) {
+        this.controller = controller;
+    }
+}
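
This is the old SocketInputStream under a clearer name: it owns a ServerSocket and
transparently re-accepts a new client whenever the current connection drains. A sketch,
with the port number illustrative only:

    // the factory (below) opens one ServerSocket per partition; port
    // 10001 is an arbitrary example
    ServerSocket server = new ServerSocket(10001);
    SocketServerInputStream in = new SocketServerInputStream(server);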

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
index 5c1583e..54ee780 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
@@ -47,15 +47,14 @@ public class LocalFSInputStreamProviderFactory implements IInputStreamProviderFa
     protected static INodeResolver nodeResolver;
     protected Map<String, String> configuration;
     protected FileSplit[] inputFileSplits;
-    protected FileSplit[] feedLogFileSplits; // paths where instances of this feed can use as log
-                                             // storage
+    protected FileSplit[] feedLogFileSplits; // paths that instances of this feed can use as log storage
     protected boolean isFeed;
     protected String expression;
     // transient fields (They don't need to be serialized and transferred)
     private transient AlgebricksAbsolutePartitionConstraint constraints;
 
     @Override
-    public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition) throws Exception {
+    public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition) {
         return new LocalFSInputStreamProvider(inputFileSplits, ctx, configuration, partition, expression, isFeed);
     }
 
@@ -70,7 +69,7 @@ public class LocalFSInputStreamProviderFactory implements IInputStreamProviderFa
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
+    public void configure(Map<String, String> configuration) throws AsterixException {
         this.configuration = configuration;
         String[] splits = configuration.get(ExternalDataConstants.KEY_PATH).split(",");
         configureFileSplits(splits);
@@ -84,7 +83,7 @@ public class LocalFSInputStreamProviderFactory implements IInputStreamProviderFa
     }
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
         return constraints;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamProviderFactory.java
new file mode 100644
index 0000000..5e84123
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamProviderFactory.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.stream.factory;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalDataSourceFactory;
+import org.apache.asterix.external.api.IInputStreamProvider;
+import org.apache.asterix.external.api.IInputStreamProviderFactory;
+import org.apache.asterix.external.input.stream.provider.SocketClientInputStreamProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.http.impl.conn.SystemDefaultDnsResolver;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class SocketClientInputStreamProviderFactory implements IInputStreamProviderFactory {
+
+    private static final long serialVersionUID = 1L;
+    private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+    private List<Pair<String, Integer>> sockets;
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
+        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, sockets.size());
+        return clusterLocations;
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) throws AsterixException {
+        try {
+            this.sockets = new ArrayList<Pair<String, Integer>>();
+            String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
+            if (socketsValue == null) {
+                throw new IllegalArgumentException(
+                        "\'sockets\' parameter not specified as part of adapter configuration");
+            }
+            String[] socketsArray = socketsValue.split(",");
+            for (String socket : socketsArray) {
+                String[] socketTokens = socket.split(":");
+                String host = socketTokens[0].trim();
+                int port = Integer.parseInt(socketTokens[1].trim());
+                InetAddress[] resolved;
+                resolved = SystemDefaultDnsResolver.INSTANCE.resolve(host);
+                Pair<String, Integer> p = new Pair<String, Integer>(resolved[0].getHostAddress(), port);
+                sockets.add(p);
+            }
+        } catch (UnknownHostException e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    @Override
+    public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        return new SocketClientInputStreamProvider(sockets.get(partition));
+    }
+}
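
The client-side factory expects a "sockets" property holding comma-separated host:port
pairs and resolves each host up front. A hedged sketch of configuring it, with
placeholder hostnames:

    // nc1.example.com / nc2.example.com are example hosts only
    Map<String, String> config = new HashMap<>();
    config.put(ExternalDataConstants.KEY_SOCKETS, "nc1.example.com:10001,nc2.example.com:10002");
    SocketClientInputStreamProviderFactory factory = new SocketClientInputStreamProviderFactory();
    factory.configure(config); // throws AsterixException on unresolvable hosts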

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketInputStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketInputStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketInputStreamProviderFactory.java
deleted file mode 100644
index 6fdc42d..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketInputStreamProviderFactory.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.stream.factory;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.ServerSocket;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IInputStreamProvider;
-import org.apache.asterix.external.api.IInputStreamProviderFactory;
-import org.apache.asterix.external.input.stream.provider.SocketInputStreamProvider;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.om.util.AsterixRuntimeUtil;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-
-public class SocketInputStreamProviderFactory implements IInputStreamProviderFactory {
-
-    private static final long serialVersionUID = 1L;
-    private List<Pair<String, Integer>> sockets;
-    private Mode mode = Mode.IP;
-
-    public static enum Mode {
-        NC,
-        IP
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        sockets = new ArrayList<Pair<String, Integer>>();
-        String modeValue = configuration.get(ExternalDataConstants.KEY_MODE);
-        if (modeValue != null) {
-            mode = Mode.valueOf(modeValue.trim().toUpperCase());
-        }
-        String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
-        if (socketsValue == null) {
-            throw new IllegalArgumentException("\'sockets\' parameter not specified as part of adapter configuration");
-        }
-        Map<InetAddress, Set<String>> ncMap = AsterixRuntimeUtil.getNodeControllerMap();
-        List<String> ncs = AsterixRuntimeUtil.getAllNodeControllers();
-        String[] socketsArray = socketsValue.split(",");
-        Random random = new Random();
-        for (String socket : socketsArray) {
-            String[] socketTokens = socket.split(":");
-            String host = socketTokens[0].trim();
-            int port = Integer.parseInt(socketTokens[1].trim());
-            Pair<String, Integer> p = null;
-            switch (mode) {
-                case IP:
-                    Set<String> ncsOnIp = ncMap.get(InetAddress.getByName(host));
-                    if (ncsOnIp == null || ncsOnIp.isEmpty()) {
-                        throw new IllegalArgumentException("Invalid host " + host
-                                + " as it is not part of the AsterixDB cluster. Valid choices are "
-                                + StringUtils.join(ncMap.keySet(), ", "));
-                    }
-                    String[] ncArray = ncsOnIp.toArray(new String[] {});
-                    String nc = ncArray[random.nextInt(ncArray.length)];
-                    p = new Pair<String, Integer>(nc, port);
-                    break;
-
-                case NC:
-                    p = new Pair<String, Integer>(host, port);
-                    if (!ncs.contains(host)) {
-                        throw new IllegalArgumentException(
-                                "Invalid NC " + host + " as it is not part of the AsterixDB cluster. Valid choices are "
-                                        + StringUtils.join(ncs, ", "));
-
-                    }
-                    break;
-            }
-            sockets.add(p);
-        }
-    }
-
-    @Override
-    public synchronized IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition)
-            throws IOException, AsterixException {
-        Pair<String, Integer> socket = sockets.get(partition);
-        ServerSocket server = new ServerSocket(socket.second);
-        return new SocketInputStreamProvider(server);
-    }
-
-    @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
-        List<String> locations = new ArrayList<String>();
-        for (Pair<String, Integer> socket : sockets) {
-            locations.add(socket.first);
-        }
-        return new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
-    }
-
-    public List<Pair<String, Integer>> getSockets() {
-        return sockets;
-    }
-
-    @Override
-    public DataSourceType getDataSourceType() {
-        return DataSourceType.STREAM;
-    }
-
-    @Override
-    public boolean isIndexible() {
-        return false;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamProviderFactory.java
new file mode 100644
index 0000000..a301c1a
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamProviderFactory.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.stream.factory;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IInputStreamProvider;
+import org.apache.asterix.external.api.IInputStreamProviderFactory;
+import org.apache.asterix.external.input.stream.provider.SocketServerInputStreamProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.om.util.AsterixRuntimeUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class SocketServerInputStreamProviderFactory implements IInputStreamProviderFactory {
+
+    private static final long serialVersionUID = 1L;
+    private List<Pair<String, Integer>> sockets;
+    private Mode mode = Mode.IP;
+
+    public static enum Mode {
+        NC,
+        IP
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) throws AsterixException {
+        try {
+            sockets = new ArrayList<Pair<String, Integer>>();
+            String modeValue = configuration.get(ExternalDataConstants.KEY_MODE);
+            if (modeValue != null) {
+                mode = Mode.valueOf(modeValue.trim().toUpperCase());
+            }
+            String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
+            if (socketsValue == null) {
+                throw new IllegalArgumentException(
+                        "\'sockets\' parameter not specified as part of adapter configuration");
+            }
+            Map<InetAddress, Set<String>> ncMap = AsterixRuntimeUtil.getNodeControllerMap();
+            List<String> ncs = AsterixRuntimeUtil.getAllNodeControllers();
+            String[] socketsArray = socketsValue.split(",");
+            Random random = new Random();
+            for (String socket : socketsArray) {
+                String[] socketTokens = socket.split(":");
+                String host = socketTokens[0].trim();
+                int port = Integer.parseInt(socketTokens[1].trim());
+                Pair<String, Integer> p = null;
+                switch (mode) {
+                    case IP:
+                        Set<String> ncsOnIp = ncMap.get(InetAddress.getByName(host));
+                        if ((ncsOnIp == null) || ncsOnIp.isEmpty()) {
+                            throw new IllegalArgumentException("Invalid host " + host
+                                    + " as it is not part of the AsterixDB cluster. Valid choices are "
+                                    + StringUtils.join(ncMap.keySet(), ", "));
+                        }
+                        String[] ncArray = ncsOnIp.toArray(new String[] {});
+                        String nc = ncArray[random.nextInt(ncArray.length)];
+                        p = new Pair<String, Integer>(nc, port);
+                        break;
+
+                    case NC:
+                        p = new Pair<String, Integer>(host, port);
+                        if (!ncs.contains(host)) {
+                            throw new IllegalArgumentException("Invalid NC " + host
+                                    + " as it is not part of the AsterixDB cluster. Valid choices are "
+                                    + StringUtils.join(ncs, ", "));
+
+                        }
+                        break;
+                }
+                sockets.add(p);
+            }
+        } catch (Exception e) {
+            throw new AsterixException(e);
+        }
+    }
+
+    @Override
+    public synchronized IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        try {
+            Pair<String, Integer> socket = sockets.get(partition);
+            ServerSocket server = new ServerSocket(socket.second);
+            return new SocketServerInputStreamProvider(server);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
+        List<String> locations = new ArrayList<String>();
+        for (Pair<String, Integer> socket : sockets) {
+            locations.add(socket.first);
+        }
+        return new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
+    }
+
+    public List<Pair<String, Integer>> getSockets() {
+        return sockets;
+    }
+
+    @Override
+    public DataSourceType getDataSourceType() {
+        return DataSourceType.STREAM;
+    }
+
+    @Override
+    public boolean isIndexible() {
+        return false;
+    }
+}

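For context, the factory above expects the adapter's "sockets" property to be a
comma-separated list of host:port pairs, plus an optional "mode" property of "ip"
(the default) or "nc". A minimal standalone sketch of that parsing, with the
cluster lookup left out; the configuration values below are examples, not part
of the commit:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class SocketsConfigSketch {
        enum Mode { NC, IP }

        public static void main(String[] args) {
            // Example adapter configuration, mirroring what configure() receives.
            Map<String, String> conf = new HashMap<>();
            conf.put("sockets", "127.0.0.1:10001, 127.0.0.1:10002");
            conf.put("mode", "ip"); // trimmed and upper-cased to Mode.IP

            Mode mode = Mode.IP; // default when "mode" is absent
            String modeValue = conf.get("mode");
            if (modeValue != null) {
                mode = Mode.valueOf(modeValue.trim().toUpperCase());
            }
            List<String> endpoints = new ArrayList<>();
            for (String socket : conf.get("sockets").split(",")) {
                String[] tokens = socket.split(":");
                String host = tokens[0].trim(); // mapped to an NC name in IP mode
                int port = Integer.parseInt(tokens[1].trim());
                endpoints.add(host + ":" + port);
            }
            System.out.println(mode + " " + endpoints);
            // prints: IP [127.0.0.1:10001, 127.0.0.1:10002]
        }
    }
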
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
index 95378cb..7b09ade 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
@@ -28,6 +28,7 @@ import org.apache.asterix.external.input.stream.provider.TwitterFirehoseInputStr
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 /**
  * Factory class for creating @see{TwitterFirehoseFeedAdapter}. The adapter
@@ -53,7 +54,7 @@ public class TwitterFirehoseStreamProviderFactory implements IInputStreamProvide
     private Map<String, String> configuration;
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
         String ingestionCardinalityParam = configuration.get(KEY_INGESTION_CARDINALITY);
         String ingestionLocationParam = configuration.get(KEY_INGESTION_LOCATIONS);
         String[] locations = null;
@@ -80,7 +81,7 @@ public class TwitterFirehoseStreamProviderFactory implements IInputStreamProvide
     }
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
+    public void configure(Map<String, String> configuration) {
         this.configuration = configuration;
     }
 
@@ -90,7 +91,8 @@ public class TwitterFirehoseStreamProviderFactory implements IInputStreamProvide
     }
 
     @Override
-    public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition) throws Exception {
+    public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
         return new TwitterFirehoseInputStreamProvider(configuration, ctx, partition);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
index bf9653d..e1ab331 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
@@ -38,12 +38,10 @@ public class HDFSInputStreamProvider<K> extends HDFSRecordReader<K, Text> implem
 
     public HDFSInputStreamProvider(boolean read[], InputSplit[] inputSplits, String[] readSchedule, String nodeName,
             JobConf conf, Map<String, String> configuration, List<ExternalFile> snapshot) throws Exception {
-        super(read, inputSplits, readSchedule, nodeName, conf);
+        super(read, inputSplits, readSchedule, nodeName, conf, snapshot,
+                snapshot == null ? null : ExternalIndexerProvider.getIndexer(configuration));
         value = new Text();
-        configure(configuration);
         if (snapshot != null) {
-            setSnapshot(snapshot);
-            setIndexer(ExternalIndexerProvider.getIndexer(configuration));
             if (currentSplitIndex < snapshot.size()) {
                 indexer.reset(this);
             }
@@ -51,7 +49,7 @@ public class HDFSInputStreamProvider<K> extends HDFSRecordReader<K, Text> implem
     }
 
     @Override
-    public AInputStream getInputStream() throws Exception {
+    public AInputStream getInputStream() {
         return new HDFSInputStream();
     }
 
@@ -119,10 +117,6 @@ public class HDFSInputStreamProvider<K> extends HDFSRecordReader<K, Text> implem
         }
 
         @Override
-        public void configure(Map<String, String> configuration) {
-        }
-
-        @Override
         public void setFeedLogManager(FeedLogManager logManager) {
         }
 
@@ -130,8 +124,4 @@ public class HDFSInputStreamProvider<K> extends HDFSRecordReader<K, Text> implem
         public void setController(AbstractFeedDataFlowController controller) {
         }
     }
-
-    @Override
-    public void configure(Map<String, String> configuration) {
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
index 77520d4..fbe6035 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.external.input.stream.provider;
 
-import java.io.IOException;
 import java.nio.file.Path;
 import java.util.Map;
 
@@ -27,38 +26,33 @@ import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.input.stream.LocalFileSystemInputStream;
 import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
 
 public class LocalFSInputStreamProvider implements IInputStreamProvider {
 
-    private String expression;
-    private boolean isFeed;
-    private Path path;
+    private final String expression;
+    private final boolean isFeed;
+    private final Path path;
     private FeedLogManager feedLogManager;
-    private Map<String, String> configuration;
 
-    public LocalFSInputStreamProvider(FileSplit[] fileSplits, IHyracksTaskContext ctx,
-            Map<String, String> configuration, int partition, String expression, boolean isFeed) {
+    public LocalFSInputStreamProvider(final FileSplit[] fileSplits, final IHyracksTaskContext ctx,
+            final Map<String, String> configuration, final int partition, final String expression,
+            final boolean isFeed) {
         this.expression = expression;
         this.isFeed = isFeed;
         this.path = fileSplits[partition].getLocalFile().getFile().toPath();
     }
 
     @Override
-    public AInputStream getInputStream() throws IOException {
-        LocalFileSystemInputStream stream = new LocalFileSystemInputStream(path, expression, isFeed);
+    public AInputStream getInputStream() throws HyracksDataException {
+        final LocalFileSystemInputStream stream = new LocalFileSystemInputStream(path, expression, isFeed);
         stream.setFeedLogManager(feedLogManager);
-        stream.configure(configuration);
         return stream;
     }
 
     @Override
-    public void configure(Map<String, String> configuration) {
-        this.configuration = configuration;
-    }
-
-    @Override
-    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    public void setFeedLogManager(final FeedLogManager feedLogManager) {
         this.feedLogManager = feedLogManager;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketClientInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketClientInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketClientInputStreamProvider.java
new file mode 100644
index 0000000..f842638
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketClientInputStreamProvider.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.stream.provider;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.Socket;
+
+import org.apache.asterix.external.api.IInputStreamProvider;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.log4j.Logger;
+
+public class SocketClientInputStreamProvider implements IInputStreamProvider {
+
+    private static final Logger LOGGER = Logger.getLogger(SocketClientInputStreamProvider.class.getName());
+    private final Socket socket;
+
+    public SocketClientInputStreamProvider(Pair<String, Integer> ipAndPort) throws HyracksDataException {
+        try {
+            socket = new Socket(ipAndPort.first, ipAndPort.second);
+        } catch (IOException e) {
+            LOGGER.error(
+                    "Failed to create a socket to host " + ipAndPort.first + " on port " + ipAndPort.second,
+                    e);
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public AInputStream getInputStream() throws HyracksDataException {
+        InputStream in;
+        try {
+            in = socket.getInputStream();
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+        return new AInputStream() {
+            @Override
+            public int read() throws IOException {
+                throw new IOException("method not supported. use read(byte[] buffer, int offset, int length) instead");
+            }
+
+            @Override
+            public int read(byte[] buffer, int offset, int length) throws IOException {
+                return in.read(buffer, offset, length);
+            }
+
+            @Override
+            public boolean stop() throws Exception {
+                if (!socket.isClosed()) {
+                    try {
+                        in.close();
+                    } finally {
+                        socket.close();
+                    }
+                }
+                return true;
+            }
+
+            @Override
+            public boolean skipError() throws Exception {
+                return false;
+            }
+
+            @Override
+            public void setFeedLogManager(FeedLogManager logManager) {
+            }
+
+            @Override
+            public void setController(AbstractFeedDataFlowController controller) {
+            }
+        };
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
+}

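The provider above connects as a client and wraps the socket's InputStream,
rejecting single-byte read() in favor of bulk reads. A minimal usage-style sketch
of that client pattern; the endpoint is a placeholder, not the actual feed wiring:

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.Socket;

    public class SocketClientSketch {
        public static void main(String[] args) throws IOException {
            // Placeholder endpoint; the provider receives it as a Pair<String, Integer>.
            try (Socket socket = new Socket("localhost", 10001);
                    InputStream in = socket.getInputStream()) {
                byte[] buffer = new byte[8192];
                int read;
                // Bulk reads only, matching the AInputStream contract above.
                while ((read = in.read(buffer, 0, buffer.length)) != -1) {
                    System.out.write(buffer, 0, read);
                }
                System.out.flush();
            }
        }
    }
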
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java
deleted file mode 100644
index b6da314..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.stream.provider;
-
-import java.net.ServerSocket;
-import java.util.Map;
-
-import org.apache.asterix.external.api.IInputStreamProvider;
-import org.apache.asterix.external.input.stream.AInputStream;
-import org.apache.asterix.external.input.stream.SocketInputStream;
-import org.apache.asterix.external.util.FeedLogManager;
-
-public class SocketInputStreamProvider implements IInputStreamProvider {
-    private ServerSocket server;
-
-    public SocketInputStreamProvider(ServerSocket server) {
-        this.server = server;
-    }
-
-    @Override
-    public AInputStream getInputStream() throws Exception {
-        return new SocketInputStream(server);
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) {
-    }
-
-    @Override
-    public void setFeedLogManager(FeedLogManager feedLogManager) {
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketServerInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketServerInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketServerInputStreamProvider.java
new file mode 100644
index 0000000..64f0342
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketServerInputStreamProvider.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.stream.provider;
+
+import java.net.ServerSocket;
+
+import org.apache.asterix.external.api.IInputStreamProvider;
+import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.asterix.external.input.stream.SocketServerInputStream;
+import org.apache.asterix.external.util.FeedLogManager;
+
+public class SocketServerInputStreamProvider implements IInputStreamProvider {
+    private final ServerSocket server;
+
+    public SocketServerInputStreamProvider(ServerSocket server) {
+        this.server = server;
+    }
+
+    @Override
+    public AInputStream getInputStream() {
+        return new SocketServerInputStream(server);
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
+}

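The server-mode provider, by contrast, hands a pre-bound ServerSocket to
SocketServerInputStream. That stream class is not part of this diff; the sketch
below only illustrates the accept-then-read shape such a server-side source
would take, under that assumption:

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.ServerSocket;
    import java.net.Socket;

    public class SocketServerSketch {
        public static void main(String[] args) throws IOException {
            // The factory binds one port per partition; 10001 is an example.
            try (ServerSocket server = new ServerSocket(10001);
                    Socket client = server.accept(); // block until a producer connects
                    InputStream in = client.getInputStream()) {
                byte[] buffer = new byte[8192];
                int read;
                while ((read = in.read(buffer, 0, buffer.length)) != -1) {
                    System.out.write(buffer, 0, read);
                }
                System.out.flush();
            }
        }
    }
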
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
index cd4a3c1..a979262 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
@@ -35,40 +35,45 @@ import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.asterix.external.util.TweetGenerator;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider {
 
     private static final Logger LOGGER = Logger.getLogger(TwitterFirehoseInputStreamProvider.class.getName());
 
-    private ExecutorService executorService;
+    private final ExecutorService executorService;
 
-    private PipedOutputStream outputStream;
+    private final PipedOutputStream outputStream;
 
-    private PipedInputStream inputStream;
+    private final PipedInputStream inputStream;
 
-    private TwitterServer twitterServer;
+    private final TwitterServer twitterServer;
 
     public TwitterFirehoseInputStreamProvider(Map<String, String> configuration, IHyracksTaskContext ctx, int partition)
-            throws Exception {
-        executorService = Executors.newCachedThreadPool();
-        outputStream = new PipedOutputStream();
-        inputStream = new PipedInputStream(outputStream);
-        twitterServer = new TwitterServer(configuration, partition, outputStream, executorService, inputStream);
+            throws HyracksDataException {
+        try {
+            executorService = Executors.newCachedThreadPool();
+            outputStream = new PipedOutputStream();
+            inputStream = new PipedInputStream(outputStream);
+            twitterServer = new TwitterServer(configuration, partition, outputStream, executorService, inputStream);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override
-    public AInputStream getInputStream() throws Exception {
+    public AInputStream getInputStream() {
         return twitterServer;
     }
 
     private static class TwitterServer extends AInputStream {
         private final DataProvider dataProvider;
         private final ExecutorService executorService;
-        private InputStream in;
+        private final InputStream in;
         private boolean started;
 
         public TwitterServer(Map<String, String> configuration, int partition, OutputStream os,
-                ExecutorService executorService, InputStream in) throws Exception {
+                ExecutorService executorService, InputStream in) {
             dataProvider = new DataProvider(configuration, partition, os);
             this.executorService = executorService;
             this.in = in;
@@ -111,10 +116,6 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
         }
 
         @Override
-        public void configure(Map<String, String> configuration) {
-        }
-
-        @Override
         public void setFeedLogManager(FeedLogManager logManager) {
         }
 
@@ -127,7 +128,7 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
 
         public static final String KEY_MODE = "mode";
 
-        private TweetGenerator tweetGenerator;
+        private final TweetGenerator tweetGenerator;
         private boolean continuePush = true;
         private int batchSize;
         private final Mode mode;
@@ -138,7 +139,7 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
             CONTROLLED
         }
 
-        public DataProvider(Map<String, String> configuration, int partition, OutputStream os) throws Exception {
+        public DataProvider(Map<String, String> configuration, int partition, OutputStream os) {
             this.tweetGenerator = new TweetGenerator(configuration, partition);
             this.tweetGenerator.registerSubscriber(os);
             this.os = os;
@@ -163,7 +164,6 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
             boolean moreData = true;
             long startBatch;
             long endBatch;
-
             while (true) {
                 try {
                     while (moreData && continuePush) {
@@ -175,7 +175,7 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
                                 startBatch = System.currentTimeMillis();
                                 moreData = tweetGenerator.generateNextBatch(batchSize);
                                 endBatch = System.currentTimeMillis();
-                                if (endBatch - startBatch < 1000) {
+                                if ((endBatch - startBatch) < 1000) {
                                     Thread.sleep(1000 - (endBatch - startBatch));
                                 }
                                 break;
@@ -194,11 +194,6 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
         public void stop() {
             continuePush = false;
         }
-
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) {
     }
 
     @Override


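The firehose provider wires a generator thread to the reader through the
PipedOutputStream/PipedInputStream pair created in its constructor. A minimal
sketch of that producer/consumer wiring; the JSON line is a stand-in for
TweetGenerator output:

    import java.io.IOException;
    import java.io.PipedInputStream;
    import java.io.PipedOutputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class PipedFeedSketch {
        public static void main(String[] args) throws IOException {
            ExecutorService executorService = Executors.newCachedThreadPool();
            PipedOutputStream outputStream = new PipedOutputStream();
            PipedInputStream inputStream = new PipedInputStream(outputStream);
            executorService.submit(() -> {
                try {
                    // Stand-in for TweetGenerator pushing batches into the pipe.
                    outputStream.write("{\"id\": 1}\n".getBytes(StandardCharsets.UTF_8));
                    outputStream.close(); // signals EOF to the reader side
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });
            int b;
            while ((b = inputStream.read()) != -1) {
                System.out.print((char) b);
            }
            executorService.shutdown();
        }
    }
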

[03/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java b/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java
index 5df074b..a6d544e 100644
--- a/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java
+++ b/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java
@@ -17,6 +17,7 @@ package org.apache.asterix.installer.test;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
@@ -25,6 +26,8 @@ import org.apache.asterix.external.util.IdentitiyResolverFactory;
 import org.apache.asterix.test.aql.TestExecutor;
 import org.apache.asterix.test.runtime.HDFSCluster;
 import org.apache.asterix.testframework.context.TestCaseContext;
+import org.apache.asterix.testframework.context.TestFileContext;
+import org.apache.asterix.testframework.xml.TestCase.CompilationUnit;
 import org.apache.commons.lang3.StringUtils;
 import org.codehaus.plexus.util.FileUtils;
 import org.junit.AfterClass;
@@ -50,6 +53,8 @@ public abstract class AbstractExecutionIT {
 
     protected final static TestExecutor testExecutor = new TestExecutor();
 
+    private static final String EXTERNAL_LIBRARY_TEST_GROUP = "lib";
+
     @BeforeClass
     public static void setUp() throws Exception {
         System.out.println("Starting setup");
@@ -85,7 +90,7 @@ public abstract class AbstractExecutionIT {
     public static void tearDown() throws Exception {
         File outdir = new File(PATH_ACTUAL);
         File[] files = outdir.listFiles();
-        if (files == null || files.length == 0) {
+        if ((files == null) || (files.length == 0)) {
             outdir.delete();
         }
         AsterixLifecycleIT.tearDown();
@@ -111,6 +116,23 @@ public abstract class AbstractExecutionIT {
 
     @Test
     public void test() throws Exception {
+        if (skip()) {
+            return;
+        }
         testExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false);
     }
+
+    protected boolean skip() {
+        // If the test case contains library commands, skip it
+        List<CompilationUnit> cUnits = tcCtx.getTestCase().getCompilationUnit();
+        for (CompilationUnit cUnit : cUnits) {
+            List<TestFileContext> testFileCtxs = tcCtx.getTestFiles(cUnit);
+            for (TestFileContext ctx : testFileCtxs) {
+                if (ctx.getType().equals(EXTERNAL_LIBRARY_TEST_GROUP)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java b/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java
index 97709cb..8db985d 100644
--- a/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java
+++ b/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java
@@ -73,7 +73,7 @@ public class ClusterExecutionIT extends AbstractExecutionIT {
     public static void tearDown() throws Exception {
         File outdir = new File(PATH_ACTUAL);
         File[] files = outdir.listFiles();
-        if (files == null || files.length == 0) {
+        if ((files == null) || (files.length == 0)) {
             outdir.delete();
         }
 
@@ -102,6 +102,9 @@ public class ClusterExecutionIT extends AbstractExecutionIT {
     @Override
     @Test
     public void test() throws Exception {
+        if (skip()) {
+            return;
+        }
         testExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql
index 0789cf8..f188629 100644
--- a/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-adapters/typed_adapter/typed_adapter.1.ddl.aql
@@ -36,4 +36,6 @@ create dataset TweetsTestAdapter(TestTypedAdapterOutputType)
 primary key tweetid;
 
 create feed TestTypedAdapterFeed
-using "testlib#test_typed_adapter" (("num_output_records"="5"),("type-name"="TestTypedAdapterOutputType"));
+using "testlib#test_typed_adapter" (
+("num_output_records"="5"),
+("type-name"="TestTypedAdapterOutputType"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
index ebbc65e..f9a6eda 100644
--- a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
@@ -45,9 +45,12 @@ create type TweetOutputType as closed {
 }
 
 create feed TweetFeed
-using file_feed
-(("type-name"="TweetInputType"),("fs"="localfs"),("path"="asterix_nc1://../../../../../../asterix-app/data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+using localfs
+(("type-name"="TweetInputType"),
+("path"="asterix_nc1://../../../../../../asterix-app/data/twitter/obamatweets.adm"),
+("format"="adm"),
+("tuple-interval"="10"))
 apply function testlib#parseTweet;
 
-create dataset TweetsFeedIngest(TweetOutputType) 
+create dataset TweetsFeedIngest(TweetOutputType)
 primary key id;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql
index 0f40b07..d523247 100644
--- a/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql
@@ -29,6 +29,5 @@ create type Classad as open {
 
 create external dataset Condor(Classad) using localfs(
 ("path"="asterix_nc1://data/external-parser/jobads.new"),
-("reader"="adm"),
-("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
-("reader-stream"="localfs"));
+("format"="adm"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"));


[11/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
index 926022c..73de838 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
@@ -20,7 +20,6 @@ package org.apache.asterix.external.dataflow;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.util.Map;
 
 import javax.annotation.Nonnull;
 
@@ -41,12 +40,12 @@ import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
 
 public class FeedTupleForwarder implements ITupleForwarder {
 
+    private final FeedLogManager feedLogManager;
     private int maxRecordSize; // temporary until the big object in storage is solved
     private FrameTupleAppender appender;
     private IFrame frame;
     private IFrameWriter writer;
     private boolean paused = false;
-    private final FeedLogManager feedLogManager;
     private boolean initialized;
 
     public FeedTupleForwarder(@Nonnull FeedLogManager feedLogManager) {
@@ -58,10 +57,6 @@ public class FeedTupleForwarder implements ITupleForwarder {
     }
 
     @Override
-    public void configure(Map<String, String> configuration) {
-    }
-
-    @Override
     public void initialize(IHyracksTaskContext ctx, IFrameWriter writer) throws HyracksDataException {
         if (!initialized) {
             this.maxRecordSize = ((IAsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java
new file mode 100644
index 0000000..203b5a7
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataflow;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.parser.RecordWithMetadataParser;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class FeedWithMetaDataFlowController<T, O> extends FeedRecordDataFlowController<T> {
+
+    // This field masks the superclass field dataParser. We do this to avoid down-casting when calling parseMeta()
+    protected RecordWithMetadataParser<T, O> dataParser;
+
+    public FeedWithMetaDataFlowController(IHyracksTaskContext ctx, FeedTupleForwarder tupleForwarder,
+            FeedLogManager feedLogManager, int numOfOutputFields, RecordWithMetadataParser<T, O> dataParser,
+            IRecordReader<T> recordReader) {
+        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
+        this.dataParser = dataParser;
+    }
+
+    @Override
+    protected void addMetaPart(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
+        dataParser.parseMeta(tb.getDataOutput());
+        tb.addFieldEndOffset();
+    }
+}

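The field-masking comment above refers to a standard Java pattern: redeclaring a
superclass field with a narrower type, so both fields reference the same object
and the subclass can call subtype-only methods without casts. A self-contained
sketch; all class names here are illustrative:

    class Parser {
        void parse() { /* parse the record body */ }
    }

    class MetaParser extends Parser {
        void parseMeta() { /* additionally parse the metadata fields */ }
    }

    class Controller {
        protected Parser dataParser;

        Controller(Parser dataParser) {
            this.dataParser = dataParser;
        }
    }

    class MetaController extends Controller {
        // Masks Controller.dataParser with a narrower type; no (MetaParser) casts needed.
        protected MetaParser dataParser;

        MetaController(MetaParser dataParser) {
            super(dataParser); // both fields point at the same parser instance
            this.dataParser = dataParser;
        }

        void step() {
            dataParser.parse();     // resolves to the MetaParser-typed field
            dataParser.parseMeta(); // available without a down-cast
        }
    }
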
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FrameFullTupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FrameFullTupleForwarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FrameFullTupleForwarder.java
index 2caf98c..be737ae 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FrameFullTupleForwarder.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FrameFullTupleForwarder.java
@@ -18,8 +18,6 @@
  */
 package org.apache.asterix.external.dataflow;
 
-import java.util.Map;
-
 import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -37,11 +35,6 @@ public class FrameFullTupleForwarder implements ITupleForwarder {
     private IFrameWriter writer;
 
     @Override
-    public void configure(Map<String, String> configuration) {
-        // no-op
-    }
-
-    @Override
     public void initialize(IHyracksTaskContext ctx, IFrameWriter writer) throws HyracksDataException {
         this.appender = new FrameTupleAppender();
         this.frame = new VSizeFrame(ctx);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
index ffa025b..9c8563d 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
@@ -18,26 +18,34 @@
  */
 package org.apache.asterix.external.dataflow;
 
+import java.io.IOException;
+
 import javax.annotation.Nonnull;
 
 import org.apache.asterix.external.api.IExternalIndexer;
-import org.apache.asterix.external.api.IIndexingDatasource;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.ITupleForwarder;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 public class IndexingDataFlowController<T> extends RecordDataFlowController<T> {
     private final IExternalIndexer indexer;
 
-    public IndexingDataFlowController(@Nonnull IRecordDataParser<T> dataParser,
-            @Nonnull IRecordReader<? extends T> recordReader) throws Exception {
-        super(dataParser, recordReader);
-        indexer = ((IIndexingDatasource) recordReader).getIndexer();
-        numOfTupleFields += indexer.getNumberOfFields();
+    public IndexingDataFlowController(IHyracksTaskContext ctx, ITupleForwarder tupleForwarder,
+            @Nonnull IRecordDataParser<T> dataParser, @Nonnull IRecordReader<? extends T> recordReader,
+            IExternalIndexer indexer) throws IOException {
+        super(ctx, tupleForwarder, dataParser, recordReader, 1 + indexer.getNumberOfFields());
+        this.indexer = indexer;
     }
 
     @Override
-    protected void appendOtherTupleFields(ArrayTupleBuilder tb) throws Exception {
-        indexer.index(tb);
+    protected void appendOtherTupleFields(ArrayTupleBuilder tb) throws HyracksDataException {
+        try {
+            indexer.index(tb);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RateControlledTupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RateControlledTupleForwarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RateControlledTupleForwarder.java
index f8fcd6f..cb80e45 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RateControlledTupleForwarder.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RateControlledTupleForwarder.java
@@ -40,13 +40,19 @@ public class RateControlledTupleForwarder implements ITupleForwarder {
 
     public static final String INTER_TUPLE_INTERVAL = "tuple-interval";
 
-    @Override
-    public void configure(Map<String, String> configuration) {
+    private RateControlledTupleForwarder(long interTupleInterval) {
+        this.interTupleInterval = interTupleInterval;
+        delayConfigured = interTupleInterval != 0L;
+    }
+
+    // Factory method: parses the optional "tuple-interval" property and returns a fully configured forwarder
+    public static RateControlledTupleForwarder create(Map<String, String> configuration) {
+        long interTupleInterval = 0L;
         String propValue = configuration.get(INTER_TUPLE_INTERVAL);
         if (propValue != null) {
             interTupleInterval = Long.parseLong(propValue);
         }
-        delayConfigured = interTupleInterval != 0;
+        return new RateControlledTupleForwarder(interTupleInterval);
     }
 
     @Override
@@ -82,6 +88,5 @@ public class RateControlledTupleForwarder implements ITupleForwarder {
         if (appender.getTupleCount() > 0) {
             FrameUtils.flushFrame(frame.getBuffer(), writer);
         }
-
     }
 }

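With the configure(Map) mutator gone, callers obtain a fully built forwarder from
the static factory, which is what lets the interval fields be final. A
self-contained sketch of the same construction pattern; the class name is
illustrative:

    import java.util.Collections;
    import java.util.Map;

    public class RateControlledSketch {
        private final long interTupleInterval;
        private final boolean delayConfigured;

        private RateControlledSketch(long interTupleInterval) {
            this.interTupleInterval = interTupleInterval;
            this.delayConfigured = interTupleInterval != 0L;
        }

        // Mirrors RateControlledTupleForwarder.create(configuration) above.
        public static RateControlledSketch create(Map<String, String> configuration) {
            String propValue = configuration.get("tuple-interval");
            long interval = (propValue == null) ? 0L : Long.parseLong(propValue);
            return new RateControlledSketch(interval);
        }

        public static void main(String[] args) {
            RateControlledSketch s =
                    RateControlledSketch.create(Collections.singletonMap("tuple-interval", "10"));
            System.out.println(s.delayConfigured + " / " + s.interTupleInterval); // true / 10
        }
    }
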
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
index 57f0f3d..99654d0 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
@@ -23,7 +23,9 @@ import javax.annotation.Nonnull;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
@@ -31,19 +33,22 @@ public class RecordDataFlowController<T> extends AbstractDataFlowController {
 
     protected final IRecordDataParser<T> dataParser;
     protected final IRecordReader<? extends T> recordReader;
-    protected int numOfTupleFields = 1;
+    protected final int numOfTupleFields;
 
-    public RecordDataFlowController(@Nonnull IRecordDataParser<T> dataParser,
-            @Nonnull IRecordReader<? extends T> recordReader) {
+    public RecordDataFlowController(IHyracksTaskContext ctx, ITupleForwarder tupleForwarder,
+            @Nonnull IRecordDataParser<T> dataParser, @Nonnull IRecordReader<? extends T> recordReader,
+            int numOfTupleFields) {
+        super(ctx, tupleForwarder);
         this.dataParser = dataParser;
         this.recordReader = recordReader;
+        this.numOfTupleFields = numOfTupleFields;
     }
 
     @Override
     public void start(IFrameWriter writer) throws HyracksDataException {
         try {
             ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
-            initializeTupleForwarder(writer);
+            tupleForwarder.initialize(ctx, writer);
             while (recordReader.hasNext()) {
                 IRawRecord<? extends T> record = recordReader.next();
                 tb.reset();
@@ -61,24 +66,4 @@ public class RecordDataFlowController<T> extends AbstractDataFlowController {
 
     protected void appendOtherTupleFields(ArrayTupleBuilder tb) throws Exception {
     }
-
-    @Override
-    public boolean stop() {
-        return recordReader.stop();
-    }
-
-    @Override
-    public boolean handleException(Throwable th) {
-        return false;
-    }
-
-    @Override
-    public boolean pause() throws HyracksDataException {
-        return false;
-    }
-
-    @Override
-    public boolean resume() throws HyracksDataException {
-        return false;
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
index 43738eb..ccf22da 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/StreamDataFlowController.java
@@ -19,20 +19,26 @@
 package org.apache.asterix.external.dataflow;
 
 import org.apache.asterix.external.api.IStreamDataParser;
-import org.apache.asterix.external.api.IStreamFlowController;
+import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
-public class StreamDataFlowController extends AbstractDataFlowController implements IStreamFlowController {
-    private IStreamDataParser dataParser;
-    private static final int NUMBER_OF_TUPLE_FIELDS = 1;
+public class StreamDataFlowController extends AbstractDataFlowController {
+    private final IStreamDataParser dataParser;
+
+    public StreamDataFlowController(IHyracksTaskContext ctx, ITupleForwarder tupleForwarder,
+            IStreamDataParser dataParser) {
+        super(ctx, tupleForwarder);
+        this.dataParser = dataParser;
+    }
 
     @Override
     public void start(IFrameWriter writer) throws HyracksDataException {
         try {
-            ArrayTupleBuilder tb = new ArrayTupleBuilder(NUMBER_OF_TUPLE_FIELDS);
-            initializeTupleForwarder(writer);
+            ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+            tupleForwarder.initialize(ctx, writer);
             while (true) {
                 tb.reset();
                 if (!dataParser.parse(tb.getDataOutput())) {
@@ -46,29 +52,4 @@ public class StreamDataFlowController extends AbstractDataFlowController impleme
             throw new HyracksDataException(e);
         }
     }
-
-    @Override
-    public boolean stop() {
-        return false;
-    }
-
-    @Override
-    public boolean handleException(Throwable th) {
-        return false;
-    }
-
-    @Override
-    public void setStreamParser(IStreamDataParser dataParser) {
-        this.dataParser = dataParser;
-    }
-
-    @Override
-    public boolean pause() throws HyracksDataException {
-        return false;
-    }
-
-    @Override
-    public boolean resume() throws HyracksDataException {
-        return false;
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java
new file mode 100644
index 0000000..d1bde71
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/FeedAdapter.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataset.adapter;
+
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FeedAdapter implements IDataSourceAdapter {
+    private static final long serialVersionUID = 1L;
+    private final AbstractFeedDataFlowController controller;
+
+    public FeedAdapter(AbstractFeedDataFlowController controller) {
+        this.controller = controller;
+    }
+
+    @Override
+    public void start(int partition, IFrameWriter writer) throws HyracksDataException {
+        controller.start(writer);
+    }
+
+    public boolean stop() throws HyracksDataException {
+        return controller.stop();
+    }
+
+    public boolean handleException(Throwable e) {
+        return controller.handleException(e);
+    }
+
+    public boolean pause() throws HyracksDataException {
+        return controller.pause();
+    }
+
+    public boolean resume() throws HyracksDataException {
+        return controller.resume();
+    }
+}
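
FeedAdapter is now the carrier of the feed lifecycle (stop/pause/resume/handleException) that IDataSourceAdapter no longer prescribes. A hedged sketch of how a runtime manager might drive it; the controller, writer, and partition are assumed given:

    // sketch only: the lifecycle FeedAdapter exposes on top of start()
    FeedAdapter adapter = new FeedAdapter(controller);
    adapter.start(partition, writer);   // runs the controller's ingestion loop
    // from a separate manager thread:
    adapter.pause();                    // delegates to the controller; may be unsupported
    adapter.resume();
    if (!adapter.stop()) {
        // the controller could not stop cleanly; the manager must escalate
        // (e.g. interrupt the executor thread running start())
    }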

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
index d19eedf..3ab370e 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/GenericAdapter.java
@@ -19,11 +19,11 @@
 package org.apache.asterix.external.dataset.adapter;
 
 import org.apache.asterix.external.api.IDataFlowController;
-import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
-public class GenericAdapter implements IFeedAdapter {
+public class GenericAdapter implements IDataSourceAdapter {
 
     private static final long serialVersionUID = 1L;
     private final IDataFlowController controller;
@@ -36,24 +36,4 @@ public class GenericAdapter implements IFeedAdapter {
     public void start(int partition, IFrameWriter writer) throws HyracksDataException {
         controller.start(writer);
     }
-
-    @Override
-    public boolean stop() throws HyracksDataException {
-        return controller.stop();
-    }
-
-    @Override
-    public boolean handleException(Throwable e) {
-        return controller.handleException(e);
-    }
-
-    @Override
-    public boolean pause() throws HyracksDataException {
-        return controller.pause();
-    }
-
-    @Override
-    public boolean resume() throws HyracksDataException {
-        return controller.resume();
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
index 483ba19..f102f93 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedExceptionHandler.java
@@ -35,7 +35,7 @@ import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 
 public class FeedExceptionHandler implements IExceptionHandler {
 
-    private static Logger LOGGER = Logger.getLogger(FeedExceptionHandler.class.getName());
+    private static final Logger LOGGER = Logger.getLogger(FeedExceptionHandler.class.getName());
 
     //TODO: Enable logging
     private final IHyracksTaskContext ctx;
@@ -47,6 +47,11 @@ public class FeedExceptionHandler implements IExceptionHandler {
         this.fta = fta;
     }
 
+    public void prettyPrint(ByteBuffer frame) {
+        fta.reset(frame);
+        fta.prettyPrint();
+    }
+
     @Override
     public ByteBuffer handleException(Exception e, ByteBuffer frame) {
         try {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
index 3370118..1ceb36b 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
@@ -23,7 +23,6 @@ import java.util.Iterator;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.asterix.external.feed.api.IExceptionHandler;
 import org.apache.asterix.external.feed.api.IFeedManager;
 import org.apache.asterix.external.feed.api.IFeedMemoryComponent;
 import org.apache.asterix.external.feed.api.IFeedMessage;
@@ -49,29 +48,30 @@ import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
  * 2. FeedMetaStoreNodePushable.initializeNewFeedRuntime();
  *              ______
  *             |      |
- * ============|core  |============
- * ============| op   |============
+ * ============| core |============
+ * ============|  op  |============
  * ^^^^^^^^^^^^|______|
- *  Input Side
- *  Handler
- *
+ * Input Side
+ * Handler
  **/
 public class FeedRuntimeInputHandler implements IFrameWriter {
 
-    private static Logger LOGGER = Logger.getLogger(FeedRuntimeInputHandler.class.getName());
+    private static final Logger LOGGER = Logger.getLogger(FeedRuntimeInputHandler.class.getName());
 
     private final FeedConnectionId connectionId;
     private final FeedRuntimeId runtimeId;
     private final FeedPolicyAccessor feedPolicyAccessor;
-    private final IExceptionHandler exceptionHandler;
+    private final FeedExceptionHandler exceptionHandler;
     private final FeedFrameDiscarder discarder;
     private final FeedFrameSpiller spiller;
     private final FeedPolicyAccessor fpa;
     private final IFeedManager feedManager;
+    private final MonitoredBuffer mBuffer;
+    private final DataBucketPool pool;
+    private final FrameEventCallback frameEventCallback;
+
     private boolean bufferingEnabled;
     private IFrameWriter coreOperator;
-    private final MonitoredBuffer mBuffer;
-    private DataBucketPool pool;
     private FrameCollection frameCollection;
     private Mode mode;
     private Mode lastMode;
@@ -79,8 +79,6 @@ public class FeedRuntimeInputHandler implements IFrameWriter {
     private long nProcessed;
     private boolean throttlingEnabled;
 
-    private FrameEventCallback frameEventCallback;
-
     public FeedRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
             IFrameWriter coreOperator, FeedPolicyAccessor fpa, boolean bufferingEnabled, FrameTupleAccessor fta,
             RecordDescriptor recordDesc, IFeedManager feedManager, int nPartitions) throws HyracksDataException {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java
index 22be702..43d5bce 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterExecutor.java
@@ -20,7 +20,7 @@ package org.apache.asterix.external.feed.runtime;
 
 import org.apache.asterix.external.api.IAdapterRuntimeManager;
 import org.apache.asterix.external.api.IAdapterRuntimeManager.State;
-import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
 import org.apache.asterix.external.util.ExternalDataExceptionUtils;
 import org.apache.log4j.Logger;
@@ -34,10 +34,10 @@ public class AdapterExecutor implements Runnable {
 
     private final DistributeFeedFrameWriter writer;     // A writer that sends frames to multiple receivers (that can
                                                         // increase or decrease at any time)
-    private final IFeedAdapter adapter;                 // The adapter
+    private final FeedAdapter adapter;                  // The adapter
     private final IAdapterRuntimeManager adapterManager;// The runtime manager <-- two way visibility -->
 
-    public AdapterExecutor(int partition, DistributeFeedFrameWriter writer, IFeedAdapter adapter,
+    public AdapterExecutor(int partition, DistributeFeedFrameWriter writer, FeedAdapter adapter,
             IAdapterRuntimeManager adapterManager) {
         this.writer = writer;
         this.adapter = adapter;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java
index 6c3e44d..b0f2517 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/runtime/AdapterRuntimeManager.java
@@ -23,7 +23,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.asterix.external.api.IAdapterRuntimeManager;
-import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
 import org.apache.asterix.external.feed.dataflow.DistributeFeedFrameWriter;
 import org.apache.asterix.external.feed.management.FeedId;
@@ -38,7 +38,7 @@ public class AdapterRuntimeManager implements IAdapterRuntimeManager {
 
     private final FeedId feedId;                    // (dataverse-feed)
 
-    private final IFeedAdapter feedAdapter;         // The adapter
+    private final FeedAdapter feedAdapter;          // The adapter
 
     private final IIntakeProgressTracker tracker;   // Not used. needs to be fixed soon.
 
@@ -54,7 +54,7 @@ public class AdapterRuntimeManager implements IAdapterRuntimeManager {
     private State state;                            // One of {ACTIVE_INGESTION, NACTIVE_INGESTION, FINISHED_INGESTION,
                                                     // FAILED_INGESTION}
 
-    public AdapterRuntimeManager(FeedId feedId, IFeedAdapter feedAdapter, IIntakeProgressTracker tracker,
+    public AdapterRuntimeManager(FeedId feedId, FeedAdapter feedAdapter, IIntakeProgressTracker tracker,
             DistributeFeedFrameWriter writer, int partition) {
         this.feedId = feedId;
         this.feedAdapter = feedAdapter;
@@ -107,7 +107,7 @@ public class AdapterRuntimeManager implements IAdapterRuntimeManager {
     }
 
     @Override
-    public IFeedAdapter getFeedAdapter() {
+    public FeedAdapter getFeedAdapter() {
         return feedAdapter;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java
index 0fbbd2e..873b420 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java
@@ -31,14 +31,15 @@ import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 public class FileOffsetIndexer implements IExternalIndexer {
 
     private static final long serialVersionUID = 1L;
     public static final int NUM_OF_FIELDS = 2;
-    protected AMutableInt32 fileNumber = new AMutableInt32(0);
-    protected AMutableInt64 offset = new AMutableInt64(0);
+    protected final AMutableInt32 fileNumber = new AMutableInt32(0);
+    protected final AMutableInt64 offset = new AMutableInt64(0);
     protected RecordReader<?, Writable> recordReader;
 
     @SuppressWarnings("unchecked")
@@ -49,21 +50,29 @@ public class FileOffsetIndexer implements IExternalIndexer {
             .getSerializerDeserializer(BuiltinType.AINT64);
 
     @Override
-    public void reset(IRecordReader<?> reader) throws IOException {
-        //TODO: Make it more generic since we can't assume it is always going to be HDFS records.
-        @SuppressWarnings("unchecked")
-        HDFSRecordReader<?, Writable> hdfsReader = (HDFSRecordReader<?, Writable>) reader;
-        fileNumber.setValue(hdfsReader.getSnapshot().get(hdfsReader.getCurrentSplitIndex()).getFileNumber());
-        recordReader = hdfsReader.getReader();
-        offset.setValue(recordReader.getPos());
+    public void reset(IRecordReader<?> reader) throws HyracksDataException {
+        try {
+            //TODO: Make it more generic since we can't assume it is always going to be HDFS records.
+            @SuppressWarnings("unchecked")
+            HDFSRecordReader<?, Writable> hdfsReader = (HDFSRecordReader<?, Writable>) reader;
+            fileNumber.setValue(hdfsReader.getSnapshot().get(hdfsReader.getCurrentSplitIndex()).getFileNumber());
+            recordReader = hdfsReader.getReader();
+            offset.setValue(recordReader.getPos());
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override
-    public void index(ArrayTupleBuilder tb) throws IOException {
-        tb.addField(intSerde, fileNumber);
-        tb.addField(longSerde, offset);
-        // Get position for next index(tb) call
-        offset.setValue(recordReader.getPos());
+    public void index(ArrayTupleBuilder tb) throws HyracksDataException {
+        try {
+            tb.addField(intSerde, fileNumber);
+            tb.addField(longSerde, offset);
+            // Get position for next index(tb) call
+            offset.setValue(recordReader.getPos());
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override
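
Both indexer methods now wrap reader IOExceptions in HyracksDataException instead of leaking IOException. A sketch of the call protocol this class expects; the read loop and tuple plumbing are assumptions:

    // sketch only: one (fileNumber: int32, offset: int64) prefix per record
    indexer.reset(hdfsRecordReader);  // captures the file number and start position
    while (moreRecords) {             // hypothetical read loop
        tb.reset();
        indexer.index(tb);            // adds fileNumber and offset, then advances
                                      // offset to recordReader.getPos()
        // ... add the record payload fields and forward the tuple ...
    }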

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java
index 9fa26f0..a2641c8 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java
@@ -31,16 +31,17 @@ import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 public class RecordColumnarIndexer implements IExternalIndexer {
 
     private static final long serialVersionUID = 1L;
     public static final int NUM_OF_FIELDS = 3;
-    protected AMutableInt32 fileNumber = new AMutableInt32(0);
-    protected AMutableInt64 offset = new AMutableInt64(0);
+    protected final AMutableInt32 fileNumber = new AMutableInt32(0);
+    protected final AMutableInt64 offset = new AMutableInt64(0);
     protected long nextOffset;
-    protected AMutableInt32 rowNumber = new AMutableInt32(0);
+    protected final AMutableInt32 rowNumber = new AMutableInt32(0);
     protected RecordReader<?, Writable> recordReader;
 
     @SuppressWarnings("unchecked")
@@ -51,29 +52,38 @@ public class RecordColumnarIndexer implements IExternalIndexer {
             .getSerializerDeserializer(BuiltinType.AINT64);
 
     @Override
-    public void reset(IRecordReader<?> reader) throws IOException {
-        //TODO: Make this more generic. right now, it works because we only index hdfs files.
-        @SuppressWarnings("unchecked")
-        HDFSRecordReader<?, Writable> hdfsReader = (HDFSRecordReader<?, Writable>) reader;
-        fileNumber.setValue(hdfsReader.getSnapshot().get(hdfsReader.getCurrentSplitIndex()).getFileNumber());
-        recordReader = hdfsReader.getReader();
-        offset.setValue(recordReader.getPos());
-        nextOffset = offset.getLongValue();
-        rowNumber.setValue(0);
+    public void reset(IRecordReader<?> reader) throws HyracksDataException {
+        try {
+            //TODO: Make this more generic. right now, it works because we only index hdfs files.
+            @SuppressWarnings("unchecked")
+            HDFSRecordReader<?, Writable> hdfsReader = (HDFSRecordReader<?, Writable>) reader;
+            fileNumber.setValue(hdfsReader.getSnapshot().get(hdfsReader.getCurrentSplitIndex()).getFileNumber());
+            recordReader = hdfsReader.getReader();
+            offset.setValue(recordReader.getPos());
+
+            nextOffset = offset.getLongValue();
+            rowNumber.setValue(0);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override
-    public void index(ArrayTupleBuilder tb) throws IOException {
-        if (recordReader.getPos() != nextOffset) {
-            // start of a new group
-            offset.setValue(nextOffset);
-            nextOffset = recordReader.getPos();
-            rowNumber.setValue(0);
+    public void index(ArrayTupleBuilder tb) throws HyracksDataException {
+        try {
+            if (recordReader.getPos() != nextOffset) {
+                // start of a new group
+                offset.setValue(nextOffset);
+                nextOffset = recordReader.getPos();
+                rowNumber.setValue(0);
+            }
+            tb.addField(intSerde, fileNumber);
+            tb.addField(longSerde, offset);
+            tb.addField(intSerde, rowNumber);
+            rowNumber.setValue(rowNumber.getIntegerValue() + 1);
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
         }
-        tb.addField(intSerde, fileNumber);
-        tb.addField(longSerde, offset);
-        tb.addField(intSerde, rowNumber);
-        rowNumber.setValue(rowNumber.getIntegerValue() + 1);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
index 6e3ead2..f44d7bc 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
@@ -18,10 +18,12 @@
  */
 package org.apache.asterix.external.input;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IIndexibleExternalDataSource;
 import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
@@ -59,7 +61,7 @@ public class HDFSDataSourceFactory
     protected static Scheduler hdfsScheduler;
     protected static IndexingScheduler indexingScheduler;
     protected static Boolean initialized = false;
-    protected static Object initLock = new Object();
+    protected static final Object initLock = new Object();
     protected List<ExternalFile> files;
     protected Map<String, String> configuration;
     protected Class<?> recordClass;
@@ -69,37 +71,41 @@ public class HDFSDataSourceFactory
     private String nodeName;
 
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        initialize();
-        this.configuration = configuration;
-        JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
-        confFactory = new ConfFactory(conf);
-        clusterLocations = getPartitionConstraint();
-        int numPartitions = clusterLocations.getLocations().length;
-        // if files list was set, we restrict the splits to the list
-        InputSplit[] inputSplits;
-        if (files == null) {
-            inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
-        } else {
-            inputSplits = HDFSUtils.getSplits(conf, files);
-        }
-        if (indexingOp) {
-            readSchedule = indexingScheduler.getLocationConstraints(inputSplits);
-        } else {
-            readSchedule = hdfsScheduler.getLocationConstraints(inputSplits);
-        }
-        inputSplitsFactory = new InputSplitsFactory(inputSplits);
-        read = new boolean[readSchedule.length];
-        Arrays.fill(read, false);
-        if (!ExternalDataUtils.isDataSourceStreamProvider(configuration)) {
-            RecordReader<?, ?> reader = conf.getInputFormat().getRecordReader(inputSplits[0], conf, Reporter.NULL);
-            this.recordClass = reader.createValue().getClass();
-            reader.close();
+    public void configure(Map<String, String> configuration) throws AsterixException {
+        try {
+            init();
+            this.configuration = configuration;
+            JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
+            confFactory = new ConfFactory(conf);
+            clusterLocations = getPartitionConstraint();
+            int numPartitions = clusterLocations.getLocations().length;
+            // if files list was set, we restrict the splits to the list
+            InputSplit[] inputSplits;
+            if (files == null) {
+                inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
+            } else {
+                inputSplits = HDFSUtils.getSplits(conf, files);
+            }
+            if (indexingOp) {
+                readSchedule = indexingScheduler.getLocationConstraints(inputSplits);
+            } else {
+                readSchedule = hdfsScheduler.getLocationConstraints(inputSplits);
+            }
+            inputSplitsFactory = new InputSplitsFactory(inputSplits);
+            read = new boolean[readSchedule.length];
+            Arrays.fill(read, false);
+            if (!ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.STREAM)) {
+                RecordReader<?, ?> reader = conf.getInputFormat().getRecordReader(inputSplits[0], conf, Reporter.NULL);
+                this.recordClass = reader.createValue().getClass();
+                reader.close();
+            }
+        } catch (IOException e) {
+            throw new AsterixException(e);
         }
     }
 
     // Used to tell the factory to restrict the splits to the intersection between this list and the
     // actual files on hdfs side
     @Override
     public void setSnapshot(List<ExternalFile> files, boolean indexingOp) {
         this.files = files;
@@ -110,8 +116,7 @@ public class HDFSDataSourceFactory
      * The method below was modified to take care of the following
      * 1. when target files are not null, it generates a file aware input stream that validate
      * against the files
-     * 2. if the data is binary, it returns a generic reader
-     */
+     * 2. if the data is binary, it returns a generic reader */
     @Override
     public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition)
             throws HyracksDataException {
@@ -133,6 +138,7 @@ public class HDFSDataSourceFactory
      * Get the cluster locations for this input stream factory. This method specifies on which asterix nodes the
      * external
      * adapter will run and how many threads per node.
+     *
      * @return
      */
     @Override
@@ -145,7 +151,7 @@ public class HDFSDataSourceFactory
      * This method initialize the scheduler which assigns responsibility of reading different logical input splits from
      * HDFS
      */
-    private static void initialize() {
+    private static void init() {
         if (!initialized) {
             synchronized (initLock) {
                 if (!initialized) {
@@ -163,24 +169,21 @@ public class HDFSDataSourceFactory
 
     @Override
     public DataSourceType getDataSourceType() {
-        return (ExternalDataUtils.isDataSourceStreamProvider(configuration)) ? DataSourceType.STREAM
-                : DataSourceType.RECORDS;
+        return ExternalDataUtils.getDataSourceType(configuration);
     }
 
     @Override
     public IRecordReader<? extends Writable> createRecordReader(IHyracksTaskContext ctx, int partition)
-            throws Exception {
-        JobConf conf = confFactory.getConf();
-        InputSplit[] inputSplits = inputSplitsFactory.getSplits();
-        String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
-        HDFSRecordReader<Object, Writable> recordReader = new HDFSRecordReader<Object, Writable>(read, inputSplits,
-                readSchedule, nodeName, conf);
-        if (files != null) {
-            recordReader.setSnapshot(files);
-            recordReader.setIndexer(ExternalIndexerProvider.getIndexer(configuration));
+            throws HyracksDataException {
+        try {
+            JobConf conf = confFactory.getConf();
+            InputSplit[] inputSplits = inputSplitsFactory.getSplits();
+            String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
+            return new HDFSRecordReader<Object, Writable>(read, inputSplits, readSchedule, nodeName, conf, files,
+                    files == null ? null : ExternalIndexerProvider.getIndexer(configuration));
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
         }
-        recordReader.configure(configuration);
-        return recordReader;
     }
 
     @Override
@@ -195,6 +198,6 @@ public class HDFSDataSourceFactory
 
     @Override
     public boolean isIndexingOp() {
-        return (files != null && indexingOp);
+        return ((files != null) && indexingOp);
     }
 }
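
One caveat on the renamed init(): the guard field is a plain static Boolean, so the unsynchronized fast-path read is not guaranteed by the Java memory model to see fully published schedulers. A minimal sketch of the volatile variant; the HDFSUtils helper names are assumptions:

    private static volatile boolean initialized = false;  // volatile is the fix
    private static final Object initLock = new Object();

    private static void init() {
        if (!initialized) {                  // racy read, now safe: volatile
            synchronized (initLock) {
                if (!initialized) {
                    hdfsScheduler = HDFSUtils.initializeHDFSScheduler();             // hypothetical
                    indexingScheduler = HDFSUtils.initializeIndexingHDFSScheduler(); // hypothetical
                    initialized = true;      // volatile write publishes both schedulers
                }
            }
        }
    }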

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/CharArrayRecord.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/CharArrayRecord.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/CharArrayRecord.java
index c4b37f1..affdc84 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/CharArrayRecord.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/CharArrayRecord.java
@@ -84,10 +84,6 @@ public class CharArrayRecord implements IRawRecord<char[]> {
         return String.valueOf(value, 0, size);
     }
 
-    public void setValue(char[] value) {
-        this.value = value;
-    }
-
     public void endRecord() {
         if (value[size - 1] != ExternalDataConstants.LF) {
             appendChar(ExternalDataConstants.LF);
@@ -117,4 +113,10 @@ public class CharArrayRecord implements IRawRecord<char[]> {
         this.value = value;
         this.size = value.length;
     }
+
+    public void set(StringBuilder builder) {
+        ensureCapacity(builder.length());
+        builder.getChars(0, builder.length(), value, 0);
+        this.size = builder.length();
+    }
 }
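
The new set(StringBuilder) copies the builder's characters directly into the record's reusable buffer, avoiding the intermediate String and char[] that builder.toString().toCharArray() would allocate per record. Usage sketch:

    // sketch only: reuse one record object across many builder payloads
    CharArrayRecord record = new CharArrayRecord();
    StringBuilder payload = new StringBuilder();
    payload.append("{ \"id\": 1 }");
    record.set(payload);   // ensureCapacity + getChars; size = payload.length()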

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadata.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadata.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadata.java
deleted file mode 100644
index d5640a6..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadata.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import org.apache.asterix.external.api.IDataParser;
-import org.apache.asterix.external.api.IRawRecord;
-import org.apache.asterix.external.util.ExternalDataUtils;
-import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import org.apache.asterix.om.base.ABoolean;
-import org.apache.asterix.om.base.ADouble;
-import org.apache.asterix.om.base.AInt32;
-import org.apache.asterix.om.base.AInt64;
-import org.apache.asterix.om.base.AMutableDouble;
-import org.apache.asterix.om.base.AMutableInt32;
-import org.apache.asterix.om.base.AMutableInt64;
-import org.apache.asterix.om.base.AMutableString;
-import org.apache.asterix.om.base.AString;
-import org.apache.asterix.om.types.ATypeTag;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.asterix.om.types.IAType;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
-import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
-import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-
-public class RecordWithMetadata<T> {
-
-    private ArrayBackedValueStorage[] fieldValueBuffers;
-    private DataOutput[] fieldValueBufferOutputs;
-    private IValueParserFactory[] valueParserFactories;
-    private byte[] fieldTypeTags;
-    private IRawRecord<T> record;
-
-    // Serializers
-    @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.ADOUBLE);
-    private AMutableDouble mutableDouble = new AMutableDouble(0);
-    @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.ASTRING);
-    private AMutableString mutableString = new AMutableString(null);
-    @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.AINT32);
-    private AMutableInt32 mutableInt = new AMutableInt32(0);
-    @SuppressWarnings("unchecked")
-    protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.AINT64);
-    private AMutableInt64 mutableLong = new AMutableInt64(0);
-    @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
-            .getSerializerDeserializer(BuiltinType.ABOOLEAN);
-
-    public RecordWithMetadata(Class<? extends T> recordClass) {
-    }
-
-    public RecordWithMetadata(IAType[] metaTypes, Class<? extends T> recordClass) {
-        int n = metaTypes.length;
-        this.fieldValueBuffers = new ArrayBackedValueStorage[n];
-        this.fieldValueBufferOutputs = new DataOutput[n];
-        this.valueParserFactories = new IValueParserFactory[n];
-        this.fieldTypeTags = new byte[n];
-        for (int i = 0; i < n; i++) {
-            ATypeTag tag = metaTypes[i].getTypeTag();
-            fieldTypeTags[i] = tag.serialize();
-            fieldValueBuffers[i] = new ArrayBackedValueStorage();
-            fieldValueBufferOutputs[i] = fieldValueBuffers[i].getDataOutput();
-            valueParserFactories[i] = ExternalDataUtils.getParserFactory(tag);
-        }
-    }
-
-    public IRawRecord<T> getRecord() {
-        return record;
-    }
-
-    public ArrayBackedValueStorage getMetadata(int index) {
-        return fieldValueBuffers[index];
-    }
-
-    public void setRecord(IRawRecord<T> record) {
-        this.record = record;
-    }
-
-    public void reset() {
-        record.reset();
-        for (ArrayBackedValueStorage fieldBuffer : fieldValueBuffers) {
-            fieldBuffer.reset();
-        }
-    }
-
-    public void setMetadata(int index, int value) throws IOException {
-        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
-        mutableInt.setValue(value);
-        IDataParser.toBytes(mutableInt, fieldValueBuffers[index], int32Serde);
-    }
-
-    public void setMetadata(int index, long value) throws IOException {
-        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
-        mutableLong.setValue(value);
-        IDataParser.toBytes(mutableLong, fieldValueBuffers[index], int64Serde);
-    }
-
-    public void setMetadata(int index, String value) throws IOException {
-        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
-        mutableString.setValue(value);
-        IDataParser.toBytes(mutableString, fieldValueBuffers[index], stringSerde);
-    }
-
-    public void setMeta(int index, boolean value) throws IOException {
-        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
-        IDataParser.toBytes(value ? ABoolean.TRUE : ABoolean.FALSE, fieldValueBuffers[index], booleanSerde);
-    }
-
-    public void setMeta(int index, double value) throws IOException {
-        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
-        mutableDouble.setValue(value);
-        IDataParser.toBytes(mutableDouble, fieldValueBuffers[index], doubleSerde);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java
new file mode 100644
index 0000000..ca6725f
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IDataParser;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import org.apache.asterix.om.base.ABoolean;
+import org.apache.asterix.om.base.ADouble;
+import org.apache.asterix.om.base.AInt32;
+import org.apache.asterix.om.base.AInt64;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.om.util.NonTaggedFormatUtil;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import org.apache.hyracks.dataflow.common.data.parsers.IValueParser;
+
+public class RecordWithMetadataAndPK<T> extends RecordWithPK<T> {
+
+    private final ArrayBackedValueStorage[] fieldValueBuffers;
+    private final DataOutput[] fieldValueBufferOutputs;
+    private final IValueParser[] valueParsers;
+    private final byte[] fieldTypeTags;
+    private final IAType[] metaTypes;
+
+    // Serializers
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ADOUBLE);
+    private final AMutableDouble mutableDouble = new AMutableDouble(0);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ASTRING);
+    private final AMutableString mutableString = new AMutableString(null);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT32);
+    private final AMutableInt32 mutableInt = new AMutableInt32(0);
+    @SuppressWarnings("unchecked")
+    protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.AINT64);
+    private final AMutableInt64 mutableLong = new AMutableInt64(0);
+    @SuppressWarnings("unchecked")
+    private final ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+            .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+    private final int[] keyIndicator;
+
+    public RecordWithMetadataAndPK(final IRawRecord<T> record, final IAType[] metaTypes, final ARecordType recordType,
+            final int[] keyIndicator, final int[] pkIndexes, final IAType[] keyTypes) {
+        super(record, keyTypes, pkIndexes);
+        this.metaTypes = metaTypes;
+        this.fieldValueBuffers = new ArrayBackedValueStorage[metaTypes.length];
+        this.fieldValueBufferOutputs = new DataOutput[metaTypes.length];
+        this.valueParsers = new IValueParser[metaTypes.length];
+        this.fieldTypeTags = new byte[metaTypes.length];
+        for (int i = 0; i < metaTypes.length; i++) {
+            final ATypeTag tag = metaTypes[i].getTypeTag();
+            fieldTypeTags[i] = tag.serialize();
+            fieldValueBuffers[i] = new ArrayBackedValueStorage();
+            fieldValueBufferOutputs[i] = fieldValueBuffers[i].getDataOutput();
+            valueParsers[i] = ExternalDataUtils.getParserFactory(tag).createValueParser();
+        }
+        this.keyIndicator = keyIndicator;
+    }
+
+    @Override
+    public IRawRecord<T> getRecord() {
+        return record;
+    }
+
+    public ArrayBackedValueStorage getMetadata(final int index) {
+        return fieldValueBuffers[index];
+    }
+
+    @Override
+    public void reset() {
+        record.reset();
+        for (final ArrayBackedValueStorage fieldBuffer : fieldValueBuffers) {
+            fieldBuffer.reset();
+        }
+    }
+
+    public void setMetadata(final int index, final int value) throws IOException {
+        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
+        mutableInt.setValue(value);
+        IDataParser.toBytes(mutableInt, fieldValueBuffers[index], int32Serde);
+    }
+
+    public void setMetadata(final int index, final long value) throws IOException {
+        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
+        mutableLong.setValue(value);
+        IDataParser.toBytes(mutableLong, fieldValueBuffers[index], int64Serde);
+    }
+
+    public void setMetadata(final int index, final String value) throws IOException {
+        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
+        mutableString.setValue(value);
+        IDataParser.toBytes(mutableString, fieldValueBuffers[index], stringSerde);
+    }
+
+    public void setMetadata(final int index, final boolean value) throws IOException {
+        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
+        IDataParser.toBytes(value ? ABoolean.TRUE : ABoolean.FALSE, fieldValueBuffers[index], booleanSerde);
+    }
+
+    public void setMetadata(final int index, final double value) throws IOException {
+        fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
+        mutableDouble.setValue(value);
+        IDataParser.toBytes(mutableDouble, fieldValueBuffers[index], doubleSerde);
+    }
+
+    public void setRawMetadata(final int index, final char[] src, final int offset, final int length)
+            throws IOException {
+        if (length == 0) {
+            if (!NonTaggedFormatUtil.isOptional(metaTypes[index])) {
+                throw new HyracksDataException(
+                        "Field " + index + " of meta record is not an optional type so it cannot accept null value. ");
+            }
+            fieldValueBufferOutputs[index].writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
+        } else {
+            fieldValueBufferOutputs[index].writeByte(fieldTypeTags[index]);
+            valueParsers[index].parse(src, offset, length, fieldValueBufferOutputs[index]);
+        }
+    }
+
+    @Override
+    public void appendPk(final ArrayTupleBuilder tb) throws IOException {
+        for (int i = 0; i < pkIndexes.length; i++) {
+            if (keyIndicator[i] == 1) {
+                tb.addField(getMetadata(pkIndexes[i]));
+            } else {
+                throw new HyracksDataException("Can't get PK from record part");
+            }
+        }
+    }
+
+    @Override
+    public byte[] getBytes() {
+        return record.getBytes();
+    }
+
+    @Override
+    public T get() {
+        return record.get();
+    }
+
+    @Override
+    public int size() {
+        return record.size();
+    }
+
+    @Override
+    public void set(final T t) {
+        record.set(t);
+    }
+}
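
A sketch of how a converter fills this record type; the meta layout, buffer offsets, and the optionality of meta field 1 are assumptions for illustration:

    // meta layout assumed: [0] = key (string, the PK), [1] = flag (nullable boolean)
    recordWithMeta.reset();
    recordWithMeta.setRawMetadata(0, buf, keyStart, keyLen);  // tag byte + parsed value
    recordWithMeta.setRawMetadata(1, buf, 0, 0);  // empty field -> NULL tag; throws
                                                  // if meta field 1 is not optional
    recordWithMeta.appendPk(tb);  // copies meta buffer pkIndexes[i] into the tuple;
                                  // throws if an indicator points at the record part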

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithPK.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithPK.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithPK.java
new file mode 100644
index 0000000..b99d4d5
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithPK.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class RecordWithPK<T> implements IRawRecord<T> {
+
+    protected final ArrayBackedValueStorage[] pkFieldValueBuffers;
+    protected final int[] pkIndexes;
+    protected final IAType[] keyTypes;
+    protected IRawRecord<T> record;
+
+    public RecordWithPK(final IRawRecord<T> record, final IAType[] pkTypes, final int[] pkIndexes) {
+        this.record = record;
+        this.keyTypes = pkTypes;
+        this.pkIndexes = pkIndexes;
+        if (keyTypes != null) {
+            this.pkFieldValueBuffers = new ArrayBackedValueStorage[pkTypes.length];
+            // initialize the buffers up front so reset()/appendPk() never see null slots
+            for (int i = 0; i < pkTypes.length; i++) {
+                this.pkFieldValueBuffers[i] = new ArrayBackedValueStorage();
+            }
+        } else {
+            this.pkFieldValueBuffers = null;
+        }
+    }
+
+    public RecordWithPK(final IRawRecord<T> rawRecord, final ArrayBackedValueStorage[] pkFieldValueBuffers) {
+        this.record = rawRecord;
+        this.keyTypes = null;
+        this.pkIndexes = null;
+        this.pkFieldValueBuffers = pkFieldValueBuffers;
+    }
+
+    public ArrayBackedValueStorage[] getPKs() {
+        return pkFieldValueBuffers;
+    }
+
+    @Override
+    public byte[] getBytes() {
+        return record.getBytes();
+    }
+
+    @Override
+    public T get() {
+        return record.get();
+    }
+
+    public IRawRecord<? extends T> getRecord() {
+        return record;
+    }
+
+    @Override
+    public void reset() {
+        record.reset();
+        for (final ArrayBackedValueStorage pkStorage : pkFieldValueBuffers) {
+            pkStorage.reset();
+        }
+    }
+
+    @Override
+    public int size() {
+        return record.size();
+    }
+
+    @Override
+    public void set(final T t) {
+        record.set(t);
+    }
+
+    public void appendPk(final ArrayTupleBuilder tb) throws IOException {
+        for (final ArrayBackedValueStorage pkStorage : pkFieldValueBuffers) {
+            tb.addField(pkStorage);
+        }
+    }
+
+    public void set(final IRawRecord<? extends T> record) {
+        this.record.set(record.get());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java
new file mode 100644
index 0000000..8255ebb
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVToRecordWithMetadataAndPKConverter.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.converter;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.input.record.CharArrayRecord;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.dataflow.std.file.FieldCursorForDelimitedDataParser;
+
+public class CSVToRecordWithMetadataAndPKConverter
+        implements IRecordToRecordWithMetadataAndPKConverter<char[], char[]> {
+
+    private final FieldCursorForDelimitedDataParser cursor;
+    private final int valueIndex;
+    private final RecordWithMetadataAndPK<char[]> recordWithMetadata;
+    private final CharArrayRecord record;
+
+    public CSVToRecordWithMetadataAndPKConverter(final int valueIndex, final char delimiter, final ARecordType metaType,
+            final ARecordType recordType, final int[] keyIndicator, final int[] keyIndexes, final IAType[] keyTypes) {
+        this.cursor = new FieldCursorForDelimitedDataParser(null, delimiter, ExternalDataConstants.QUOTE);
+        this.record = new CharArrayRecord();
+        this.valueIndex = valueIndex;
+        this.recordWithMetadata = new RecordWithMetadataAndPK<char[]>(record, metaType.getFieldTypes(), recordType,
+                keyIndicator, keyIndexes, keyTypes);
+    }
+
+    @Override
+    public RecordWithMetadataAndPK<char[]> convert(final IRawRecord<? extends char[]> input) throws IOException {
+        record.reset();
+        recordWithMetadata.reset();
+        cursor.nextRecord(input.get(), input.size());
+        int i = 0;
+        int j = 0;
+        while (cursor.nextField()) {
+            if (cursor.isDoubleQuoteIncludedInThisField) {
+                cursor.eliminateDoubleQuote(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
+                cursor.fEnd -= cursor.doubleQuoteCount;
+                cursor.isDoubleQuoteIncludedInThisField = false;
+            }
+            if (i == valueIndex) {
+                record.setValue(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
+                record.endRecord();
+            } else {
+                recordWithMetadata.setRawMetadata(j, cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart);
+                j++;
+            }
+            i++;
+        }
+        return recordWithMetadata;
+    }
+}
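
A worked example of the field routing above, with hypothetical values valueIndex == 2 and delimiter == ','; rawCsvLine is an IRawRecord<char[]> assumed given:

    // input line:  7,store1,"{""id"": 7}"
    //   field 0 -> setRawMetadata(0, ...)              (meta slot j == 0)
    //   field 1 -> setRawMetadata(1, ...)              (meta slot j == 1)
    //   field 2 -> record.setValue(...); endRecord()   (the record body, i == valueIndex)
    RecordWithMetadataAndPK<char[]> out = converter.convert(rawCsvLine);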

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVWithRecordConverterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVWithRecordConverterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVWithRecordConverterFactory.java
new file mode 100644
index 0000000..ee16228
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/CSVWithRecordConverterFactory.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.converter;
+
+import java.util.Arrays;
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IRecordConverter;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.IAType;
+
+public class CSVWithRecordConverterFactory implements IRecordConverterFactory<char[], RecordWithMetadataAndPK<char[]>> {
+
+    private static final long serialVersionUID = 1L;
+    private int recordIndex;
+    private char delimiter;
+    private ARecordType metaType;
+    private ARecordType recordType;
+    private int[] keyIndicators;
+    private int[] keyIndexes;
+    private IAType[] keyTypes;
+
+    @Override
+    public IRecordConverter<char[], RecordWithMetadataAndPK<char[]>> createConverter() {
+        return new CSVToRecordWithMetadataAndPKConverter(recordIndex, delimiter, metaType, recordType, keyIndicators,
+                keyIndexes, keyTypes);
+    }
+
+    @Override
+    public void configure(final Map<String, String> configuration) throws AsterixException {
+        //validate and set
+        String property = configuration.get(ExternalDataConstants.KEY_RECORD_INDEX);
+        if (property == null) {
+            throw new AsterixException(
+                    "Unspecified " + ExternalDataConstants.KEY_RECORD_INDEX + " for csv to csv with record converter");
+        }
+        recordIndex = Integer.parseInt(property);
+        property = configuration.get(ExternalDataConstants.KEY_DELIMITER);
+        if (property == null) {
+            throw new AsterixException(
+                    "Unspecified " + ExternalDataConstants.KEY_DELIMITER + " for csv to csv with record converter");
+        }
+        if (property.trim().length() > 1) {
+            throw new AsterixException("Large delimiter. The maximum delimiter size = 1");
+        }
+        delimiter = property.trim().charAt(0);
+        // only works for top level keys
+        property = configuration.get(ExternalDataConstants.KEY_KEY_INDEXES);
+        if (property == null) {
+            keyIndexes = null;
+            keyIndicators = null;
+            keyTypes = null;
+        } else {
+            final String[] indexes = property.split(",");
+            keyIndexes = new int[indexes.length];
+            for (int i = 0; i < keyIndexes.length; i++) {
+                keyIndexes[i] = Integer.parseInt(indexes[i].trim());
+            }
+            // default key indicators point to meta part
+            property = configuration.get(ExternalDataConstants.KEY_KEY_INDICATORS);
+            if (property == null) {
+                keyIndicators = new int[keyIndexes.length];
+                Arrays.fill(keyIndicators, 1);
+            } else {
+                keyIndicators = new int[keyIndexes.length];
+                final String[] indicators = property.split(",");
+                for (int i = 0; i < keyIndicators.length; i++) {
+                    keyIndicators[i] = Integer.parseInt(indicators[i].trim());
+                    if ((keyIndicators[i] > 1) || (keyIndicators[i] < 0)) {
+                        throw new AsterixException("Invalid " + ExternalDataConstants.KEY_KEY_INDICATORS
+                                + " value. Allowed values are only 0 and 1.");
+                    }
+                }
+            }
+            // resolve each key's type from the record part (indicator == 0) or the meta part (indicator == 1)
+            keyTypes = new IAType[keyIndexes.length];
+            for (int i = 0; i < keyIndicators.length; i++) {
+                if (keyIndicators[i] == 0) {
+                    keyTypes[i] = recordType.getFieldTypes()[keyIndexes[i]];
+                } else {
+                    keyTypes[i] = metaType.getFieldTypes()[keyIndexes[i]];
+                }
+            }
+        }
+    }
+
+    @Override
+    public Class<?> getInputClass() {
+        return char[].class;
+    }
+
+    @Override
+    public Class<?> getOutputClass() {
+        return char[].class;
+    }
+
+    @Override
+    public void setRecordType(final ARecordType recordType) {
+        this.recordType = recordType;
+    }
+
+    @Override
+    public void setMetaType(final ARecordType metaType) {
+        this.metaType = metaType;
+    }
+}

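Note: the factory above is configured entirely through the adapter's string properties. A minimal, hypothetical configuration sketch (the record/meta ARecordTypes are assumed to come from the feed definition; the property keys are the ExternalDataConstants used above):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.external.util.ExternalDataConstants;

    // Record payload in CSV column 2, '|' as the delimiter, and one primary
    // key taken from column 0 of the meta part (indicator 1).
    Map<String, String> conf = new HashMap<String, String>();
    conf.put(ExternalDataConstants.KEY_RECORD_INDEX, "2");
    conf.put(ExternalDataConstants.KEY_DELIMITER, "|");
    conf.put(ExternalDataConstants.KEY_KEY_INDEXES, "0");
    conf.put(ExternalDataConstants.KEY_KEY_INDICATORS, "1");

    CSVWithRecordConverterFactory factory = new CSVWithRecordConverterFactory();
    factory.setMetaType(metaType);     // assumed: ARecordType of the meta part
    factory.setRecordType(recordType); // assumed: ARecordType of the record part
    factory.configure(conf);
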
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPConverterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPConverterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPConverterFactory.java
new file mode 100644
index 0000000..1d9311e
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPConverterFactory.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.converter;
+
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IRecordConverter;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+import org.apache.asterix.om.types.ARecordType;
+
+import com.couchbase.client.core.message.dcp.DCPRequest;
+
+public class DCPConverterFactory implements IRecordConverterFactory<DCPRequest, RecordWithMetadataAndPK<char[]>> {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public void configure(final Map<String, String> configuration) throws AsterixException {
+    }
+
+    @Override
+    public Class<?> getInputClass() {
+        return DCPRequest.class;
+    }
+
+    @Override
+    public Class<?> getOutputClass() {
+        return char[].class;
+    }
+
+    @Override
+    public void setRecordType(final ARecordType recordType) {
+    }
+
+    @Override
+    public IRecordConverter<DCPRequest, RecordWithMetadataAndPK<char[]>> createConverter() {
+        return new DCPRequestToRecordWithMetadataAndPKConverter();
+    }
+
+    @Override
+    public void setMetaType(final ARecordType metaType) {
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPRequestToRecordWithMetadataAndPKConverter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPRequestToRecordWithMetadataAndPKConverter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPRequestToRecordWithMetadataAndPKConverter.java
new file mode 100644
index 0000000..1f82e85
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPRequestToRecordWithMetadataAndPKConverter.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.converter;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.CharBuffer;
+import java.nio.charset.CharsetDecoder;
+import java.nio.charset.StandardCharsets;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.input.record.CharArrayRecord;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+import com.couchbase.client.core.message.dcp.DCPRequest;
+import com.couchbase.client.core.message.dcp.MutationMessage;
+import com.couchbase.client.core.message.dcp.RemoveMessage;
+import com.couchbase.client.deps.io.netty.buffer.ByteBuf;
+
+public class DCPRequestToRecordWithMetadataAndPKConverter
+        implements IRecordToRecordWithMetadataAndPKConverter<DCPRequest, char[]> {
+
+    private final RecordWithMetadataAndPK<char[]> recordWithMetadata;
+    private final CharArrayRecord value;
+    private final CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
+    private final ByteBuffer bytes = ByteBuffer.allocateDirect(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
+    private final CharBuffer chars = CharBuffer.allocate(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
+    // metaTypes = {key(string), bucket(string), vbucket(int32), seq(long), cas(long),
+    // creationTime(long),expiration(int32),flags(int32),revSeqNumber(long),lockTime(int32)}
+    private static final IAType[] CB_META_TYPES = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING,
+            BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT64, BuiltinType.AINT64, BuiltinType.AINT32,
+            BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT32 };
+    private static final int[] PK_INDICATOR = { 1 };
+    private static final int[] PK_INDEXES = { 0 };
+    private static final IAType[] PK_TYPES = { BuiltinType.ASTRING };
+
+    public DCPRequestToRecordWithMetadataAndPKConverter() {
+        this.value = new CharArrayRecord();
+        this.recordWithMetadata = new RecordWithMetadataAndPK<char[]>(value, CB_META_TYPES,
+                ARecordType.FULLY_OPEN_RECORD_TYPE, PK_INDICATOR, PK_INDEXES, PK_TYPES);
+    }
+
+    @Override
+    public RecordWithMetadataAndPK<char[]> convert(final IRawRecord<? extends DCPRequest> input) throws IOException {
+        final DCPRequest dcpRequest = input.get();
+        if (dcpRequest instanceof MutationMessage) {
+            final MutationMessage message = (MutationMessage) dcpRequest;
+            final String key = message.key();
+            final int vbucket = message.partition();
+            final long seq = message.bySequenceNumber();
+            final String bucket = message.bucket();
+            final long cas = message.cas();
+            final long creationTime = message.creationTime();
+            final int expiration = message.expiration();
+            final int flags = message.flags();
+            final long revSeqNumber = message.revisionSequenceNumber();
+            final int lockTime = message.lockTime();
+            recordWithMetadata.reset();
+            recordWithMetadata.setMetadata(0, key);
+            recordWithMetadata.setMetadata(1, bucket);
+            recordWithMetadata.setMetadata(2, vbucket);
+            recordWithMetadata.setMetadata(3, seq);
+            recordWithMetadata.setMetadata(4, cas);
+            recordWithMetadata.setMetadata(5, creationTime);
+            recordWithMetadata.setMetadata(6, expiration);
+            recordWithMetadata.setMetadata(7, flags);
+            recordWithMetadata.setMetadata(8, revSeqNumber);
+            recordWithMetadata.setMetadata(9, lockTime);
+            DCPRequestToRecordWithMetadataAndPKConverter.set(message.content(), decoder, bytes, chars, value);
+        } else if (dcpRequest instanceof RemoveMessage) {
+            final RemoveMessage message = (RemoveMessage) dcpRequest;
+            final String key = message.key();
+            recordWithMetadata.reset();
+            recordWithMetadata.setMetadata(0, key);
+        } else {
+            throw new HyracksDataException("Unknown DCP request: " + dcpRequest);
+        }
+        return recordWithMetadata;
+    }
+
+    public static void set(final ByteBuf content, final CharsetDecoder decoder, final ByteBuffer bytes,
+            final CharBuffer chars, final CharArrayRecord record) {
+        int position = content.readerIndex();
+        final int limit = content.writerIndex();
+        while (position < limit) {
+            bytes.clear();
+            chars.clear();
+            // cap the read to the bytes actually remaining in the ByteBuf
+            if ((limit - position) < bytes.capacity()) {
+                bytes.limit(limit - position);
+            }
+            content.getBytes(position, bytes);
+            position += bytes.position();
+            bytes.flip();
+            decoder.decode(bytes, chars, false);
+            chars.flip();
+            record.append(chars);
+        }
+        record.endRecord();
+    }
+}

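Note on the set(...) helper above: it streams a possibly large Couchbase ByteBuf through one fixed-size byte buffer and one fixed-size char buffer, so decoding needs no per-record allocation. A self-contained sketch of the same chunked UTF-8 decoding pattern (plain byte[] standing in for the ByteBuf, StringBuilder for CharArrayRecord) might look like this; the compact() call preserves a multi-byte sequence that straddles a chunk boundary, a case the helper above would appear to drop:

    import java.nio.ByteBuffer;
    import java.nio.CharBuffer;
    import java.nio.charset.CharsetDecoder;
    import java.nio.charset.StandardCharsets;

    public class ChunkedDecodeSketch {
        // Decodes well-formed UTF-8 input chunk by chunk through fixed-size buffers.
        public static String decodeInChunks(byte[] source, int bufferSize) {
            CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
            ByteBuffer bytes = ByteBuffer.allocate(bufferSize);
            CharBuffer chars = CharBuffer.allocate(bufferSize);
            StringBuilder out = new StringBuilder();
            int position = 0;
            while (position < source.length) {
                // fill whatever space the last compact() left free
                int len = Math.min(bytes.remaining(), source.length - position);
                bytes.put(source, position, len);
                position += len;
                bytes.flip();
                decoder.decode(bytes, chars, position == source.length);
                chars.flip();
                out.append(chars);
                chars.clear();
                bytes.compact(); // carry a split multi-byte sequence into the next round
            }
            return out.toString();
        }

        public static void main(String[] args) {
            byte[] utf8 = "change-feed récord".getBytes(StandardCharsets.UTF_8);
            System.out.println(decodeInChunks(utf8, 4));
        }
    }
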
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordConverterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordConverterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordConverterFactory.java
new file mode 100644
index 0000000..4990527
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/IRecordConverterFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.converter;
+
+import java.io.Serializable;
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IRecordConverter;
+import org.apache.asterix.om.types.ARecordType;
+
+public interface IRecordConverterFactory<I, O> extends Serializable {
+
+    public IRecordConverter<I, O> createConverter();
+
+    public void configure(Map<String, String> configuration) throws AsterixException;
+
+    public Class<?> getInputClass();
+
+    public Class<?> getOutputClass();
+
+    public void setRecordType(ARecordType recordType);
+
+    public void setMetaType(ARecordType metaType);
+
+}

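A minimal, purely illustrative implementation of this contract — assuming, as the DCP converter above suggests, that IRecordConverter<I, O> declares a single O convert(IRawRecord<? extends I> input) throws IOException method — is an identity pass-through; it shows which state lives in the (Serializable) factory and which in the per-task converter:

    import java.io.IOException;
    import java.util.Map;

    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.api.IRawRecord;
    import org.apache.asterix.external.api.IRecordConverter;
    import org.apache.asterix.om.types.ARecordType;

    public class IdentityConverterFactory implements IRecordConverterFactory<char[], char[]> {

        private static final long serialVersionUID = 1L;

        @Override
        public IRecordConverter<char[], char[]> createConverter() {
            return new IRecordConverter<char[], char[]>() {
                @Override
                public char[] convert(IRawRecord<? extends char[]> input) throws IOException {
                    return input.get(); // hand the raw payload through unchanged
                }
            };
        }

        @Override
        public void configure(Map<String, String> configuration) throws AsterixException {
            // nothing to validate for an identity conversion
        }

        @Override
        public Class<?> getInputClass() {
            return char[].class;
        }

        @Override
        public Class<?> getOutputClass() {
            return char[].class;
        }

        @Override
        public void setRecordType(ARecordType recordType) {
            // type-agnostic: no record schema needed
        }

        @Override
        public void setMetaType(ARecordType metaType) {
            // no meta part is produced
        }
    }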


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
index 71ffef8..fb6e0f9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/ExternalDatasetIndexesCommitOperatorDescriptor.java
@@ -21,18 +21,23 @@ package org.apache.asterix.external.operators;
 import java.util.List;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
 import org.apache.hyracks.storage.am.common.api.IIndex;
 import org.apache.hyracks.storage.am.common.api.IIndexDataflowHelper;
+import org.apache.hyracks.storage.am.common.api.IndexException;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.util.IndexFileNameUtil;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
 import org.apache.hyracks.storage.am.lsm.common.api.ITwoPCIndex;
 import org.apache.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
+import org.apache.log4j.Logger;
 
 public class ExternalDatasetIndexesCommitOperatorDescriptor extends AbstractExternalDatasetIndexesOperatorDescriptor {
+    private static final Logger LOGGER = Logger
+            .getLogger(ExternalDatasetIndexesCommitOperatorDescriptor.class.getName());
 
     public ExternalDatasetIndexesCommitOperatorDescriptor(IOperatorDescriptorRegistry spec,
             ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory,
@@ -49,17 +54,22 @@ public class ExternalDatasetIndexesCommitOperatorDescriptor extends AbstractExte
 
     @Override
     protected void performOpOnIndex(IIndexDataflowHelperFactory indexDataflowHelperFactory, IHyracksTaskContext ctx,
-            IndexInfoOperatorDescriptor fileIndexInfo, int partition) throws Exception {
-        FileReference resourecePath = IndexFileNameUtil.getIndexAbsoluteFileRef(fileIndexInfo, partition, ctx.getIOManager());
-        System.err.println("performing the operation on "+ resourecePath.getFile().getAbsolutePath());
-        // Get DataflowHelper
-        IIndexDataflowHelper indexHelper = indexDataflowHelperFactory.createIndexDataflowHelper(fileIndexInfo, ctx, partition);
-        // Get index
-        IIndex index = indexHelper.getIndexInstance();
-        // commit transaction
-        ((ITwoPCIndex) index).commitTransaction();
-        System.err.println("operation on "+ resourecePath.getFile().getAbsolutePath() + " Succeded");
-
+            IndexInfoOperatorDescriptor fileIndexInfo, int partition) {
+        try {
+            FileReference resourcePath = IndexFileNameUtil.getIndexAbsoluteFileRef(fileIndexInfo, partition,
+                    ctx.getIOManager());
+            LOGGER.warn("performing the operation on " + resourcePath.getFile().getAbsolutePath());
+            // Get DataflowHelper
+            IIndexDataflowHelper indexHelper = indexDataflowHelperFactory.createIndexDataflowHelper(fileIndexInfo, ctx,
+                    partition);
+            // Get index
+            IIndex index = indexHelper.getIndexInstance();
+            // commit transaction
+            ((ITwoPCIndex) index).commitTransaction();
+            LOGGER.warn("operation on " + resourecePath.getFile().getAbsolutePath() + " Succeded");
+        } catch (HyracksDataException | IndexException e) {
+            throw new IllegalStateException(e);
+        }
     }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
index a18ebcd..db11caa 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
@@ -36,6 +36,7 @@ import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
 import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.job.JobSpecification;
 import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
@@ -70,19 +71,21 @@ public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperator
     /** The configuration parameters associated with the adapter. **/
     private Map<String, String> adaptorConfiguration;
 
-    private ARecordType adapterOutputType;
+    private final ARecordType adapterOutputType;
 
     public FeedIntakeOperatorDescriptor(JobSpecification spec, IFeed primaryFeed, IAdapterFactory adapterFactory,
-            ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor) {
+            ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor, RecordDescriptor rDesc) {
         super(spec, 0, 1);
         this.feedId = new FeedId(primaryFeed.getDataverseName(), primaryFeed.getFeedName());
         this.adaptorFactory = adapterFactory;
         this.adapterOutputType = adapterOutputType;
         this.policyAccessor = policyAccessor;
+        this.recordDescriptors[0] = rDesc;
     }
 
     public FeedIntakeOperatorDescriptor(JobSpecification spec, IFeed primaryFeed, String adapterLibraryName,
-            String adapterFactoryClassName, ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor) {
+            String adapterFactoryClassName, ARecordType adapterOutputType, FeedPolicyAccessor policyAccessor,
+            RecordDescriptor rDesc) {
         super(spec, 0, 1);
         this.feedId = new FeedId(primaryFeed.getDataverseName(), primaryFeed.getFeedName());
         this.adaptorFactoryClassName = adapterFactoryClassName;
@@ -90,6 +93,7 @@ public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperator
         this.adaptorConfiguration = primaryFeed.getAdapterConfiguration();
         this.adapterOutputType = adapterOutputType;
         this.policyAccessor = policyAccessor;
+        this.recordDescriptors[0] = rDesc;
     }
 
     @Override
@@ -112,7 +116,7 @@ public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperator
 
         }
         return new FeedIntakeOperatorNodePushable(ctx, feedId, adaptorFactory, partition, ingestionRuntime,
-                policyAccessor);
+                policyAccessor, recordDescProvider, this);
     }
 
     private IAdapterFactory createExtenralAdapterFactory(IHyracksTaskContext ctx, int partition) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
index 9398fa1..c1748d9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorNodePushable.java
@@ -28,7 +28,7 @@ import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IAdapterRuntimeManager;
 import org.apache.asterix.external.api.IAdapterRuntimeManager.State;
-import org.apache.asterix.external.api.IFeedAdapter;
+import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.asterix.external.feed.api.IFeedManager;
 import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
 import org.apache.asterix.external.feed.api.IFeedSubscriptionManager;
@@ -42,6 +42,7 @@ import org.apache.asterix.external.feed.runtime.CollectionRuntime;
 import org.apache.asterix.external.feed.runtime.IngestionRuntime;
 import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
@@ -63,12 +64,19 @@ public class FeedIntakeOperatorNodePushable extends AbstractUnaryOutputSourceOpe
     private final IAdapterFactory adapterFactory;
 
     private IngestionRuntime ingestionRuntime;
-    private IFeedAdapter adapter;
+    private FeedAdapter adapter;
     private IIntakeProgressTracker tracker;
     private DistributeFeedFrameWriter feedFrameWriter;
 
+    private final FeedIntakeOperatorDescriptor opDesc;
+
+    private final IRecordDescriptorProvider recordDescProvider;
+
     public FeedIntakeOperatorNodePushable(IHyracksTaskContext ctx, FeedId feedId, IAdapterFactory adapterFactory,
-            int partition, IngestionRuntime ingestionRuntime, FeedPolicyAccessor policyAccessor) {
+            int partition, IngestionRuntime ingestionRuntime, FeedPolicyAccessor policyAccessor,
+            IRecordDescriptorProvider recordDescProvider, FeedIntakeOperatorDescriptor feedIntakeOperatorDescriptor) {
+        this.opDesc = feedIntakeOperatorDescriptor;
+        this.recordDescProvider = recordDescProvider;
         this.ctx = ctx;
         this.feedId = feedId;
         this.partition = partition;
@@ -86,7 +94,7 @@ public class FeedIntakeOperatorNodePushable extends AbstractUnaryOutputSourceOpe
         try {
             if (ingestionRuntime == null) {
                 try {
-                    adapter = (IFeedAdapter) adapterFactory.createAdapter(ctx, partition);
+                    adapter = (FeedAdapter) adapterFactory.createAdapter(ctx, partition);
                     //TODO: Fix record tracking
                     //                    if (adapterFactory.isRecordTrackingEnabled()) {
                     //                        tracker = adapterFactory.createIntakeProgressTracker();
@@ -96,6 +104,8 @@ public class FeedIntakeOperatorNodePushable extends AbstractUnaryOutputSourceOpe
                             + " Exception " + e);
                     throw new HyracksDataException(e);
                 }
+
+                recordDesc = recordDescProvider.getOutputRecordDescriptor(opDesc.getActivityId(), 0);
                 FrameTupleAccessor fta = new FrameTupleAccessor(recordDesc);
                 feedFrameWriter = new DistributeFeedFrameWriter(ctx, feedId, writer, FeedRuntimeType.INTAKE, partition,
                         fta, feedManager);
@@ -180,7 +190,7 @@ public class FeedIntakeOperatorNodePushable extends AbstractUnaryOutputSourceOpe
             e.printStackTrace();
             throw new HyracksDataException(e);
         } finally {
-            if (ingestionRuntime != null
+            if ((ingestionRuntime != null)
                     && !ingestionRuntime.getAdapterRuntimeManager().getState().equals(State.INACTIVE_INGESTION)) {
                 feedFrameWriter.close();
                 if (LOGGER.isLoggable(Level.INFO)) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java
index 9eb6c78..354636d 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaOperatorDescriptor.java
@@ -52,7 +52,7 @@ public class FeedMetaOperatorDescriptor extends AbstractSingleActivityOperatorDe
      * The actual (Hyracks) operator that is wrapped around by the MetaFeed
      * operator.
      **/
-    private IOperatorDescriptor coreOperator;
+    private final IOperatorDescriptor coreOperator;
 
     /**
      * A unique identifier for the feed instance. A feed instance represents the
@@ -73,9 +73,9 @@ public class FeedMetaOperatorDescriptor extends AbstractSingleActivityOperatorDe
 
     private final String operandId;
 
-    public FeedMetaOperatorDescriptor(JobSpecification spec, FeedConnectionId feedConnectionId,
-            IOperatorDescriptor coreOperatorDescriptor, Map<String, String> feedPolicyProperties,
-            FeedRuntimeType runtimeType, boolean enableSubscriptionMode, String operandId) {
+    public FeedMetaOperatorDescriptor(final JobSpecification spec, final FeedConnectionId feedConnectionId,
+            final IOperatorDescriptor coreOperatorDescriptor, final Map<String, String> feedPolicyProperties,
+            final FeedRuntimeType runtimeType, final boolean enableSubscriptionMode, final String operandId) {
         super(spec, coreOperatorDescriptor.getInputArity(), coreOperatorDescriptor.getOutputArity());
         this.feedConnectionId = feedConnectionId;
         this.feedPolicyProperties = feedPolicyProperties;
@@ -88,8 +88,9 @@ public class FeedMetaOperatorDescriptor extends AbstractSingleActivityOperatorDe
     }
 
     @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+            final IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
+                    throws HyracksDataException {
         IOperatorNodePushable nodePushable = null;
         switch (runtimeType) {
             case COMPUTE:
@@ -98,7 +99,7 @@ public class FeedMetaOperatorDescriptor extends AbstractSingleActivityOperatorDe
                 break;
             case STORE:
                 nodePushable = new FeedMetaStoreNodePushable(ctx, recordDescProvider, partition, nPartitions,
-                        coreOperator, feedConnectionId, feedPolicyProperties, operandId);
+                        coreOperator, feedConnectionId, feedPolicyProperties, operandId, this);
                 break;
             case OTHER:
                 nodePushable = new FeedMetaNodePushable(ctx, recordDescProvider, partition, nPartitions, coreOperator,

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
index 018aeaa..7150210 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
@@ -48,13 +48,13 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
     private static final Logger LOGGER = Logger.getLogger(FeedMetaStoreNodePushable.class.getName());
 
     /** Runtime node pushable corresponding to the core feed operator **/
-    private AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
+    private final AbstractUnaryInputUnaryOutputOperatorNodePushable coreOperator;
 
     /**
      * A policy enforcer that ensures dynamic decisions for a feed are taken
      * in accordance with the associated ingestion policy
      **/
-    private FeedPolicyEnforcer policyEnforcer;
+    private final FeedPolicyEnforcer policyEnforcer;
 
     /**
      * The Feed Runtime instance associated with the operator. Feed Runtime
@@ -66,21 +66,21 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
      * A unique identifier for the feed instance. A feed instance represents
      * the flow of data from a feed to a dataset.
      **/
-    private FeedConnectionId connectionId;
+    private final FeedConnectionId connectionId;
 
     /**
      * Denotes the i'th operator instance in a setting where K operator
      * instances are scheduled to run in parallel
      **/
-    private int partition;
+    private final int partition;
 
-    private int nPartitions;
+    private final int nPartitions;
 
     /** Type associated with the core feed operator **/
     private final FeedRuntimeType runtimeType = FeedRuntimeType.STORE;
 
     /** The (singleton) instance of IFeedManager **/
-    private IFeedManager feedManager;
+    private final IFeedManager feedManager;
 
     private FrameTupleAccessor fta;
 
@@ -90,11 +90,16 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
 
     private FeedRuntimeInputHandler inputSideHandler;
 
-    private ByteBuffer message = ByteBuffer.allocate(MessagingFrameTupleAppender.MAX_MESSAGE_SIZE);
+    private final ByteBuffer message = ByteBuffer.allocate(MessagingFrameTupleAppender.MAX_MESSAGE_SIZE);
+
+    private final IRecordDescriptorProvider recordDescProvider;
+
+    private final FeedMetaOperatorDescriptor opDesc;
 
     public FeedMetaStoreNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
             int partition, int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
-            Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
+            Map<String, String> feedPolicyProperties, String operationId,
+            FeedMetaOperatorDescriptor feedMetaOperatorDescriptor) throws HyracksDataException {
         this.ctx = ctx;
         this.coreOperator = (AbstractUnaryInputUnaryOutputOperatorNodePushable) ((IActivity) coreOperator)
                 .createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
@@ -106,6 +111,8 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
                 .getApplicationObject()).getFeedManager();
         this.operandId = operationId;
         ctx.setSharedObject(message);
+        this.recordDescProvider = recordDescProvider;
+        this.opDesc = feedMetaOperatorDescriptor;
     }
 
     @Override
@@ -129,7 +136,7 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
         if (LOGGER.isLoggable(Level.WARNING)) {
             LOGGER.warning("Runtime not found for  " + runtimeId + " connection id " + connectionId);
         }
-        this.fta = new FrameTupleAccessor(recordDesc);
+        this.fta = new FrameTupleAccessor(recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0));
         this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, coreOperator,
                 policyEnforcer.getFeedPolicyAccessor(), policyEnforcer.getFeedPolicyAccessor().bufferingEnabled(), fta,
                 recordDesc, feedManager, nPartitions);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
index 60c80f1..c273412 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
@@ -24,7 +24,6 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.util.BitSet;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.asterix.builders.AbvsBuilderFactory;
 import org.apache.asterix.builders.IARecordBuilder;
@@ -34,11 +33,9 @@ import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilderFactory;
 import org.apache.asterix.builders.UnorderedListBuilder;
 import org.apache.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IStreamDataParser;
-import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.om.base.ABoolean;
 import org.apache.asterix.om.base.AMutableInterval;
 import org.apache.asterix.om.base.ANull;
@@ -63,26 +60,24 @@ import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
  * Parser for ADM formatted data.
  */
 public class ADMDataParser extends AbstractDataParser implements IStreamDataParser, IRecordDataParser<char[]> {
-
     private AdmLexer admLexer;
-    private ARecordType recordType;
+    private final ARecordType recordType;
     private boolean isStreamParser = true;
 
     private int nullableFieldId = 0;
-    private ArrayBackedValueStorage castBuffer = new ArrayBackedValueStorage();
+    private final ArrayBackedValueStorage castBuffer = new ArrayBackedValueStorage();
 
-    private IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<IARecordBuilder, ATypeTag>(
+    private final IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<IARecordBuilder, ATypeTag>(
             new RecordBuilderFactory());
-    private IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool = new ListObjectPool<IAsterixListBuilder, ATypeTag>(
+    private final IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool = new ListObjectPool<IAsterixListBuilder, ATypeTag>(
             new ListBuilderFactory());
-    private IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool = new ListObjectPool<IMutableValueStorage, ATypeTag>(
+    private final IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool = new ListObjectPool<IMutableValueStorage, ATypeTag>(
             new AbvsBuilderFactory());
 
     protected final AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
 
-    private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-    private String mismatchErrorMessage2 = " got a value of type ";
-    private Map<String, String> configuration;
+    private final String mismatchErrorMessage = "Type mismatch: expected a value of type ";
+    private final String mismatchErrorMessage2 = " but got a value of type ";
 
     static class ParseException extends HyracksDataException {
         private static final long serialVersionUID = 1L;
@@ -130,12 +125,17 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         }
     }
 
-    public ADMDataParser() {
-        this(null);
+    public ADMDataParser(ARecordType recordType, boolean isStream) throws IOException {
+        this(null, recordType, isStream);
     }
 
-    public ADMDataParser(String filename) {
+    public ADMDataParser(String filename, ARecordType recordType, boolean isStream) throws IOException {
         this.filename = filename;
+        this.recordType = recordType;
+        this.isStreamParser = isStream;
+        if (!isStreamParser) {
+            this.admLexer = new AdmLexer();
+        }
     }
 
     @Override
@@ -152,22 +152,6 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     }
 
     @Override
-    public DataSourceType getDataSourceType() {
-        return ExternalDataUtils.isDataSourceStreamProvider(configuration) ? DataSourceType.STREAM
-                : DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType recordType) throws IOException {
-        this.recordType = recordType;
-        this.configuration = configuration;
-        this.isStreamParser = ExternalDataUtils.isDataSourceStreamProvider(configuration);
-        if (!isStreamParser) {
-            this.admLexer = new AdmLexer();
-        }
-    }
-
-    @Override
     public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws IOException {
         try {
             resetPools();
@@ -182,11 +166,6 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     }
 
     @Override
-    public Class<? extends char[]> getRecordClass() {
-        return char[].class;
-    }
-
-    @Override
     public void setInputStream(InputStream in) throws IOException {
         admLexer = new AdmLexer(new java.io.InputStreamReader(in));
     }
@@ -288,13 +267,13 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             }
             case AdmLexer.TOKEN_STRING_LITERAL: {
                 if (checkType(ATypeTag.STRING, objectType)) {
-                    final String tokenImage = admLexer.getLastTokenImage().substring(1,
+                    String tokenImage = admLexer.getLastTokenImage().substring(1,
                             admLexer.getLastTokenImage().length() - 1);
                     aString.setValue(admLexer.containsEscapes() ? replaceEscapes(tokenImage) : tokenImage);
                     stringSerde.serialize(aString, out);
                 } else if (checkType(ATypeTag.UUID, objectType)) {
                     // Dealing with UUID type that is represented by a string
-                    final String tokenImage = admLexer.getLastTokenImage().substring(1,
+                    String tokenImage = admLexer.getLastTokenImage().substring(1,
                             admLexer.getLastTokenImage().length() - 1);
                     aUUID.parseUUIDString(tokenImage);
                     uuidSerde.serialize(aUUID, out);
@@ -466,7 +445,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         return new String(chars, 0, len - (readpos - writepos));
     }
 
-    private static void moveChars(final char[] chars, final int start, final int end, final int offset) {
+    private static void moveChars(char[] chars, int start, int end, int offset) {
         if (offset == 0) {
             return;
         }
@@ -500,7 +479,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             return expectedTypeTag;
         }
         if (aObjectType.getTypeTag() != ATypeTag.UNION) {
-            final ATypeTag typeTag = aObjectType.getTypeTag();
+            ATypeTag typeTag = aObjectType.getTypeTag();
             if (ATypeHierarchy.canPromote(expectedTypeTag, typeTag)
                     || ATypeHierarchy.canDemote(expectedTypeTag, typeTag)) {
                 return typeTag;
@@ -511,7 +490,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         } else { // union
             List<IAType> unionList = ((AUnionType) aObjectType).getUnionList();
             for (IAType t : unionList) {
-                final ATypeTag typeTag = t.getTypeTag();
+                ATypeTag typeTag = t.getTypeTag();
                 if (ATypeHierarchy.canPromote(expectedTypeTag, typeTag)
                         || ATypeHierarchy.canDemote(expectedTypeTag, typeTag)) {
                     return typeTag;
@@ -569,10 +548,10 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
                         String fldName = admLexer.getLastTokenImage().substring(1,
                                 admLexer.getLastTokenImage().length() - 1);
                         fieldId = recBuilder.getFieldId(fldName);
-                        if (fieldId < 0 && !recType.isOpen()) {
+                        if ((fieldId < 0) && !recType.isOpen()) {
                             throw new ParseException(
                                     "This record is closed, you can not add extra fields! new field name: " + fldName);
-                        } else if (fieldId < 0 && recType.isOpen()) {
+                        } else if ((fieldId < 0) && recType.isOpen()) {
                             aStringFieldName.setValue(admLexer.getLastTokenImage().substring(1,
                                     admLexer.getLastTokenImage().length() - 1));
                             stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
@@ -646,7 +625,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         for (int i = 0; i < recType.getFieldTypes().length; i++) {
             if (nulls.get(i) == false) {
                 IAType type = recType.getFieldTypes()[i];
-                if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION) {
+                if ((type.getTypeTag() != ATypeTag.NULL) && (type.getTypeTag() != ATypeTag.UNION)) {
                     return i;
                 }
 
@@ -730,7 +709,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             token = admLexer.next();
             if (token == AdmLexer.TOKEN_STRING_LITERAL) {
                 long chrononTimeInMs = 0;
-                final String arg = admLexer.getLastTokenImage();
+                String arg = admLexer.getLastTokenImage();
                 switch (tag) {
                     case DATE:
                         chrononTimeInMs += (parseDatePart(arg, 0, arg.length() - 1)
@@ -879,21 +858,21 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     }
 
     private void parseToNumericTarget(ATypeTag typeTag, IAType objectType, DataOutput out) throws IOException {
-        final ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
-        if (targetTypeTag == null || !parseValue(admLexer.getLastTokenImage(), targetTypeTag, out)) {
+        ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
+        if ((targetTypeTag == null) || !parseValue(admLexer.getLastTokenImage(), targetTypeTag, out)) {
             throw new ParseException(mismatchErrorMessage + objectType.getTypeName() + mismatchErrorMessage2 + typeTag);
         }
     }
 
     private void parseAndCastNumeric(ATypeTag typeTag, IAType objectType, DataOutput out) throws IOException {
-        final ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
+        ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
         DataOutput dataOutput = out;
         if (targetTypeTag != typeTag) {
             castBuffer.reset();
             dataOutput = castBuffer.getDataOutput();
         }
 
-        if (targetTypeTag == null || !parseValue(admLexer.getLastTokenImage(), typeTag, dataOutput)) {
+        if ((targetTypeTag == null) || !parseValue(admLexer.getLastTokenImage(), typeTag, dataOutput)) {
             throw new ParseException(mismatchErrorMessage + objectType.getTypeName() + mismatchErrorMessage2 + typeTag);
         }
 
@@ -923,7 +902,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
     }
 
     private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws IOException {
-        final ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
+        ATypeTag targetTypeTag = getTargetTypeTag(typeTag, objectType);
         if (targetTypeTag != null) {
             DataOutput dataOutput = out;
             if (targetTypeTag != typeTag) {
@@ -934,7 +913,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             if (token == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
                 token = admLexer.next();
                 if (token == AdmLexer.TOKEN_STRING_LITERAL) {
-                    final String unquoted = admLexer.getLastTokenImage().substring(1,
+                    String unquoted = admLexer.getLastTokenImage().substring(1,
                             admLexer.getLastTokenImage().length() - 1);
                     if (!parseValue(unquoted, typeTag, dataOutput)) {
                         throw new ParseException("Missing deserializer method for constructor: "
@@ -960,7 +939,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         throw new ParseException(mismatchErrorMessage + objectType.getTypeName() + ". Got " + typeTag + " instead.");
     }
 
-    private boolean parseValue(final String unquoted, ATypeTag typeTag, DataOutput out) throws HyracksDataException {
+    private boolean parseValue(String unquoted, ATypeTag typeTag, DataOutput out) throws HyracksDataException {
         switch (typeTag) {
             case BOOLEAN:
                 parseBoolean(unquoted, out);
@@ -1058,9 +1037,10 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             positive = false;
         }
         for (; offset < int8.length(); offset++) {
-            if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9') {
-                value = (byte) (value * 10 + int8.charAt(offset) - '0');
-            } else if (int8.charAt(offset) == 'i' && int8.charAt(offset + 1) == '8' && offset + 2 == int8.length()) {
+            if ((int8.charAt(offset) >= '0') && (int8.charAt(offset) <= '9')) {
+                value = (byte) (((value * 10) + int8.charAt(offset)) - '0');
+            } else if ((int8.charAt(offset) == 'i') && (int8.charAt(offset + 1) == '8')
+                    && ((offset + 2) == int8.length())) {
                 break;
             } else {
                 throw new ParseException(errorMessage);
@@ -1069,7 +1049,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         if (value < 0) {
             throw new ParseException(errorMessage);
         }
-        if (value > 0 && !positive) {
+        if ((value > 0) && !positive) {
             value *= -1;
         }
         aInt8.setValue(value);
@@ -1089,10 +1069,10 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             positive = false;
         }
         for (; offset < int16.length(); offset++) {
-            if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9') {
-                value = (short) (value * 10 + int16.charAt(offset) - '0');
-            } else if (int16.charAt(offset) == 'i' && int16.charAt(offset + 1) == '1' && int16.charAt(offset + 2) == '6'
-                    && offset + 3 == int16.length()) {
+            if ((int16.charAt(offset) >= '0') && (int16.charAt(offset) <= '9')) {
+                value = (short) (((value * 10) + int16.charAt(offset)) - '0');
+            } else if ((int16.charAt(offset) == 'i') && (int16.charAt(offset + 1) == '1')
+                    && (int16.charAt(offset + 2) == '6') && ((offset + 3) == int16.length())) {
                 break;
             } else {
                 throw new ParseException(errorMessage);
@@ -1101,7 +1081,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         if (value < 0) {
             throw new ParseException(errorMessage);
         }
-        if (value > 0 && !positive) {
+        if ((value > 0) && !positive) {
             value *= -1;
         }
         aInt16.setValue(value);
@@ -1121,10 +1101,10 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             positive = false;
         }
         for (; offset < int32.length(); offset++) {
-            if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9') {
-                value = (value * 10 + int32.charAt(offset) - '0');
-            } else if (int32.charAt(offset) == 'i' && int32.charAt(offset + 1) == '3' && int32.charAt(offset + 2) == '2'
-                    && offset + 3 == int32.length()) {
+            if ((int32.charAt(offset) >= '0') && (int32.charAt(offset) <= '9')) {
+                value = (((value * 10) + int32.charAt(offset)) - '0');
+            } else if ((int32.charAt(offset) == 'i') && (int32.charAt(offset + 1) == '3')
+                    && (int32.charAt(offset + 2) == '2') && ((offset + 3) == int32.length())) {
                 break;
             } else {
                 throw new ParseException(errorMessage);
@@ -1133,7 +1113,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         if (value < 0) {
             throw new ParseException(errorMessage);
         }
-        if (value > 0 && !positive) {
+        if ((value > 0) && !positive) {
             value *= -1;
         }
 
@@ -1154,10 +1134,10 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             positive = false;
         }
         for (; offset < int64.length(); offset++) {
-            if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9') {
-                value = (value * 10 + int64.charAt(offset) - '0');
-            } else if (int64.charAt(offset) == 'i' && int64.charAt(offset + 1) == '6' && int64.charAt(offset + 2) == '4'
-                    && offset + 3 == int64.length()) {
+            if ((int64.charAt(offset) >= '0') && (int64.charAt(offset) <= '9')) {
+                value = (((value * 10) + int64.charAt(offset)) - '0');
+            } else if ((int64.charAt(offset) == 'i') && (int64.charAt(offset + 1) == '6')
+                    && (int64.charAt(offset + 2) == '4') && ((offset + 3) == int64.length())) {
                 break;
             } else {
                 throw new ParseException(errorMessage);
@@ -1166,7 +1146,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
         if (value < 0) {
             throw new ParseException(errorMessage);
         }
-        if (value > 0 && !positive) {
+        if ((value > 0) && !positive) {
             value *= -1;
         }
 

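The reworked integer parsers above all share one shape: an optional sign, a run of digits, and an optional width suffix (i8/i16/i32/i64) that must terminate the literal. A standalone sketch of that logic for the int8 case (a hypothetical helper, not AsterixDB API; digit-presence checks omitted for brevity):

    // Parses ADM int8 literals such as "17", "-5i8", or "+100i8".
    static byte parseInt8Literal(String image) {
        int offset = 0;
        boolean positive = true;
        if (image.charAt(offset) == '+') {
            offset++;
        } else if (image.charAt(offset) == '-') {
            offset++;
            positive = false;
        }
        int value = 0; // accumulate in an int so overflow is easy to detect
        for (; offset < image.length(); offset++) {
            char c = image.charAt(offset);
            if ((c >= '0') && (c <= '9')) {
                value = (value * 10) + (c - '0');
            } else if ((c == 'i') && ((offset + 2) == image.length()) && (image.charAt(offset + 1) == '8')) {
                break; // the "i8" suffix terminates the literal
            } else {
                throw new NumberFormatException("malformed int8 literal: " + image);
            }
            if (value > (positive ? Byte.MAX_VALUE : -(int) Byte.MIN_VALUE)) {
                throw new NumberFormatException("int8 overflow: " + image);
            }
        }
        return (byte) (positive ? value : -value);
    }
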
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
index 7e231a4..725ef22 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
@@ -22,19 +22,14 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.util.Map;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.RecordBuilder;
-import org.apache.asterix.dataflow.data.nontagged.serde.ANullSerializerDeserializer;
 import org.apache.asterix.external.api.IDataParser;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IStreamDataParser;
-import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.om.base.AMutableString;
-import org.apache.asterix.om.base.ANull;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.util.NonTaggedFormatUtil;
@@ -46,7 +41,6 @@ import org.apache.hyracks.dataflow.std.file.FieldCursorForDelimitedDataParser;
 
 public class DelimitedDataParser extends AbstractDataParser implements IStreamDataParser, IRecordDataParser<char[]> {
 
-    private final IValueParserFactory[] valueParserFactories;
     private final char fieldDelimiter;
     private final char quote;
     private final boolean hasHeader;
@@ -60,14 +54,50 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
     private int[] fldIds;
     private ArrayBackedValueStorage[] nameBuffers;
     private boolean areAllNullFields;
-    private boolean isStreamParser = true;
 
     public DelimitedDataParser(IValueParserFactory[] valueParserFactories, char fieldDelimiter, char quote,
-            boolean hasHeader) {
-        this.valueParserFactories = valueParserFactories;
+            boolean hasHeader, ARecordType recordType, boolean isStreamParser) throws HyracksDataException {
         this.fieldDelimiter = fieldDelimiter;
         this.quote = quote;
         this.hasHeader = hasHeader;
+        this.recordType = recordType;
+        valueParsers = new IValueParser[valueParserFactories.length];
+        for (int i = 0; i < valueParserFactories.length; ++i) {
+            valueParsers[i] = valueParserFactories[i].createValueParser();
+        }
+
+        fieldValueBuffer = new ArrayBackedValueStorage();
+        fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
+        recBuilder = new RecordBuilder();
+        recBuilder.reset(recordType);
+        recBuilder.init();
+
+        int n = recordType.getFieldNames().length;
+        fieldTypeTags = new byte[n];
+        for (int i = 0; i < n; i++) {
+            ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+            fieldTypeTags[i] = tag.serialize();
+        }
+
+        fldIds = new int[n];
+        nameBuffers = new ArrayBackedValueStorage[n];
+        AMutableString str = new AMutableString(null);
+        for (int i = 0; i < n; i++) {
+            String name = recordType.getFieldNames()[i];
+            fldIds[i] = recBuilder.getFieldId(name);
+            if (fldIds[i] < 0) {
+                if (!recordType.isOpen()) {
+                    throw new HyracksDataException("Illegal field " + name + " in closed type " + recordType);
+                } else {
+                    nameBuffers[i] = new ArrayBackedValueStorage();
+                    str.setValue(name);
+                    IDataParser.toBytes(str, nameBuffers[i], stringSerde);
+                }
+            }
+        }
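+        // Record-at-a-time input builds its field cursor eagerly here; stream input gets one later in setInputStream().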
+        if (!isStreamParser) {
+            cursor = new FieldCursorForDelimitedDataParser(null, fieldDelimiter, quote);
+        }
     }
 
     @Override
@@ -102,8 +132,7 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
                     throw new HyracksDataException("At record: " + cursor.recordCount + " - Field " + cursor.fieldCount
                             + " is not an optional type so it cannot accept null value. ");
                 }
-                fieldValueBufferOutput.writeByte(ATypeTag.NULL.serialize());
-                ANullSerializerDeserializer.INSTANCE.serialize(ANull.NULL, out);
+                fieldValueBufferOutput.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
             } else {
                 fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
                 // Eliminate double quotes in the field that we are going to parse
@@ -125,54 +154,6 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
     }
 
     @Override
-    public DataSourceType getDataSourceType() {
-        return isStreamParser ? DataSourceType.STREAM : DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType recordType) throws HyracksDataException {
-        this.recordType = recordType;
-        valueParsers = new IValueParser[valueParserFactories.length];
-        for (int i = 0; i < valueParserFactories.length; ++i) {
-            valueParsers[i] = valueParserFactories[i].createValueParser();
-        }
-
-        fieldValueBuffer = new ArrayBackedValueStorage();
-        fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
-        recBuilder = new RecordBuilder();
-        recBuilder.reset(recordType);
-        recBuilder.init();
-
-        int n = recordType.getFieldNames().length;
-        fieldTypeTags = new byte[n];
-        for (int i = 0; i < n; i++) {
-            ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
-            fieldTypeTags[i] = tag.serialize();
-        }
-
-        fldIds = new int[n];
-        nameBuffers = new ArrayBackedValueStorage[n];
-        AMutableString str = new AMutableString(null);
-        for (int i = 0; i < n; i++) {
-            String name = recordType.getFieldNames()[i];
-            fldIds[i] = recBuilder.getFieldId(name);
-            if (fldIds[i] < 0) {
-                if (!recordType.isOpen()) {
-                    throw new HyracksDataException("Illegal field " + name + " in closed type " + recordType);
-                } else {
-                    nameBuffers[i] = new ArrayBackedValueStorage();
-                    str.setValue(name);
-                    IDataParser.toBytes(str, nameBuffers[i], stringSerde);
-                }
-            }
-        }
-        isStreamParser = ExternalDataUtils.isDataSourceStreamProvider(configuration);
-        if (!isStreamParser) {
-            cursor = new FieldCursorForDelimitedDataParser(null, fieldDelimiter, quote);
-        }
-    }
-
-    @Override
     public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws IOException {
         cursor.nextRecord(record.get(), record.size());
         parseRecord(out);
@@ -182,11 +163,6 @@ public class DelimitedDataParser extends AbstractDataParser implements IStreamDa
     }
 
     @Override
-    public Class<? extends char[]> getRecordClass() {
-        return char[].class;
-    }
-
-    @Override
     public void setInputStream(InputStream in) throws IOException {
         cursor = new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
         if (in != null && hasHeader) {

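The shape of this change, as a self-contained sketch under hypothetical names (not the real DelimitedDataParser types): everything formerly wired up in configure(Map, ARecordType) now arrives through the constructor, so a constructed parser is immediately usable:

    import java.util.Arrays;
    import java.util.List;
    import java.util.regex.Pattern;

    public final class ConstructorInjectionSketch {
        interface FieldParser { Object parse(String raw); }

        static final class DelimitedParser {
            private final char delimiter;
            private final List<FieldParser> fieldParsers;

            // No two-step construct-then-configure lifecycle to get wrong.
            DelimitedParser(char delimiter, List<FieldParser> fieldParsers) {
                this.delimiter = delimiter;
                this.fieldParsers = fieldParsers;
            }

            Object[] parse(String record) {
                String[] fields = record.split(Pattern.quote(String.valueOf(delimiter)));
                Object[] out = new Object[fields.length];
                for (int i = 0; i < fields.length; i++) {
                    out[i] = fieldParsers.get(i).parse(fields[i]);
                }
                return out;
            }
        }

        public static void main(String[] args) {
            DelimitedParser p = new DelimitedParser('|',
                    Arrays.asList(Integer::parseInt, String::trim));
            System.out.println(Arrays.toString(p.parse("42| hello "))); // [42, hello]
        }
    }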
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
index 1c91130..6e7e7f3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/HiveRecordParser.java
@@ -21,18 +21,14 @@ package org.apache.asterix.external.parser;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.external.util.HDFSUtils;
 import org.apache.asterix.om.base.temporal.GregorianCalendarSystem;
 import org.apache.asterix.om.types.AOrderedListType;
 import org.apache.asterix.om.types.ARecordType;
@@ -66,7 +62,7 @@ import org.apache.hyracks.util.string.UTF8StringWriter;
 @SuppressWarnings("deprecation")
 public class HiveRecordParser implements IRecordDataParser<Writable> {
 
-    private ARecordType aRecord;
+    private ARecordType recordType;
     private SerDe hiveSerde;
     private StructObjectInspector oi;
     private IARecordBuilder recBuilder;
@@ -79,43 +75,37 @@ public class HiveRecordParser implements IRecordDataParser<Writable> {
     private List<? extends StructField> fieldRefs;
     private UTF8StringWriter utf8Writer = new UTF8StringWriter();
 
-    @Override
-    public DataSourceType getDataSourceType() {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType recordType) throws HyracksDataException {
+    public HiveRecordParser(ARecordType recordType, JobConf hadoopConfiguration, String hiveSerdeClassName)
+            throws HyracksDataException {
         try {
-            this.aRecord = recordType;
-            int n = aRecord.getFieldNames().length;
-            fieldTypes = aRecord.getFieldTypes();
-            JobConf hadoopConfiguration = HDFSUtils.configureHDFSJobConf(configuration);
+            this.recordType = recordType;
+            int n = recordType.getFieldNames().length;
+            fieldTypes = recordType.getFieldTypes();
             // create the Hive table schema.
             Properties tbl = new Properties();
-            tbl.put(Constants.LIST_COLUMNS, getCommaDelimitedColNames(aRecord));
-            tbl.put(Constants.LIST_COLUMN_TYPES, getColTypes(aRecord));
-            String hiveSerdeClassName = configuration.get(ExternalDataConstants.KEY_HIVE_SERDE);
-            if (hiveSerdeClassName == null) {
-                throw new IllegalArgumentException("no hive serde provided for hive deserialized records");
-            }
+            tbl.put(Constants.LIST_COLUMNS, getCommaDelimitedColNames(this.recordType));
+            tbl.put(Constants.LIST_COLUMN_TYPES, getColTypes(this.recordType));
             hiveSerde = (SerDe) Class.forName(hiveSerdeClassName).newInstance();
             hiveSerde.initialize(hadoopConfiguration, tbl);
             oi = (StructObjectInspector) hiveSerde.getObjectInspector();
-
             fieldValueBuffer = new ArrayBackedValueStorage();
             recBuilder = new RecordBuilder();
-            recBuilder.reset(aRecord);
+            recBuilder.reset(recordType);
             recBuilder.init();
             fieldTypeTags = new byte[n];
             for (int i = 0; i < n; i++) {
-                ATypeTag tag = aRecord.getFieldTypes()[i].getTypeTag();
+                ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
                 fieldTypeTags[i] = tag.serialize();
             }
             fieldRefs = oi.getAllStructFieldRefs();
         } catch (Exception e) {
             throw new HyracksDataException(e);
         }
     }
 
     @Override
@@ -123,9 +113,9 @@ public class HiveRecordParser implements IRecordDataParser<Writable> {
         try {
             Writable hiveRawRecord = record.get();
             Object hiveObject = hiveSerde.deserialize(hiveRawRecord);
-            int n = aRecord.getFieldNames().length;
+            int n = recordType.getFieldNames().length;
             List<Object> attributesValues = oi.getStructFieldsDataAsList(hiveObject);
-            recBuilder.reset(aRecord);
+            recBuilder.reset(recordType);
             recBuilder.init();
             for (int i = 0; i < n; i++) {
                 final Object value = attributesValues.get(i);
@@ -196,11 +186,6 @@ public class HiveRecordParser implements IRecordDataParser<Writable> {
         }
     }
 
-    @Override
-    public Class<? extends Writable> getRecordClass() {
-        return Writable.class;
-    }
-
     private Object getColTypes(ARecordType record) throws Exception {
         int n = record.getFieldTypes().length;
         if (n < 1) {

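For reference, the two-property schema a Hive SerDe is initialized with, shown standalone; the literal keys and the colon-delimited type list are assumptions standing in for Constants.LIST_COLUMNS / Constants.LIST_COLUMN_TYPES and for what getCommaDelimitedColNames/getColTypes produce:

    import java.util.Properties;

    public final class HiveSchemaSketch {
        public static void main(String[] args) {
            Properties tbl = new Properties();
            tbl.put("columns", "id,name,age");          // comma-delimited column names
            tbl.put("columns.types", "int:string:int"); // colon-delimited column types
            System.out.println(tbl);
        }
    }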
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
index 3a3bd7d..5336c30 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RSSParser.java
@@ -20,17 +20,14 @@ package org.apache.asterix.external.parser;
 
 import java.io.DataOutput;
 import java.io.IOException;
-import java.util.Map;
 
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.external.api.IDataParser;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.om.base.AMutableRecord;
 import org.apache.asterix.om.base.AMutableString;
 import org.apache.asterix.om.types.ARecordType;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 import com.sun.syndication.feed.synd.SyndEntryImpl;
 
@@ -43,14 +40,7 @@ public class RSSParser implements IRecordDataParser<SyndEntryImpl> {
     private RecordBuilder recordBuilder = new RecordBuilder();
     private int numFields;
 
-    @Override
-    public DataSourceType getDataSourceType() {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType recordType)
-            throws HyracksDataException, IOException {
+    public RSSParser(ARecordType recordType) {
         mutableFields = new AMutableString[] { new AMutableString(null), new AMutableString(null),
                 new AMutableString(null), new AMutableString(null) };
         mutableRecord = new AMutableRecord(recordType, mutableFields);
@@ -74,10 +64,4 @@ public class RSSParser implements IRecordDataParser<SyndEntryImpl> {
         IDataParser.writeRecord(mutableRecord, out, recordBuilder);
         id++;
     }
-
-    @Override
-    public Class<? extends SyndEntryImpl> getRecordClass() {
-        return SyndEntryImpl.class;
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
index 67d84b5..38932d3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
@@ -20,92 +20,93 @@ package org.apache.asterix.external.parser;
 
 import java.io.DataOutput;
 import java.io.IOException;
-import java.util.Map;
 
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.external.api.IDataParser;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordConverter;
 import org.apache.asterix.external.api.IRecordDataParser;
-import org.apache.asterix.external.input.record.RecordWithMetadata;
+import org.apache.asterix.external.api.IRecordWithMetaDataParser;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.om.base.AMutableString;
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
-public class RecordWithMetadataParser<T> implements IRecordDataParser<RecordWithMetadata<T>> {
+public class RecordWithMetadataParser<T, O> implements IRecordWithMetaDataParser<T> {
 
-    private final Class<? extends RecordWithMetadata<T>> clazz;
-    private final int[] metaIndexes;
-    private final int valueIndex;
-    private ARecordType recordType;
-    private IRecordDataParser<T> valueParser;
-    private RecordBuilder recBuilder;
-    private ArrayBackedValueStorage[] nameBuffers;
-    private int numberOfFields;
-    private ArrayBackedValueStorage valueBuffer = new ArrayBackedValueStorage();
+    private final IRecordConverter<T, RecordWithMetadataAndPK<O>> converter;
+    private RecordWithMetadataAndPK<O> rwm;
+    private final IRecordDataParser<O> recordParser;
+    private final ARecordType metaType;
+    private final RecordBuilder metaBuilder;
+    private final ArrayBackedValueStorage[] metaFieldsNamesBuffers;
+    private final int numberOfMetaFields;
     @SuppressWarnings("unchecked")
-    private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+    private final ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(BuiltinType.ASTRING);
 
-    public RecordWithMetadataParser(Class<? extends RecordWithMetadata<T>> clazz, int[] metaIndexes,
-            IRecordDataParser<T> valueParser, int valueIndex) {
-        this.clazz = clazz;
-        this.metaIndexes = metaIndexes;
-        this.valueParser = valueParser;
-        this.valueIndex = valueIndex;
-    }
-
-    @Override
-    public DataSourceType getDataSourceType() {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType recordType)
-            throws HyracksDataException, IOException {
-        this.recordType = recordType;
-        this.numberOfFields = recordType.getFieldNames().length;
-        recBuilder = new RecordBuilder();
-        recBuilder.reset(recordType);
-        recBuilder.init();
-        nameBuffers = new ArrayBackedValueStorage[numberOfFields];
+    public RecordWithMetadataParser(ARecordType metaType, IRecordDataParser<O> valueParser,
+            IRecordConverter<T, RecordWithMetadataAndPK<O>> converter) throws HyracksDataException {
+        this.recordParser = valueParser;
+        this.converter = converter;
+        this.metaType = metaType;
+        this.numberOfMetaFields = metaType.getFieldNames().length;
+        metaBuilder = new RecordBuilder();
+        metaBuilder.reset(metaType);
+        metaBuilder.init();
+        metaFieldsNamesBuffers = new ArrayBackedValueStorage[numberOfMetaFields];
         AMutableString str = new AMutableString(null);
-        for (int i = 0; i < numberOfFields; i++) {
-            String name = recordType.getFieldNames()[i];
-            nameBuffers[i] = new ArrayBackedValueStorage();
+        for (int i = 0; i < numberOfMetaFields; i++) {
+            String name = metaType.getFieldNames()[i];
+            metaFieldsNamesBuffers[i] = new ArrayBackedValueStorage();
             str.setValue(name);
-            IDataParser.toBytes(str, nameBuffers[i], stringSerde);
+            IDataParser.toBytes(str, metaFieldsNamesBuffers[i], stringSerde);
         }
     }
 
     @Override
-    public Class<? extends RecordWithMetadata<T>> getRecordClass() {
-        return clazz;
+    public void parse(IRawRecord<? extends T> record, DataOutput out) throws HyracksDataException {
+        try {
+            rwm = converter.convert(record);
+            if (rwm.getRecord().size() == 0) {
+                // null record
+                out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
+            } else {
+                recordParser.parse(rwm.getRecord(), out);
+            }
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     @Override
-    public void parse(IRawRecord<? extends RecordWithMetadata<T>> record, DataOutput out) throws HyracksDataException {
+    public void parseMeta(DataOutput out) throws HyracksDataException {
         try {
-            recBuilder.reset(recordType);
-            valueBuffer.reset();
-            recBuilder.init();
-            RecordWithMetadata<T> rwm = record.get();
-            for (int i = 0; i < numberOfFields; i++) {
-                if (i == valueIndex) {
-                    valueParser.parse(rwm.getRecord(), valueBuffer.getDataOutput());
-                    recBuilder.addField(i, valueBuffer);
-                } else {
-                    recBuilder.addField(i, rwm.getMetadata(metaIndexes[i]));
+            if (rwm.getRecord().size() == 0) {
+                out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
+            } else {
+                metaBuilder.reset(metaType);
+                metaBuilder.init();
+                // parse the meta fields
+                for (int i = 0; i < numberOfMetaFields; i++) {
+                    metaBuilder.addField(i, rwm.getMetadata(i));
                 }
+                // write the meta record
+                metaBuilder.write(out, true);
             }
-            recBuilder.write(out, true);
         } catch (IOException e) {
             throw new HyracksDataException(e);
         }
     }
+
+    public void appendPK(ArrayTupleBuilder tb) throws IOException {
+        rwm.appendPk(tb);
+    }
 }
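
A minimal sketch of the two-call contract this class implements, under hypothetical types (the real converter and parsers come from IRecordConverter/IRecordDataParser): the raw input is converted once, parse(...) emits the value record, and parseMeta(...) then emits the meta record for the same input:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public final class TwoPhaseParseSketch {
        private String[] converted; // cached between parse() and parseMeta()

        void parse(String raw, DataOutputStream out) throws IOException {
            converted = raw.split(";", 2); // stand-in for IRecordConverter.convert
            out.writeUTF(converted[0]);    // the value record
        }

        void parseMeta(DataOutputStream out) throws IOException {
            out.writeUTF(converted[1]);    // meta derived from the same input
        }

        public static void main(String[] args) throws IOException {
            TwoPhaseParseSketch p = new TwoPhaseParseSketch();
            DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());
            p.parse("record-payload;meta-payload", out);
            p.parseMeta(out); // must follow parse() for the same record
        }
    }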

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithPKDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithPKDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithPKDataParser.java
new file mode 100644
index 0000000..b8c265c
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithPKDataParser.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.parser;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordDataParser;
+import org.apache.asterix.external.api.IRecordWithPKDataParser;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class RecordWithPKDataParser<T> implements IRecordWithPKDataParser<T> {
+    private final IRecordDataParser<T> recordParser;
+
+    public RecordWithPKDataParser(IRecordDataParser<T> recordParser, IAType[] pkTypes) {
+        this.recordParser = recordParser;
+    }
+
+    @Override
+    public void parse(IRawRecord<? extends T> record, DataOutput out) throws IOException {
+        if (record.size() == 0) {
+            out.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
+        } else {
+            recordParser.parse(record, out);
+        }
+    }
+
+    @Override
+    public void appendKeys(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
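+        // No-op: this implementation emits only the record payload and appends no key fields.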
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java
new file mode 100644
index 0000000..3d86abd
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.parser;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordDataParser;
+import org.apache.asterix.external.api.IRecordWithPKDataParser;
+import org.apache.asterix.external.input.record.RecordWithPK;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class TestRecordWithPKParser<T> implements IRecordWithPKDataParser<RecordWithPK<T>> {
+
+    private final IRecordDataParser<T> recordParser;
+
+    public TestRecordWithPKParser(final IRecordDataParser<T> recordParser) {
+        this.recordParser = recordParser;
+    }
+
+    @Override
+    public void parse(final IRawRecord<? extends RecordWithPK<T>> record, final DataOutput out) throws IOException {
+        recordParser.parse(record.get().getRecord(), out);
+    }
+
+    @Override
+    public void appendKeys(final ArrayTupleBuilder tb, final IRawRecord<? extends RecordWithPK<T>> record)
+            throws IOException {
+        record.get().appendPk(tb);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
index 7ae3303..522da06 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
@@ -19,13 +19,11 @@
 package org.apache.asterix.external.parser;
 
 import java.io.DataOutput;
-import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.external.api.IDataParser;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.library.java.JObjectUtil;
@@ -53,14 +51,7 @@ public class TweetParser implements IRecordDataParser<Status> {
     private final Map<String, Integer> tweetFieldNameMap = new HashMap<>();
     private RecordBuilder recordBuilder = new RecordBuilder();
 
-    @Override
-    public DataSourceType getDataSourceType() {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, ARecordType recordType)
-            throws HyracksDataException, IOException {
+    public TweetParser(ARecordType recordType) {
         initFieldNames(recordType);
         mutableUserFields = new IAObject[] { new AMutableString(null), new AMutableString(null), new AMutableInt32(0),
                 new AMutableInt32(0), new AMutableString(null), new AMutableInt32(0) };
@@ -70,7 +61,6 @@ public class TweetParser implements IRecordDataParser<Status> {
         mutableTweetFields = new IAObject[] { new AMutableString(null), mutableUser, new AMutableDouble(0),
                 new AMutableDouble(0), new AMutableString(null), new AMutableString(null) };
         mutableRecord = new AMutableRecord(recordType, mutableTweetFields);
-
     }
 
     // Initialize the hashmap values for the field names and positions
@@ -135,10 +125,4 @@ public class TweetParser implements IRecordDataParser<Status> {
         recordBuilder.init();
         IDataParser.writeRecord(mutableRecord, out, recordBuilder);
     }
-
-    @Override
-    public Class<? extends Status> getRecordClass() {
-        return Status.class;
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/ADMDataParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/ADMDataParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/ADMDataParserFactory.java
index 4634278..7732820 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/ADMDataParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/ADMDataParserFactory.java
@@ -18,9 +18,12 @@
  */
 package org.apache.asterix.external.parser.factory;
 
+import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IStreamDataParser;
 import org.apache.asterix.external.parser.ADMDataParser;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -35,8 +38,8 @@ public class ADMDataParserFactory extends AbstractRecordStreamParserFactory<char
 
     private ADMDataParser createParser() throws HyracksDataException {
         try {
-            ADMDataParser parser = new ADMDataParser();
-            parser.configure(configuration, recordType);
+            ADMDataParser parser = new ADMDataParser(recordType,
+                    ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.STREAM));
             return parser;
         } catch (Exception e) {
             throw new HyracksDataException(e);
@@ -53,4 +56,9 @@ public class ADMDataParserFactory extends AbstractRecordStreamParserFactory<char
             throws HyracksDataException {
         return createParser();
     }
+
+    @Override
+    public void setMetaType(ARecordType metaType) {
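+        // No-op: this parser factory does not consume a separate meta record type.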
+    }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/AbstractRecordStreamParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/AbstractRecordStreamParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/AbstractRecordStreamParserFactory.java
index 43af455..8fd02dd 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/AbstractRecordStreamParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/AbstractRecordStreamParserFactory.java
@@ -20,7 +20,6 @@ package org.apache.asterix.external.parser.factory;
 
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
 import org.apache.asterix.external.api.IStreamDataParserFactory;
@@ -35,7 +34,7 @@ public abstract class AbstractRecordStreamParserFactory<T>
     protected Map<String, String> configuration;
 
     @Override
-    public DataSourceType getDataSourceType() throws AsterixException {
+    public DataSourceType getDataSourceType() {
         return ExternalDataUtils.getDataSourceType(configuration);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
index fa63d45..f724b48 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
@@ -20,12 +20,13 @@ package org.apache.asterix.external.parser.factory;
 
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IStreamDataParser;
 import org.apache.asterix.external.parser.DelimitedDataParser;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
@@ -35,19 +36,17 @@ public class DelimitedDataParserFactory extends AbstractRecordStreamParserFactor
     private static final long serialVersionUID = 1L;
 
     @Override
-    public IRecordDataParser<char[]> createRecordParser(IHyracksTaskContext ctx)
-            throws HyracksDataException, AsterixException {
+    public IRecordDataParser<char[]> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException {
         return createParser();
     }
 
-    private DelimitedDataParser createParser() throws HyracksDataException, AsterixException {
+    private DelimitedDataParser createParser() throws HyracksDataException {
         IValueParserFactory[] valueParserFactories = ExternalDataUtils.getValueParserFactories(recordType);
         Character delimiter = DelimitedDataParserFactory.getDelimiter(configuration);
         char quote = DelimitedDataParserFactory.getQuote(configuration, delimiter);
         boolean hasHeader = ExternalDataUtils.hasHeader(configuration);
-        DelimitedDataParser parser = new DelimitedDataParser(valueParserFactories, delimiter, quote, hasHeader);
-        parser.configure(configuration, recordType);
-        return parser;
+        return new DelimitedDataParser(valueParserFactories, delimiter, quote, hasHeader, recordType,
+                ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.STREAM));
     }
 
     @Override
@@ -57,17 +56,17 @@ public class DelimitedDataParserFactory extends AbstractRecordStreamParserFactor
 
     @Override
     public IStreamDataParser createInputStreamParser(IHyracksTaskContext ctx, int partition)
-            throws HyracksDataException, AsterixException {
+            throws HyracksDataException {
         return createParser();
     }
 
     // Get a delimiter from the given configuration
-    public static char getDelimiter(Map<String, String> configuration) throws AsterixException {
+    public static char getDelimiter(Map<String, String> configuration) throws HyracksDataException {
         String delimiterValue = configuration.get(ExternalDataConstants.KEY_DELIMITER);
         if (delimiterValue == null) {
             delimiterValue = ExternalDataConstants.DEFAULT_DELIMITER;
         } else if (delimiterValue.length() != 1) {
-            throw new AsterixException(
+            throw new HyracksDataException(
                     "'" + delimiterValue + "' is not a valid delimiter. The length of a delimiter should be 1.");
         }
         return delimiterValue.charAt(0);
@@ -75,21 +74,27 @@ public class DelimitedDataParserFactory extends AbstractRecordStreamParserFactor
 
     // Get a quote from the given configuration when the delimiter is given
     // Need to pass delimiter to check whether they share the same character
-    public static char getQuote(Map<String, String> configuration, char delimiter) throws AsterixException {
+    public static char getQuote(Map<String, String> configuration, char delimiter) throws HyracksDataException {
         String quoteValue = configuration.get(ExternalDataConstants.KEY_QUOTE);
         if (quoteValue == null) {
             quoteValue = ExternalDataConstants.DEFAULT_QUOTE;
         } else if (quoteValue.length() != 1) {
-            throw new AsterixException("'" + quoteValue + "' is not a valid quote. The length of a quote should be 1.");
+            throw new HyracksDataException(
+                    "'" + quoteValue + "' is not a valid quote. The length of a quote should be 1.");
         }
 
         // Since delimiter (char type value) can't be null,
         // we only check whether delimiter and quote use the same character
         if (quoteValue.charAt(0) == delimiter) {
-            throw new AsterixException(
+            throw new HyracksDataException(
                     "Quote '" + quoteValue + "' cannot be used with the delimiter '" + delimiter + "'. ");
         }
 
         return quoteValue.charAt(0);
     }
+
+    @Override
+    public void setMetaType(ARecordType metaType) {
+    }
+
 }
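
The validation rules above, restated as a runnable sketch; the literal "delimiter" and "quote" keys are assumptions standing in for ExternalDataConstants.KEY_DELIMITER/KEY_QUOTE:

    import java.util.HashMap;
    import java.util.Map;

    public final class DelimiterQuoteSketch {
        static char singleChar(Map<String, String> conf, String key, String dflt) {
            String v = conf.getOrDefault(key, dflt);
            if (v.length() != 1) { // a delimiter or quote must be one character
                throw new IllegalArgumentException(
                        "'" + v + "' is not a valid " + key + ". The length should be 1.");
            }
            return v.charAt(0);
        }

        public static void main(String[] args) {
            Map<String, String> conf = new HashMap<>();
            conf.put("delimiter", "|");
            char delimiter = singleChar(conf, "delimiter", ",");
            char quote = singleChar(conf, "quote", "\"");
            if (quote == delimiter) { // one character cannot play both roles
                throw new IllegalArgumentException("Quote '" + quote
                        + "' cannot be used with the delimiter '" + delimiter + "'.");
            }
            System.out.println("delimiter=" + delimiter + ", quote=" + quote);
        }
    }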

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java
index f07ba4c..a4c8679 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java
@@ -20,11 +20,12 @@ package org.apache.asterix.external.parser.factory;
 
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
-import org.apache.asterix.external.parser.HiveRecordParser;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
+import org.apache.asterix.external.parser.HiveRecordParser;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.HDFSUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hadoop.io.Writable;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -35,6 +36,7 @@ public class HiveDataParserFactory implements IRecordDataParserFactory<Writable>
     private static final long serialVersionUID = 1L;
     private Map<String, String> configuration;
     private ARecordType recordType;
+    private String hiveSerdeClassName;
 
     @Override
     public DataSourceType getDataSourceType() {
@@ -44,6 +46,10 @@ public class HiveDataParserFactory implements IRecordDataParserFactory<Writable>
     @Override
     public void configure(Map<String, String> configuration) {
         this.configuration = configuration;
+        hiveSerdeClassName = configuration.get(ExternalDataConstants.KEY_HIVE_SERDE);
+        if (hiveSerdeClassName == null) {
+            throw new IllegalArgumentException("no hive serde provided for hive deserialized records");
+        }
     }
 
     @Override
@@ -52,11 +58,8 @@ public class HiveDataParserFactory implements IRecordDataParserFactory<Writable>
     }
 
     @Override
-    public IRecordDataParser<Writable> createRecordParser(IHyracksTaskContext ctx)
-            throws HyracksDataException, AsterixException {
-        HiveRecordParser hiveParser = new HiveRecordParser();
-        hiveParser.configure(configuration, recordType);
-        return hiveParser;
+    public IRecordDataParser<Writable> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException {
+        return new HiveRecordParser(recordType, HDFSUtils.configureHDFSJobConf(configuration), hiveSerdeClassName);
     }
 
     @Override
@@ -64,4 +67,8 @@ public class HiveDataParserFactory implements IRecordDataParserFactory<Writable>
         return Writable.class;
     }
 
+    @Override
+    public void setMetaType(ARecordType metaType) {
+    }
+
 }
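
The fail-fast move here, sketched standalone with hypothetical names ("hive-serde" stands in for ExternalDataConstants.KEY_HIVE_SERDE): the serde class name is checked once in configure() rather than on every createRecordParser() call:

    import java.util.Map;

    public final class FailFastConfigureSketch {
        private String serdeClassName;

        void configure(Map<String, String> conf) {
            serdeClassName = conf.get("hive-serde");
            if (serdeClassName == null) { // reject bad config before any parser exists
                throw new IllegalArgumentException(
                        "no hive serde provided for hive deserialized records");
            }
        }

        Object createParser() throws ReflectiveOperationException {
            return Class.forName(serdeClassName).getDeclaredConstructor().newInstance();
        }
    }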

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RSSParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RSSParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RSSParserFactory.java
index fecb0de..4f699fc 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RSSParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RSSParserFactory.java
@@ -18,11 +18,8 @@
  */
 package org.apache.asterix.external.parser.factory;
 
-import java.io.IOException;
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
 import org.apache.asterix.external.parser.RSSParser;
@@ -35,16 +32,9 @@ public class RSSParserFactory implements IRecordDataParserFactory<SyndEntryImpl>
 
     private static final long serialVersionUID = 1L;
     private ARecordType recordType;
-    private Map<String, String> configuration;
 
     @Override
-    public DataSourceType getDataSourceType() throws AsterixException {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        this.configuration = configuration;
+    public void configure(Map<String, String> configuration) {
     }
 
     @Override
@@ -53,10 +43,8 @@ public class RSSParserFactory implements IRecordDataParserFactory<SyndEntryImpl>
     }
 
     @Override
-    public IRecordDataParser<SyndEntryImpl> createRecordParser(IHyracksTaskContext ctx)
-            throws AsterixException, IOException {
-        RSSParser dataParser = new RSSParser();
-        dataParser.configure(configuration, recordType);
+    public IRecordDataParser<SyndEntryImpl> createRecordParser(IHyracksTaskContext ctx) {
+        RSSParser dataParser = new RSSParser(recordType);
         return dataParser;
     }
 
@@ -65,4 +53,8 @@ public class RSSParserFactory implements IRecordDataParserFactory<SyndEntryImpl>
         return SyndEntryImpl.class;
     }
 
+    @Override
+    public void setMetaType(ARecordType metaType) {
+    }
+
 }


[13/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
index c0709a0..02535c5 100644
--- a/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
@@ -1,99 +1,99 @@
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID
 : 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatc
 hTime": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": 122, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "Cond
 orPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "error", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 
 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "undefined", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-
 surf-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "ssericksen@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 
 1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": 122, "PeriodicReleaseExpr":
  "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "P
 eriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e272.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulat
 ion_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 10
 23 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=(
  JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicRele
 ase": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e286.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_con
 dor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1
 023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr
 =( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRe
 lease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e457.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_co
 ndor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( 
 ( ResidentSetSize + 1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partiti
 onableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy
 ,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expec
 tedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_J
 obStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDr
 ainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Residen
 tSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUse
 rCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/da
 ta/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Arg
 s": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr":
  "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "P
 eriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e445.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Si
 mulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr":
  "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "P
 eriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e372.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Si
 mulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Parti
 tionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBu
 sy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Exp
 ectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19
 _JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGraceful
 DrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Resid
 entSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Lo
 calUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_
 condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607
 .0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 102
 3 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "ex
 pr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 26620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "P
 eriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c064.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/
 Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr":
  "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "P
 eriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e423.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Si
 mulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "Pe
 riodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simu
 lation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "ex
 pr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "Perio
 dicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e442.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simula
 tion_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Parti
 tionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBu
 sy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Exp
 ectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19
 _JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGraceful
 DrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Resid
 entSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Lo
 calUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_
 condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233
 .0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133562, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_523030,ChtcWrapper333.out,AuditLog.333,simu_3_333.txt,harvest.log,333.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25876, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/333/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25876.0d, "LastJobLeaseRenewal": 1446133562, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "333+333", "Pe
 riodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 157.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/333/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133562, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Sim
 ulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582154, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25876.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=333 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14242#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/333", "QDate": 1446105553, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582177.0#1446105581", "StatsLifetimeStarter": 25025, "JobStartDate": 1446108665, "SubmitEventNotes": "DAG Node: 145+145", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.57", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108665, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25026.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133691, "ResidentSetSize_RAW": 73308, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 102
 3 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24770.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 120972, "BytesSent": 28290.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446133691, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 146, "SpooledOutputFiles": "CURLTIME_4179033,ChtcWrapper145.out,AuditLog.145,simu_3_145.txt,harvest.log,145.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108666, "ExitBySignal": false, "LastMatchTime": 1446108665, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 796, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "
 expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 28476, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25026, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/145/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25026.0d, "LastJobLeaseRenewal": 1446133691, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "145+145", 
 "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c038.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 217.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/145/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133691, "StreamErr": false, "RecentBlockReadKbytes": 1932, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\n", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally
 _2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582177, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25026.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=145 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.57:49793>#1445322694#1541#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/145", "QDate": 1446105581, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582178.0#1446105581", "StatsLifetimeStarter": 24871, "JobStartDate": 1446108666, "SubmitEventNotes": "DAG Node: 154+154", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.158", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108666, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24874.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133540, "ResidentSetSize_RAW": 125792, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 );\n", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24626.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30559.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133540, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1382128,ChtcWrapper154.out,AuditLog.154,simu_3_154.txt,harvest.log,154.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108668, "ExitBySignal": false, "LastMatchTime": 1446108666, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr":
  "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\n", "ExitCode": 0, "JobNoti

<TRUNCATED>


[15/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm
index 3b2225d..858285e 100644
--- a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm
@@ -1,100 +1,100 @@
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID
 : 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatch
 Time": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": 122, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "Condor
 Platform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "error", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) 
 ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "undefined", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-sur
 f-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "ssericksen@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 
 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": 122, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e272.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation
 _condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 10
 23 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( 
 JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicReleas
 e": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e286.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor
 /model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1
 023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=
 ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRele
 ase": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e457.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condo
 r/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( 
 ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partitio
 nableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,
 Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expect
 edMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_Jo
 bStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDra
 iningCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Resident
 SetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data
 /shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args":
  "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e445.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e372.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partit
 ionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBus
 y,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expe
 ctedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_
 JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulD
 rainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Reside
 ntSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Loca
 lUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_co
 ndor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d
 , "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 102
 3 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "exp
 r=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 26620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c064.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Sim
 ulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e423.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "
 expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "Peri
 odicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulat
 ion_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "exp
 r=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "Periodi
 cRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e442.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulatio
 n_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582124.0#1446105525", "StatsLifetimeStarter": 24745, "JobStartDate": 1446107685, "SubmitEventNotes": "DAG Node: 323+323", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 13, "StartdPrincipal": "execute-side@matchsession/128.104.55.89", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107685, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 75000, "RemoteWallClockTime": 24748.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132433, "ResidentSetSize_RAW": 71248, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( 
 ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 21145.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 118000, "BytesSent": 30560.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partitiona
 bleSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Sl
 ot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expected
 MachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobS
 tarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrain
 ingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSe
 tSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132434, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 314, "SpooledOutputFiles": "harvest.log,CURLTIME_3853266,ChtcWrapper323.out,AuditLog.323,simu_3_323.txt,323.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107686, "ExitBySignal": false, "LastMatchTime": 1446107685, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 1142, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 43788, "NumJobMatches": 1, "L
 ocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24748, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/323/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24748.0d, "LastJobLeaseRenewal": 1446132433, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "323+323", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c070.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 175.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/323/,/home/xguo23/finally_2/Simulatio
 n_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132433, "StreamErr": false, "RecentBlockReadKbytes": 4224, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582124, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 2
 4748.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=323 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.89:32652>#1445371750#1302#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/323", "QDate": 1446105525, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partit
 ionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBus
 y,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expe
 ctedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_
 JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulD
 rainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Reside
 ntSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Loca
 lUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_co
 ndor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d
 , "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "Im

<TRUNCATED>


[07/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RecordWithMetadataParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RecordWithMetadataParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RecordWithMetadataParserFactory.java
index 88a0683..96c592a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RecordWithMetadataParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/RecordWithMetadataParserFactory.java
@@ -18,67 +18,57 @@
  */
 package org.apache.asterix.external.parser.factory;
 
-import java.io.IOException;
 import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
-import org.apache.asterix.external.input.record.RecordWithMetadata;
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+import org.apache.asterix.external.input.record.converter.IRecordConverterFactory;
 import org.apache.asterix.external.parser.RecordWithMetadataParser;
 import org.apache.asterix.external.provider.ParserFactoryProvider;
+import org.apache.asterix.external.provider.RecordConverterFactoryProvider;
+import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
-public class RecordWithMetadataParserFactory<T> implements IRecordDataParserFactory<RecordWithMetadata<T>> {
+public class RecordWithMetadataParserFactory<I, O> implements IRecordDataParserFactory<I> {
 
     private static final long serialVersionUID = 1L;
-    private Class<? extends RecordWithMetadata<T>> recordClass;
+    private ARecordType metaType;
     private ARecordType recordType;
-    private int[] metaIndexes;
-    private IRecordDataParserFactory<T> valueParserFactory;
-    private int valueIndex;
-
-    @Override
-    public DataSourceType getDataSourceType() throws AsterixException {
-        return DataSourceType.RECORDS;
-    }
+    private IRecordDataParserFactory<O> recordParserFactory;
+    private IRecordConverterFactory<I, RecordWithMetadataAndPK<O>> converterFactory;
 
     @SuppressWarnings("unchecked")
     @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        // validation first
-        if (!configuration.containsKey(ExternalDataConstants.KEY_META_INDEXES)) {
-            throw new HyracksDataException(
-                    "the parser parameter (" + ExternalDataConstants.KEY_META_INDEXES + ") is missing");
-        }
-        if (!configuration.containsKey(ExternalDataConstants.KEY_VALUE_INDEX)) {
-            throw new HyracksDataException(
-                    "the parser parameter (" + ExternalDataConstants.KEY_VALUE_INDEX + ") is missing");
-        }
-        if (!configuration.containsKey(ExternalDataConstants.KEY_VALUE_FORMAT)) {
-            throw new HyracksDataException(
-                    "the parser parameter (" + ExternalDataConstants.KEY_VALUE_FORMAT + ") is missing");
+    public void configure(Map<String, String> configuration) throws AsterixException {
+        // validate first
+        String recordFormat = configuration.get(ExternalDataConstants.KEY_RECORD_FORMAT);
+        if (recordFormat == null) {
+            throw new AsterixException(
+                    "Unknown record format for a record with meta parser. Did you specify the parameter "
+                            + ExternalDataConstants.KEY_RECORD_FORMAT);
         }
-        // get meta field indexes
-        String[] stringMetaIndexes = configuration.get(ExternalDataConstants.KEY_META_INDEXES).split(",");
-        metaIndexes = new int[stringMetaIndexes.length];
-        for (int i = 0; i < stringMetaIndexes.length; i++) {
-            metaIndexes[i] = Integer.parseInt(stringMetaIndexes[i].trim());
+        String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
+        if (format == null) {
+            throw new AsterixException("Unknown format for a record with meta parser. Did you specify the parameter "
+                    + ExternalDataConstants.KEY_FORMAT);
         }
-        // get value index
-        valueIndex = Integer.parseInt(configuration.get(ExternalDataConstants.KEY_VALUE_INDEX).trim());
-        // get value format
-        configuration.put(ExternalDataConstants.KEY_DATA_PARSER,
-                configuration.get(ExternalDataConstants.KEY_VALUE_FORMAT));
-        valueParserFactory = (IRecordDataParserFactory<T>) ParserFactoryProvider.getDataParserFactory(configuration);
-        valueParserFactory.setRecordType((ARecordType) recordType.getFieldTypes()[valueIndex]);
-        valueParserFactory.configure(configuration);
-        recordClass = (Class<? extends RecordWithMetadata<T>>) (new RecordWithMetadata<T>(
-                valueParserFactory.getRecordClass())).getClass();
+        // Create Parser Factory
+        recordParserFactory = (IRecordDataParserFactory<O>) ParserFactoryProvider.getDataParserFactory(recordFormat);
+        recordParserFactory.setRecordType(recordType);
+        recordParserFactory.setMetaType(metaType);
+        recordParserFactory.configure(configuration);
+        // Create Converter Factory
+        converterFactory = RecordConverterFactoryProvider.getConverterFactory(format, recordFormat);
+        converterFactory.setRecordType(recordType);
+        converterFactory.setMetaType(metaType);
+        converterFactory.configure(configuration);
+        // Validate Compatibility
+        ExternalDataCompatibilityUtils.validateCompatibility(recordParserFactory, converterFactory);
     }
 
     @Override
@@ -87,14 +77,19 @@ public class RecordWithMetadataParserFactory<T> implements IRecordDataParserFact
     }
 
     @Override
-    public IRecordDataParser<RecordWithMetadata<T>> createRecordParser(IHyracksTaskContext ctx)
-            throws HyracksDataException, AsterixException, IOException {
-        IRecordDataParser<T> valueParser = valueParserFactory.createRecordParser(ctx);
-        return new RecordWithMetadataParser<T>(recordClass, metaIndexes, valueParser, valueIndex);
+    public void setMetaType(ARecordType metaType) {
+        this.metaType = metaType;
     }
 
     @Override
-    public Class<? extends RecordWithMetadata<T>> getRecordClass() {
-        return recordClass;
+    public Class<?> getRecordClass() {
+        return converterFactory.getInputClass();
     }
+
+    @Override
+    public IRecordDataParser<I> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException {
+        IRecordDataParser<O> recordParser = recordParserFactory.createRecordParser(ctx);
+        return new RecordWithMetadataParser<I, O>(metaType, recordParser, converterFactory.createConverter());
+    }
+
 }
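
A minimal usage sketch of the reworked factory (the generic arguments, literal format values, and in-scope variables below are illustrative assumptions, not taken from the patch):

    // assumed in scope: ARecordType recordType (value type), ARecordType metaType
    Map<String, String> conf = new HashMap<>();
    conf.put(ExternalDataConstants.KEY_RECORD_FORMAT, "adm"); // format of the value record
    conf.put(ExternalDataConstants.KEY_FORMAT, "csv");        // format of the container
    RecordWithMetadataParserFactory<char[], char[]> factory = new RecordWithMetadataParserFactory<>();
    factory.setRecordType(recordType);
    factory.setMetaType(metaType);
    factory.configure(conf); // wires the value parser and the converter, then checks compatibility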

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TestRecordWithPKParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TestRecordWithPKParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TestRecordWithPKParserFactory.java
new file mode 100644
index 0000000..dae8a8f
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TestRecordWithPKParserFactory.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.parser.factory;
+
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IRecordDataParser;
+import org.apache.asterix.external.api.IRecordDataParserFactory;
+import org.apache.asterix.external.input.record.RecordWithPK;
+import org.apache.asterix.external.parser.TestRecordWithPKParser;
+import org.apache.asterix.external.provider.ParserFactoryProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+@SuppressWarnings({ "unchecked" })
+public class TestRecordWithPKParserFactory<T> implements IRecordDataParserFactory<RecordWithPK<T>> {
+
+    private static final long serialVersionUID = 1L;
+    private ARecordType recordType;
+    private IRecordDataParserFactory<char[]> recordParserFactory;
+    private String format;
+    @SuppressWarnings("unused")
+    private IAType[] pkTypes;
+    @SuppressWarnings("unused")
+    private int[][] pkIndexes;
+
+    @Override
+    public void configure(Map<String, String> configuration) throws AsterixException {
+        TreeMap<String, String> parserConf = new TreeMap<String, String>();
+        format = configuration.get(ExternalDataConstants.KEY_RECORD_FORMAT);
+        parserConf.put(ExternalDataConstants.KEY_FORMAT, format);
+        recordParserFactory = (IRecordDataParserFactory<char[]>) ParserFactoryProvider.getDataParserFactory(parserConf);
+        recordParserFactory.setRecordType(recordType);
+        recordParserFactory.configure(configuration);
+    }
+
+    @Override
+    public void setRecordType(ARecordType recordType) {
+        this.recordType = recordType;
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Override
+    public IRecordDataParser<RecordWithPK<T>> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException {
+        return new TestRecordWithPKParser(recordParserFactory.createRecordParser(ctx));
+    }
+
+    @Override
+    public Class<?> getRecordClass() {
+        return RecordWithPK.class;
+    }
+
+    @Override
+    public void setMetaType(ARecordType metaType) {
+    }
+}
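
A sketch of how this test factory is meant to be driven; the literal format value and the in-scope variables are assumptions:

    // assumed in scope: ARecordType recordType; IHyracksTaskContext ctx
    TestRecordWithPKParserFactory<char[]> factory = new TestRecordWithPKParserFactory<>();
    factory.setRecordType(recordType);
    Map<String, String> conf = new HashMap<>();
    conf.put(ExternalDataConstants.KEY_RECORD_FORMAT, "adm"); // inner record format, assumed value
    factory.configure(conf); // delegates to the parser factory for that format
    IRecordDataParser<RecordWithPK<char[]>> parser = factory.createRecordParser(ctx);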

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TweetParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TweetParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TweetParserFactory.java
index 0f3b309..f20f802 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TweetParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/TweetParserFactory.java
@@ -18,11 +18,8 @@
  */
 package org.apache.asterix.external.parser.factory;
 
-import java.io.IOException;
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
 import org.apache.asterix.external.parser.TweetParser;
@@ -35,16 +32,9 @@ public class TweetParserFactory implements IRecordDataParserFactory<Status> {
 
     private static final long serialVersionUID = 1L;
     private ARecordType recordType;
-    private Map<String, String> configuration;
 
     @Override
-    public DataSourceType getDataSourceType() throws AsterixException {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws Exception {
-        this.configuration = configuration;
+    public void configure(Map<String, String> configuration) {
     }
 
     @Override
@@ -53,9 +43,8 @@ public class TweetParserFactory implements IRecordDataParserFactory<Status> {
     }
 
     @Override
-    public IRecordDataParser<Status> createRecordParser(IHyracksTaskContext ctx) throws AsterixException, IOException {
-        TweetParser dataParser = new TweetParser();
-        dataParser.configure(configuration, recordType);
+    public IRecordDataParser<Status> createRecordParser(IHyracksTaskContext ctx) {
+        TweetParser dataParser = new TweetParser(recordType);
         return dataParser;
     }
 
@@ -64,4 +53,8 @@ public class TweetParserFactory implements IRecordDataParserFactory<Status> {
         return Status.class;
     }
 
+    @Override
+    public void setMetaType(ARecordType metaType) {
+    }
+
 }
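
With configuration folded into the parser's constructor, using the factory reduces to two calls; this sketch assumes a tweet ARecordType and a task context in scope:

    // assumed in scope: ARecordType tweetType; IHyracksTaskContext ctx
    TweetParserFactory factory = new TweetParserFactory();
    factory.setRecordType(tweetType);
    IRecordDataParser<Status> parser = factory.createRecordParser(ctx); // no configure step needed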

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
index efbc6bf..f40cd15 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.external.provider;
 
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -27,82 +26,43 @@ import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
 import org.apache.asterix.external.adapter.factory.LookupAdapterFactory;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IIndexingAdapterFactory;
-import org.apache.asterix.external.dataset.adapter.GenericAdapter;
 import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.external.library.ExternalLibraryManager;
 import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
-import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.dataflow.value.INullWriterFactory;
 
+/**
+ * The single entry point for obtaining adapter factories (internal, indexing, and lookup).
+ */
 public class AdapterFactoryProvider {
 
-    public static final Map<String, Class<? extends IAdapterFactory>> adapterFactories = initializeAdapterFactoryMapping();
-
-    private static Map<String, Class<? extends IAdapterFactory>> initializeAdapterFactoryMapping() {
-        Map<String, Class<? extends IAdapterFactory>> adapterFactories = new HashMap<String, Class<? extends IAdapterFactory>>();
-        // Class names
-        adapterFactories.put(GenericAdapter.class.getName(), GenericAdapterFactory.class);
-        // Aliases
-        adapterFactories.put(ExternalDataConstants.ALIAS_GENERIC_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_HDFS_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_LOCALFS_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_SOCKET_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_SOCKET_CLIENT_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_FILE_FEED_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_TWITTER_PULL_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_TWITTER_PUSH_ADAPTER, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_LOCALFS_PUSH_ADAPTER, GenericAdapterFactory.class);
-
-        // Compatability
-        adapterFactories.put(ExternalDataConstants.ADAPTER_HDFS_CLASSNAME, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ADAPTER_LOCALFS_CLASSNAME, GenericAdapterFactory.class);
-        adapterFactories.put(ExternalDataConstants.ALIAS_TWITTER_FIREHOSE_ADAPTER, GenericAdapterFactory.class);
-        return adapterFactories;
-    }
-
-    public static IAdapterFactory getAdapterFactory(String adapterClassname, Map<String, String> configuration,
-            ARecordType itemType) throws Exception {
-        ExternalDataCompatibilityUtils.addCompatabilityParameters(adapterClassname, itemType, configuration);
-        if (!adapterFactories.containsKey(adapterClassname)) {
-            throw new AsterixException("Unknown adapter: " + adapterClassname);
-        }
-        IAdapterFactory adapterFactory = adapterFactories.get(adapterClassname).newInstance();
-        adapterFactory.configure(configuration, itemType);
+    // Internal Adapters
+    public static IAdapterFactory getAdapterFactory(String adapterName, Map<String, String> configuration,
+            ARecordType itemType, ARecordType metaType) throws AsterixException {
+        ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
+        ExternalDataUtils.validateParameters(configuration);
+        GenericAdapterFactory adapterFactory = new GenericAdapterFactory();
+        adapterFactory.configure(configuration, itemType, metaType);
         return adapterFactory;
     }
 
-    public static IIndexingAdapterFactory getAdapterFactory(String adapterClassname, Map<String, String> configuration,
-            ARecordType itemType, List<ExternalFile> snapshot, boolean indexingOp)
-                    throws AsterixException, InstantiationException, IllegalAccessException {
-        ExternalDataCompatibilityUtils.addCompatabilityParameters(adapterClassname, itemType, configuration);
-        if (!adapterFactories.containsKey(adapterClassname)) {
-            throw new AsterixException("Unknown adapter");
-        }
-        try {
-            IIndexingAdapterFactory adapterFactory = (IIndexingAdapterFactory) adapterFactories.get(adapterClassname)
-                    .newInstance();
-            adapterFactory.setSnapshot(snapshot, indexingOp);
-            adapterFactory.configure(configuration, itemType);
-            return adapterFactory;
-        } catch (Exception e) {
-            throw new AsterixException("Failed to create indexing adapter factory.", e);
-        }
-    }
-
-    @SuppressWarnings("unchecked")
-    public static void addNewAdapter(String dataverseName, String adapterClassName, String adapterAlias,
-            String adapterFactoryClassName, String libraryName) throws ClassNotFoundException {
-        ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(dataverseName, libraryName);
-        Class<? extends IAdapterFactory> adapterFactoryClass = (Class<? extends IAdapterFactory>) classLoader
-                .loadClass(adapterFactoryClassName);
-        adapterFactories.put(adapterClassName, adapterFactoryClass);
-        adapterFactories.put(adapterAlias, adapterFactoryClass);
+    // Indexing Adapters
+    public static IIndexingAdapterFactory getIndexingAdapterFactory(String adapterName,
+            Map<String, String> configuration, ARecordType itemType, List<ExternalFile> snapshot, boolean indexingOp,
+            ARecordType metaType) throws AsterixException {
+        ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
+        ExternalDataUtils.validateParameters(configuration);
+        GenericAdapterFactory adapterFactory = new GenericAdapterFactory();
+        adapterFactory.setSnapshot(snapshot, indexingOp);
+        adapterFactory.configure(configuration, itemType, metaType);
+        return adapterFactory;
     }
 
-    public static LookupAdapterFactory<?> getAdapterFactory(Map<String, String> configuration, ARecordType recordType,
-            int[] ridFields, boolean retainInput, boolean retainNull, INullWriterFactory iNullWriterFactory)
-                    throws Exception {
+    // Lookup Adapters
+    public static LookupAdapterFactory<?> getLookupAdapterFactory(Map<String, String> configuration,
+            ARecordType recordType, int[] ridFields, boolean retainInput, boolean retainNull,
+            INullWriterFactory iNullWriterFactory) throws AsterixException {
         LookupAdapterFactory<?> adapterFactory = new LookupAdapterFactory<>(recordType, ridFields, retainInput,
                 retainNull, iNullWriterFactory);
         adapterFactory.configure(configuration);
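
A sketch of the new internal-adapter path (the format value and item type are assumptions): prepare() seeds the reader/parser defaults, the parameters are validated, and a GenericAdapterFactory is configured with both the item type and the (possibly null) meta type.

    // assumed in scope: ARecordType itemType
    Map<String, String> conf = new HashMap<>();
    conf.put(ExternalDataConstants.KEY_FORMAT, "adm");
    IAdapterFactory adapterFactory =
            AdapterFactoryProvider.getAdapterFactory("localfs", conf, itemType, null /* no meta */);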

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
index d4d9a18..159ea73 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
@@ -18,113 +18,115 @@
  */
 package org.apache.asterix.external.provider;
 
+import java.io.IOException;
 import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IDataParserFactory;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
+import org.apache.asterix.external.api.IIndexingDatasource;
 import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.api.IRecordReaderFactory;
+import org.apache.asterix.external.api.IRecordWithPKDataParser;
 import org.apache.asterix.external.api.IStreamDataParser;
 import org.apache.asterix.external.api.IStreamDataParserFactory;
-import org.apache.asterix.external.api.IStreamFlowController;
+import org.apache.asterix.external.dataflow.ChangeFeedDataFlowController;
+import org.apache.asterix.external.dataflow.ChangeFeedWithMetaDataFlowController;
 import org.apache.asterix.external.dataflow.FeedRecordDataFlowController;
 import org.apache.asterix.external.dataflow.FeedStreamDataFlowController;
+import org.apache.asterix.external.dataflow.FeedTupleForwarder;
+import org.apache.asterix.external.dataflow.FeedWithMetaDataFlowController;
 import org.apache.asterix.external.dataflow.IndexingDataFlowController;
 import org.apache.asterix.external.dataflow.RecordDataFlowController;
 import org.apache.asterix.external.dataflow.StreamDataFlowController;
 import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.asterix.external.parser.RecordWithMetadataParser;
 import org.apache.asterix.external.util.DataflowUtils;
+import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.asterix.external.util.FeedUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
 
 public class DataflowControllerProvider {
 
-    /**
-     * Order of calls:
-     * 1. Constructor()
-     * 2. configure(configuration,ctx)
-     * 3. setTupleForwarder(forwarder)
-     * 4. if record flow controller
-     * |-a. Set record reader
-     * |-b. Set record parser
-     * else
-     * |-a. Set stream parser
-     * 5. start(writer)
-     * @param feedLogFileSplits
-     * @param isFeed
-     */
-
     // TODO: Instead, use a factory just like data source and data parser.
     @SuppressWarnings({ "rawtypes", "unchecked" })
     public static IDataFlowController getDataflowController(ARecordType recordType, IHyracksTaskContext ctx,
             int partition, IExternalDataSourceFactory dataSourceFactory, IDataParserFactory dataParserFactory,
             Map<String, String> configuration, boolean indexingOp, boolean isFeed, FileSplit[] feedLogFileSplits)
-                    throws Exception {
-        FeedLogManager feedLogManager = null;
-        if (isFeed) {
-            feedLogManager = FeedUtils.getFeedLogManager(ctx, partition, feedLogFileSplits);
-        }
-        switch (dataSourceFactory.getDataSourceType()) {
-            case RECORDS:
-                IDataFlowController recordDataFlowController = null;
-                IRecordReaderFactory<?> recordReaderFactory = (IRecordReaderFactory<?>) dataSourceFactory;
-                IRecordReader<?> recordReader = recordReaderFactory.createRecordReader(ctx, partition);
-                try {
-                    recordReader.configure(configuration);
+                    throws HyracksDataException {
+        try {
+            FeedLogManager feedLogManager = null;
+            if (isFeed) {
+                feedLogManager = FeedUtils.getFeedLogManager(ctx, partition, feedLogFileSplits);
+            }
+            switch (dataSourceFactory.getDataSourceType()) {
+                case RECORDS:
+                    IRecordReaderFactory<?> recordReaderFactory = (IRecordReaderFactory<?>) dataSourceFactory;
+                    IRecordReader<?> recordReader = recordReaderFactory.createRecordReader(ctx, partition);
                     IRecordDataParserFactory<?> recordParserFactory = (IRecordDataParserFactory<?>) dataParserFactory;
                     IRecordDataParser<?> dataParser = recordParserFactory.createRecordParser(ctx);
-                    dataParser.configure(configuration, recordType);
                     if (indexingOp) {
-                        recordDataFlowController = new IndexingDataFlowController(dataParser, recordReader);
+                        return new IndexingDataFlowController(ctx,
+                                DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser,
+                                recordReader, ((IIndexingDatasource) recordReader).getIndexer());
                     } else if (isFeed) {
-                        recordDataFlowController = new FeedRecordDataFlowController(feedLogManager, dataParser,
-                                recordReader);
+                        FeedTupleForwarder tupleForwarder = (FeedTupleForwarder) DataflowUtils
+                                .getTupleForwarder(configuration, feedLogManager);
+                        boolean isChangeFeed = ExternalDataUtils.isChangeFeed(configuration);
+                        boolean isRecordWithMeta = ExternalDataUtils.isRecordWithMeta(configuration);
+                        if (isRecordWithMeta) {
+                            if (isChangeFeed) {
+                                int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
+                                return new ChangeFeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager,
+                                        numOfKeys + 2, (RecordWithMetadataParser) dataParser, recordReader);
+                            } else {
+                                return new FeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager, 2,
+                                        (RecordWithMetadataParser) dataParser, recordReader);
+                            }
+                        } else if (isChangeFeed) {
+                            int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
+                            return new ChangeFeedDataFlowController(ctx, tupleForwarder, feedLogManager, numOfKeys + 1,
+                                    (IRecordWithPKDataParser) dataParser, recordReader);
+                        } else {
+                            return new FeedRecordDataFlowController(ctx, tupleForwarder, feedLogManager, 1, dataParser,
+                                    recordReader);
+                        }
                     } else {
-                        recordDataFlowController = new RecordDataFlowController(dataParser, recordReader);
+                        return new RecordDataFlowController(ctx,
+                                DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser,
+                                recordReader, 1);
                     }
-                    recordDataFlowController.configure(configuration, ctx);
-                    recordDataFlowController
-                            .setTupleForwarder(DataflowUtils.getTupleForwarder(configuration, feedLogManager));
-                    return recordDataFlowController;
-                } catch (Exception e) {
-                    recordReader.close();
-                    throw e;
-                }
-            case STREAM:
-                IStreamFlowController streamDataFlowController = null;
-                if (isFeed) {
-                    streamDataFlowController = new FeedStreamDataFlowController(feedLogManager);
-                } else {
-                    streamDataFlowController = new StreamDataFlowController();
-                }
-                streamDataFlowController.configure(configuration, ctx);
-                streamDataFlowController
-                        .setTupleForwarder(DataflowUtils.getTupleForwarder(configuration, feedLogManager));
-                IInputStreamProviderFactory streamProviderFactory = (IInputStreamProviderFactory) dataSourceFactory;
-                streamProviderFactory.configure(configuration);
-                IInputStreamProvider streamProvider = streamProviderFactory.createInputStreamProvider(ctx, partition);
-                streamProvider.setFeedLogManager(feedLogManager);
-                streamProvider.configure(configuration);
-                IStreamDataParserFactory streamParserFactory = (IStreamDataParserFactory) dataParserFactory;
-                streamParserFactory.configure(configuration);
-                IStreamDataParser streamParser = streamParserFactory.createInputStreamParser(ctx, partition);
-                streamParser.configure(configuration, recordType);
-                AInputStream inputStream = streamProvider.getInputStream();
-                streamParser.setInputStream(inputStream);
-                streamDataFlowController.setStreamParser(streamParser);
-                return streamDataFlowController;
-            default:
-                throw new AsterixException("Unknown data source type: " + dataSourceFactory.getDataSourceType());
+                case STREAM:
+                    IInputStreamProviderFactory streamProviderFactory = (IInputStreamProviderFactory) dataSourceFactory;
+                    IInputStreamProvider streamProvider = streamProviderFactory.createInputStreamProvider(ctx,
+                            partition);
+                    IStreamDataParserFactory streamParserFactory = (IStreamDataParserFactory) dataParserFactory;
+                    IStreamDataParser streamParser = streamParserFactory.createInputStreamParser(ctx, partition);
+                    AInputStream inputStream = streamProvider.getInputStream();
+                    streamParser.setInputStream(inputStream);
+                    if (isFeed) {
+                        return new FeedStreamDataFlowController(ctx,
+                                (FeedTupleForwarder) DataflowUtils.getTupleForwarder(configuration, feedLogManager),
+                                feedLogManager, FeedUtils.getNumOfFields(configuration), streamParser, inputStream);
+                    } else {
+                        return new StreamDataFlowController(ctx, DataflowUtils.getTupleForwarder(configuration, null),
+                                streamParser);
+                    }
+                default:
+                    throw new HyracksDataException(
+                            "Unknown data source type: " + dataSourceFactory.getDataSourceType());
+            }
+        } catch (IOException | AsterixException e) {
+            throw new HyracksDataException(e);
         }
     }
-
 }
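
The controllers above differ mainly in how many fields each output tuple carries; a hedged sketch of that rule, matching the constructor arguments used in the dispatch:

    // a plain feed tuple holds one field (the record); meta adds one more,
    // and a change feed prepends the primary-key fields
    static int numOfTupleFields(int numOfKeys, boolean hasMeta, boolean isChangeFeed) {
        int fields = 1;              // the record itself
        if (hasMeta) {
            fields++;                // the meta record
        }
        if (isChangeFeed) {
            fields += numOfKeys;     // the primary key(s)
        }
        return fields;
    }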

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
index 0d65f72..e9307e5 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
@@ -22,16 +22,19 @@ import java.util.Map;
 
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
+import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.input.HDFSDataSourceFactory;
-import org.apache.asterix.external.input.record.reader.couchbase.CouchbaseReaderFactory;
+import org.apache.asterix.external.input.record.reader.RecordWithPKTestReaderFactory;
+import org.apache.asterix.external.input.record.reader.kv.KVReaderFactory;
+import org.apache.asterix.external.input.record.reader.kv.KVTestReaderFactory;
 import org.apache.asterix.external.input.record.reader.stream.EmptyLineSeparatedRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.stream.LineRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.twitter.TwitterRecordReaderFactory;
 import org.apache.asterix.external.input.stream.factory.LocalFSInputStreamProviderFactory;
-import org.apache.asterix.external.input.stream.factory.SocketInputStreamProviderFactory;
+import org.apache.asterix.external.input.stream.factory.SocketServerInputStreamProviderFactory;
 import org.apache.asterix.external.input.stream.factory.TwitterFirehoseStreamProviderFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
@@ -39,82 +42,108 @@ import org.apache.asterix.external.util.ExternalDataUtils;
 public class DatasourceFactoryProvider {
 
     public static IExternalDataSourceFactory getExternalDataSourceFactory(Map<String, String> configuration)
-            throws Exception {
-        switch (ExternalDataUtils.getDataSourceType(configuration)) {
-            case RECORDS:
-                return DatasourceFactoryProvider.getRecordReaderFactory(configuration);
-            case STREAM:
-                return DatasourceFactoryProvider
-                        .getInputStreamFactory(configuration.get(ExternalDataConstants.KEY_STREAM), configuration);
+            throws AsterixException {
+        if (ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.RECORDS)) {
+            String reader = configuration.get(ExternalDataConstants.KEY_READER);
+            return DatasourceFactoryProvider.getRecordReaderFactory(reader, configuration);
+        } else {
+            // get stream source
+            String streamSource = configuration.get(ExternalDataConstants.KEY_STREAM_SOURCE);
+            return DatasourceFactoryProvider.getInputStreamFactory(streamSource, configuration);
         }
-        return null;
     }
 
-    public static IInputStreamProviderFactory getInputStreamFactory(String stream, Map<String, String> configuration)
-            throws Exception {
-        IInputStreamProviderFactory streamFactory;
-        if (ExternalDataUtils.isExternal(stream)) {
+    public static IInputStreamProviderFactory getInputStreamFactory(String streamSource,
+            Map<String, String> configuration) throws AsterixException {
+        IInputStreamProviderFactory streamSourceFactory;
+        if (ExternalDataUtils.isExternal(streamSource)) {
             String dataverse = ExternalDataUtils.getDataverse(configuration);
-            streamFactory = ExternalDataUtils.createExternalInputStreamFactory(dataverse, stream);
+            streamSourceFactory = ExternalDataUtils.createExternalInputStreamFactory(dataverse, streamSource);
         } else {
-            switch (stream) {
+            switch (streamSource) {
                 case ExternalDataConstants.STREAM_HDFS:
-                    streamFactory = new HDFSDataSourceFactory();
+                    streamSourceFactory = new HDFSDataSourceFactory();
                     break;
                 case ExternalDataConstants.STREAM_LOCAL_FILESYSTEM:
-                    streamFactory = new LocalFSInputStreamProviderFactory();
+                    streamSourceFactory = new LocalFSInputStreamProviderFactory();
                     break;
                 case ExternalDataConstants.STREAM_SOCKET:
-                    streamFactory = new SocketInputStreamProviderFactory();
+                case ExternalDataConstants.ALIAS_SOCKET_ADAPTER:
+                case ExternalDataConstants.STREAM_SOCKET_CLIENT:
+                    streamSourceFactory = new SocketServerInputStreamProviderFactory();
                     break;
                 case ExternalDataConstants.ALIAS_TWITTER_FIREHOSE_ADAPTER:
-                    streamFactory = new TwitterFirehoseStreamProviderFactory();
+                    streamSourceFactory = new TwitterFirehoseStreamProviderFactory();
                     break;
                 default:
                     throw new AsterixException("unknown input stream factory");
             }
         }
-        return streamFactory;
+        return streamSourceFactory;
     }
 
-    public static IRecordReaderFactory<?> getRecordReaderFactory(Map<String, String> configuration) throws Exception {
-        String reader = configuration.get(ExternalDataConstants.KEY_READER);
-        IRecordReaderFactory<?> readerFactory;
-        if (ExternalDataUtils.isExternal(reader)) {
-            String dataverse = ExternalDataUtils.getDataverse(configuration);
-            readerFactory = ExternalDataUtils.createExternalRecordReaderFactory(dataverse, reader);
-        } else {
-            switch (reader) {
-                case ExternalDataConstants.READER_HDFS:
-                    readerFactory = new HDFSDataSourceFactory();
-                    break;
-                case ExternalDataConstants.READER_ADM:
-                case ExternalDataConstants.READER_SEMISTRUCTURED:
-                    readerFactory = new SemiStructuredRecordReaderFactory()
-                            .setInputStreamFactoryProvider(DatasourceFactoryProvider.getInputStreamFactory(
-                                    ExternalDataUtils.getRecordReaderStreamName(configuration), configuration));
-                    break;
-                case ExternalDataConstants.READER_DELIMITED:
-                    readerFactory = new LineRecordReaderFactory()
-                            .setInputStreamFactoryProvider(DatasourceFactoryProvider.getInputStreamFactory(
-                                    ExternalDataUtils.getRecordReaderStreamName(configuration), configuration));;
-                    break;
-                case ExternalDataConstants.READER_TWITTER_PULL:
-                case ExternalDataConstants.READER_TWITTER_PUSH:
-                    readerFactory = new TwitterRecordReaderFactory();
-                    break;
-                case ExternalDataConstants.READER_COUCHBASE:
-                    readerFactory = new CouchbaseReaderFactory();
-                    break;
-                case ExternalDataConstants.READER_LINE_SEPARATED:
-                    readerFactory = new EmptyLineSeparatedRecordReaderFactory()
-                            .setInputStreamFactoryProvider(DatasourceFactoryProvider.getInputStreamFactory(
-                                    ExternalDataUtils.getRecordReaderStreamName(configuration), configuration));
-                    break;
-                default:
-                    throw new AsterixException("unknown record reader factory: " + reader);
+    public static IRecordReaderFactory<?> getRecordReaderFactory(String reader, Map<String, String> configuration)
+            throws AsterixException {
+        if (reader.equals(ExternalDataConstants.EXTERNAL)) {
+            return ExternalDataUtils.createExternalRecordReaderFactory(configuration);
+        }
+        String parser = configuration.get(ExternalDataConstants.KEY_PARSER);
+        IInputStreamProviderFactory inputStreamFactory;
+        // "parser" may be absent from the configuration; in that case, skip this
+        // switch and fall through to the format-based dispatch below
+        if (parser != null) {
+            switch (parser) {
+                case ExternalDataConstants.FORMAT_ADM:
+                case ExternalDataConstants.FORMAT_JSON:
+                case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
+                    inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
+                    return new SemiStructuredRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
+                case ExternalDataConstants.FORMAT_LINE_SEPARATED:
+                    inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
+                    return new EmptyLineSeparatedRecordReaderFactory()
+                            .setInputStreamFactoryProvider(inputStreamFactory);
+                case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
+                case ExternalDataConstants.FORMAT_CSV:
+                    inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
+                    return new LineRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
+                case ExternalDataConstants.FORMAT_RECORD_WITH_METADATA:
+                    switch (reader) {
+                        case ExternalDataConstants.READER_KV:
+                            return new KVReaderFactory();
+                        case ExternalDataConstants.READER_KV_TEST:
+                            return new KVTestReaderFactory();
+                    }
+            }
+        }
+        String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
+        if (format != null) {
+            switch (format) {
+                case ExternalDataConstants.FORMAT_ADM:
+                case ExternalDataConstants.FORMAT_JSON:
+                case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
+                    inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
+                    return new SemiStructuredRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
+                case ExternalDataConstants.FORMAT_LINE_SEPARATED:
+                    inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
+                    return new EmptyLineSeparatedRecordReaderFactory()
+                            .setInputStreamFactoryProvider(inputStreamFactory);
+                case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
+                case ExternalDataConstants.FORMAT_CSV:
+                    inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
+                    return new LineRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
             }
         }
-        return readerFactory;
+        switch (reader) {
+            case ExternalDataConstants.READER_HDFS:
+                return new HDFSDataSourceFactory();
+            case ExternalDataConstants.READER_TWITTER_PULL:
+            case ExternalDataConstants.READER_TWITTER_PUSH:
+                return new TwitterRecordReaderFactory();
+            case ExternalDataConstants.READER_KV:
+                return new KVReaderFactory();
+            case ExternalDataConstants.READER_KV_TEST:
+                return new KVTestReaderFactory();
+            case ExternalDataConstants.TEST_RECORD_WITH_PK:
+                return new RecordWithPKTestReaderFactory();
+            default:
+                throw new AsterixException("unknown record reader factory: " + reader);
+        }
     }
 }
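
A sketch of the resolution order implemented above: an external reader wins, then the "parser" property, then "format", and finally the reader name itself (the literal constant values here are assumptions):

    Map<String, String> conf = new HashMap<>();
    conf.put(ExternalDataConstants.KEY_PARSER, "delimited-text");
    IRecordReaderFactory<?> readerFactory =
            DatasourceFactoryProvider.getRecordReaderFactory("localfs", conf);
    // expected: a LineRecordReaderFactory reading from the localfs input stream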

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
index 18b9cb5..a131ae7 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
@@ -29,7 +29,8 @@ import org.apache.asterix.external.util.HDFSUtils;
 public class LookupReaderFactoryProvider {
 
     @SuppressWarnings("rawtypes")
-    public static ILookupReaderFactory getLookupReaderFactory(Map<String, String> configuration) throws Exception {
+    public static ILookupReaderFactory getLookupReaderFactory(Map<String, String> configuration)
+            throws AsterixException {
         String inputFormat = HDFSUtils.getInputFormatClassName(configuration);
         if (inputFormat.equals(ExternalDataConstants.CLASS_NAME_TEXT_INPUT_FORMAT)
                 || inputFormat.equals(ExternalDataConstants.CLASS_NAME_SEQUENCE_INPUT_FORMAT)

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
index 30595db..06928b3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
@@ -20,49 +20,57 @@ package org.apache.asterix.external.provider;
 
 import java.util.Map;
 
+import javax.annotation.Nonnull;
+
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IDataParserFactory;
-import org.apache.asterix.external.input.record.RecordWithMetadata;
 import org.apache.asterix.external.parser.factory.ADMDataParserFactory;
 import org.apache.asterix.external.parser.factory.DelimitedDataParserFactory;
 import org.apache.asterix.external.parser.factory.HiveDataParserFactory;
 import org.apache.asterix.external.parser.factory.RSSParserFactory;
 import org.apache.asterix.external.parser.factory.RecordWithMetadataParserFactory;
+import org.apache.asterix.external.parser.factory.TestRecordWithPKParserFactory;
 import org.apache.asterix.external.parser.factory.TweetParserFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
 
 public class ParserFactoryProvider {
-    public static IDataParserFactory getDataParserFactory(Map<String, String> configuration)
-            throws InstantiationException, IllegalAccessException, ClassNotFoundException, AsterixException {
+    public static IDataParserFactory getDataParserFactory(Map<String, String> configuration) throws AsterixException {
         IDataParserFactory parserFactory = null;
         String parserFactoryName = configuration.get(ExternalDataConstants.KEY_DATA_PARSER);
-        if (parserFactoryName != null && ExternalDataUtils.isExternal(parserFactoryName)) {
+        if ((parserFactoryName != null) && ExternalDataUtils.isExternal(parserFactoryName)) {
             return ExternalDataUtils.createExternalParserFactory(ExternalDataUtils.getDataverse(configuration),
                     parserFactoryName);
         } else {
-            parserFactory = ParserFactoryProvider.getParserFactory(ExternalDataUtils.getRecordFormat(configuration));
+            parserFactory = ParserFactoryProvider
+                    .getDataParserFactory(ExternalDataUtils.getRecordFormat(configuration));
         }
         return parserFactory;
     }
 
-    private static IDataParserFactory getParserFactory(String recordFormat) throws AsterixException {
-        switch (recordFormat) {
+    @SuppressWarnings("rawtypes")
+    public static IDataParserFactory getDataParserFactory(@Nonnull String parser) throws AsterixException {
+        switch (parser) {
             case ExternalDataConstants.FORMAT_ADM:
             case ExternalDataConstants.FORMAT_JSON:
+            case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
                 return new ADMDataParserFactory();
             case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
+            case ExternalDataConstants.FORMAT_CSV:
                 return new DelimitedDataParserFactory();
             case ExternalDataConstants.FORMAT_HIVE:
+            case ExternalDataConstants.PARSER_HIVE:
                 return new HiveDataParserFactory();
             case ExternalDataConstants.FORMAT_TWEET:
                 return new TweetParserFactory();
             case ExternalDataConstants.FORMAT_RSS:
                 return new RSSParserFactory();
-            case ExternalDataConstants.FORMAT_RECORD_WITH_META:
-                return new RecordWithMetadataParserFactory<RecordWithMetadata<?>>();
+            case ExternalDataConstants.FORMAT_RECORD_WITH_METADATA:
+                return new RecordWithMetadataParserFactory();
+            case ExternalDataConstants.TEST_RECORD_WITH_PK:
+                return new TestRecordWithPKParserFactory();
             default:
-                throw new AsterixException("Unknown data format");
+                throw new AsterixException("Unknown parser " + parser);
         }
     }
 }
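
A sketch of the name-based overload; the literal format strings stand in for the corresponding constants and are assumptions:

    IDataParserFactory admParser = ParserFactoryProvider.getDataParserFactory("adm"); // ADMDataParserFactory
    IDataParserFactory csvParser = ParserFactoryProvider.getDataParserFactory("csv"); // DelimitedDataParserFactory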

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/provider/RecordConverterFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/RecordConverterFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/RecordConverterFactoryProvider.java
new file mode 100644
index 0000000..77e634f
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/RecordConverterFactoryProvider.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.provider;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.input.record.converter.CSVWithRecordConverterFactory;
+import org.apache.asterix.external.input.record.converter.DCPConverterFactory;
+import org.apache.asterix.external.input.record.converter.IRecordConverterFactory;
+import org.apache.asterix.external.util.ExternalDataConstants;
+
+@SuppressWarnings("rawtypes")
+public class RecordConverterFactoryProvider {
+
+    public static IRecordConverterFactory getConverterFactory(String format, String recordFormat)
+            throws AsterixException {
+        switch (recordFormat) {
+            case ExternalDataConstants.FORMAT_ADM:
+            case ExternalDataConstants.FORMAT_JSON:
+                // converter that produces records of adm/json type
+                switch (format) {
+                    case ExternalDataConstants.FORMAT_CSV:
+                    case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
+                        return new CSVWithRecordConverterFactory();
+                    case ExternalDataConstants.FORMAT_DCP:
+                        return new DCPConverterFactory();
+                }
+        }
+        throw new AsterixException("Unknown Converter Factory that can convert from " + format + " to " + recordFormat);
+    }
+}
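
A sketch of the (format, record-format) lookup: only csv/delimited-text and dcp sources converting to adm/json records resolve, and the literal values below are assumptions:

    IRecordConverterFactory csvToAdm =
            RecordConverterFactoryProvider.getConverterFactory("csv", "adm"); // CSVWithRecordConverterFactory
    IRecordConverterFactory dcpToAdm =
            RecordConverterFactoryProvider.getConverterFactory("dcp", "adm"); // DCPConverterFactory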

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
index cab8a69..ad945f2 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
@@ -20,7 +20,6 @@ package org.apache.asterix.external.util;
 
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.asterix.external.api.ITupleForwarder.TupleForwardPolicy;
 import org.apache.asterix.external.dataflow.CounterTimerTupleForwarder;
@@ -44,8 +43,7 @@ public class DataflowUtils {
     }
 
     public static ITupleForwarder getTupleForwarder(Map<String, String> configuration, FeedLogManager feedLogManager)
-            throws AsterixException {
-        ITupleForwarder policy = null;
+            throws HyracksDataException {
         ITupleForwarder.TupleForwardPolicy policyType = null;
         String propValue = configuration.get(ITupleForwarder.FORWARD_POLICY);
         if (ExternalDataUtils.isFeed(configuration)) {
@@ -58,20 +56,15 @@ public class DataflowUtils {
         }
         switch (policyType) {
             case FEED:
-                policy = new FeedTupleForwarder(feedLogManager);
-                break;
+                return new FeedTupleForwarder(feedLogManager);
             case FRAME_FULL:
-                policy = new FrameFullTupleForwarder();
-                break;
+                return new FrameFullTupleForwarder();
             case COUNTER_TIMER_EXPIRED:
-                policy = new CounterTimerTupleForwarder();
-                break;
+                return CounterTimerTupleForwarder.create(configuration);
             case RATE_CONTROLLED:
-                policy = new RateControlledTupleForwarder();
-                break;
+                return RateControlledTupleForwarder.create(configuration);
             default:
-                throw new AsterixException("Unknown tuple forward policy");
+                throw new HyracksDataException("Unknown tuple forward policy");
         }
-        return policy;
     }
 }
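
A sketch of selecting a forwarder after this change; the policy string is an assumed value for the corresponding TupleForwardPolicy:

    Map<String, String> conf = new HashMap<>();
    conf.put(ITupleForwarder.FORWARD_POLICY, "frame_full");
    ITupleForwarder forwarder = DataflowUtils.getTupleForwarder(conf, null); // no feed log manager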

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
index 035c1c3..e222e99 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
@@ -26,7 +26,7 @@ import org.apache.asterix.external.api.IExternalDataSourceFactory;
 import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
 import org.apache.asterix.external.api.IRecordReaderFactory;
-import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.external.input.record.converter.IRecordConverterFactory;
 
 public class ExternalDataCompatibilityUtils {
 
@@ -48,78 +48,25 @@ public class ExternalDataCompatibilityUtils {
         }
     }
 
-    // TODO:Add remaining aliases
-    public static void addCompatabilityParameters(String adapterName, ARecordType itemType,
-            Map<String, String> configuration) throws AsterixException {
-        // HDFS
-        if (adapterName.equals(ExternalDataConstants.ALIAS_HDFS_ADAPTER)
-                || adapterName.equalsIgnoreCase(ExternalDataConstants.ADAPTER_HDFS_CLASSNAME)) {
-            if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
-                throw new AsterixException("Unspecified format parameter for HDFS adapter");
-            }
-            if (configuration.get(ExternalDataConstants.KEY_FORMAT).equals(ExternalDataConstants.FORMAT_BINARY)
-                    || configuration.get(ExternalDataConstants.KEY_FORMAT).equals(ExternalDataConstants.FORMAT_HIVE)) {
-                configuration.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_HDFS);
-            } else {
-                configuration.put(ExternalDataConstants.KEY_READER,
-                        configuration.get(ExternalDataConstants.KEY_FORMAT));
-                configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.ALIAS_HDFS_ADAPTER);
-            }
-        }
-
-        // Local Filesystem
-        if (adapterName.equals(ExternalDataConstants.ALIAS_LOCALFS_ADAPTER)
-                || adapterName.contains(ExternalDataConstants.ADAPTER_LOCALFS_CLASSNAME)
-                || adapterName.contains(ExternalDataConstants.ALIAS_LOCALFS_PUSH_ADAPTER)) {
-            if (configuration.get(ExternalDataConstants.KEY_READER) == null) {
-                if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
-                    // If reader is specified, we will use the selected reader. If format is
-                    // specified, we will assign a suitable reader for the format.
-                    // TODO: better error message
-                    throw new AsterixException(
-                            "Unspecified (\"reader\" or \"format\") parameter for local filesystem adapter");
-                }
-                configuration.put(ExternalDataConstants.KEY_READER,
-                        configuration.get(ExternalDataConstants.KEY_FORMAT));
-                configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.ALIAS_LOCALFS_ADAPTER);
-            }
-        }
-
-        // Socket
-        if (adapterName.equalsIgnoreCase(ExternalDataConstants.ALIAS_SOCKET_ADAPTER)
-                || adapterName.equalsIgnoreCase(ExternalDataConstants.ALIAS_SOCKET_CLIENT_ADAPTER)) {
-            if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
-                throw new AsterixException("Unspecified format parameter for socket adapter");
-            }
-            configuration.put(ExternalDataConstants.KEY_READER, configuration.get(ExternalDataConstants.KEY_FORMAT));
-            configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.STREAM_SOCKET);
-        }
-        // Twitter (Pull)
-        if (adapterName.equals(ExternalDataConstants.ALIAS_TWITTER_PULL_ADAPTER)) {
-            configuration.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_TWITTER_PULL);
-            configuration.put(ExternalDataConstants.KEY_PULL, ExternalDataConstants.TRUE);
-            ExternalDataUtils.setRecordFormat(configuration, ExternalDataConstants.FORMAT_TWEET);
-        }
-
-        // Twitter (Push)
-        if (adapterName.equals(ExternalDataConstants.ALIAS_TWITTER_PUSH_ADAPTER)) {
-            configuration.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_TWITTER_PUSH);
-            configuration.put(ExternalDataConstants.KEY_PUSH, ExternalDataConstants.TRUE);
-            ExternalDataUtils.setRecordFormat(configuration, ExternalDataConstants.FORMAT_TWEET);
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    public static void validateCompatibility(IRecordDataParserFactory recordParserFactory,
+            IRecordConverterFactory converterFactory) throws AsterixException {
+        if (!recordParserFactory.getRecordClass().isAssignableFrom(converterFactory.getOutputClass())) {
+            throw new AsterixException(
+                    "datasource converter-record parser mismatch. converter produces records of type "
+                            + converterFactory.getOutputClass() + " and parser expects records of type "
+                            + recordParserFactory.getRecordClass());
         }
+    }
 
-        // Hive Parser
-        if (configuration.get(ExternalDataConstants.KEY_PARSER) != null
-                && configuration.get(ExternalDataConstants.KEY_PARSER).equals(ExternalDataConstants.PARSER_HIVE)) {
-            configuration.put(ExternalDataConstants.KEY_PARSER, ExternalDataConstants.FORMAT_HIVE);
+    public static void prepare(String adapterName, Map<String, String> configuration) {
+        if (!configuration.containsKey(ExternalDataConstants.KEY_READER)) {
+            configuration.put(ExternalDataConstants.KEY_READER, adapterName);
         }
-
-        // FileSystem for Feed adapter
-        if (configuration.get(ExternalDataConstants.KEY_FILESYSTEM) != null) {
-            configuration.put(ExternalDataConstants.KEY_STREAM,
-                    configuration.get(ExternalDataConstants.KEY_FILESYSTEM));
-            if (adapterName.equalsIgnoreCase(ExternalDataConstants.ALIAS_FILE_FEED_ADAPTER)) {
-                configuration.put(ExternalDataConstants.KEY_WAIT_FOR_DATA, ExternalDataConstants.FALSE);
+        if (!configuration.containsKey(ExternalDataConstants.KEY_PARSER)) {
+            if (configuration.containsKey(ExternalDataConstants.KEY_FORMAT)) {
+                configuration.put(ExternalDataConstants.KEY_PARSER,
+                        configuration.get(ExternalDataConstants.KEY_FORMAT));
             }
         }
     }
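
For illustration, a minimal standalone sketch of the normalization prepare() applies: defaulting the reader to the adapter name and the parser to the format. The literal keys "reader", "parser", and "format" match the constants in ExternalDataConstants below; the class name and adapter value are illustrative only.

    import java.util.HashMap;
    import java.util.Map;

    public class PrepareSketch {
        public static void main(String[] args) {
            Map<String, String> conf = new HashMap<>();
            conf.put("format", "adm"); // the user supplied only a format
            String adapterName = "localfs";
            // default the reader to the adapter name, as prepare() does
            if (!conf.containsKey("reader")) {
                conf.put("reader", adapterName);
            }
            // default the parser to the format, as prepare() does
            if (!conf.containsKey("parser") && conf.containsKey("format")) {
                conf.put("parser", conf.get("format"));
            }
            System.out.println(conf); // {parser=adm, format=adm, reader=localfs} (order may vary)
        }
    }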

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index 4b2826c..0e68698 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -36,15 +36,15 @@ public class ExternalDataConstants {
     public static final String KEY_SOCKETS = "sockets";
     // specify whether the socket address points to an NC or an IP
     public static final String KEY_MODE = "address-type";
-    // specify the hdfs name node address when reading hdfs data
+    // specify the HDFS name node address when reading HDFS data
     public static final String KEY_HDFS_URL = "hdfs";
     // specify the path when reading from a file system
     public static final String KEY_PATH = "path";
-    // specify the hdfs input format when reading data from HDFS
+    // specify the HDFS input format when reading data from HDFS
     public static final String KEY_INPUT_FORMAT = "input-format";
-    // specifies the filesystem (localfs or hdfs) when using a filesystem data source
+    // specifies the filesystem (localfs or HDFS) when using a filesystem data source
     public static final String KEY_FILESYSTEM = "fs";
-    // specifies the address of the hdfs name node
+    // specifies the address of the HDFS name node
     public static final String KEY_HADOOP_FILESYSTEM_URI = "fs.defaultFS";
     // specifies the implementation class of the accessed HDFS instance
     public static final String KEY_HADOOP_FILESYSTEM_CLASS = "fs.hdfs.impl";
@@ -59,7 +59,7 @@ public class ExternalDataConstants {
     public static final String KEY_DATA_PARSER = "parser";
     public static final String KEY_HEADER = "header";
     public static final String KEY_READER = "reader";
-    public static final String KEY_READER_STREAM = "reader-stream";
+    public static final String KEY_READER_STREAM = "stream";
     public static final String KEY_TYPE_NAME = "type-name";
     public static final String KEY_RECORD_START = "record-start";
     public static final String KEY_RECORD_END = "record-end";
@@ -83,7 +83,7 @@ public class ExternalDataConstants {
     public static final String KEY_NODES = "nodes";
     // a string representing the password used to authenticate with the external data source
     public static final String KEY_PASSWORD = "password";
-    // an integer representing the number of raw records that can be bufferred in the parsing queue
+    // an integer representing the number of raw records that can be buffered in the parsing queue
     public static final String KEY_QUEUE_SIZE = "queue-size";
     // a comma-delimited list of integers representing the indexes of the meta fields in the raw record (i.e., "3,1,0,2" denotes that the first meta field is at index 3 in the actual record)
     public static final String KEY_META_INDEXES = "meta-indexes";
@@ -91,6 +91,16 @@ public class ExternalDataConstants {
     public static final String KEY_VALUE_INDEX = "value-index";
     // a string representing the format of the raw record in the value field in the data type
     public static final String KEY_VALUE_FORMAT = "value-format";
+    // a boolean indicating whether the feed is a change feed
+    public static final String KEY_IS_CHANGE_FEED = "change-feed";
+    // an integer representing the number of keys in a change feed
+    public static final String KEY_KEY_SIZE = "key-size";
+    // a boolean indicating whether the feed produces records with metadata
+    public static final String FORMAT_RECORD_WITH_METADATA = "record-with-metadata";
+    // a string representing the format of the record (for adapters that produce records with additional information such as PK or metadata)
+    public static final String KEY_RECORD_FORMAT = "record-format";
+    public static final String KEY_META_TYPE_NAME = "meta-type-name";
+    public static final String READER_STREAM = "stream";
     /**
      * HDFS class names
      */
@@ -112,13 +122,9 @@ public class ExternalDataConstants {
      * Builtin record readers
      */
     public static final String READER_HDFS = "hdfs";
-    public static final String READER_ADM = "adm";
-    public static final String READER_COUCHBASE = "couchbase";
-    public static final String READER_SEMISTRUCTURED = "semi-structured";
-    public static final String READER_DELIMITED = "delimited-text";
+    public static final String READER_KV = "key-value";
     public static final String READER_TWITTER_PUSH = "twitter-push";
     public static final String READER_TWITTER_PULL = "twitter-pull";
-    public static final String READER_LINE_SEPARATED = "line-separated";
 
     public static final String CLUSTER_LOCATIONS = "cluster-locations";
     public static final String SCHEDULER = "hdfs-scheduler";
@@ -138,9 +144,12 @@ public class ExternalDataConstants {
     public static final String FORMAT_ADM = "adm";
     public static final String FORMAT_JSON = "json";
     public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
-    public static final String FORMAT_TWEET = "tweet";
+    public static final String FORMAT_TWEET = "twitter-status";
     public static final String FORMAT_RSS = "rss";
-    public static final String FORMAT_RECORD_WITH_META = "record-with-meta";
+    public static final String FORMAT_SEMISTRUCTURED = "semi-structured";
+    public static final String FORMAT_LINE_SEPARATED = "line-separated";
+    public static final String FORMAT_HDFS_WRITABLE = "hdfs-writable";
+    public static final String FORMAT_KV = "kv";
 
     /**
      * input streams
@@ -148,6 +157,7 @@ public class ExternalDataConstants {
     public static final String STREAM_HDFS = "hdfs";
     public static final String STREAM_LOCAL_FILESYSTEM = "localfs";
     public static final String STREAM_SOCKET = "socket";
+    public static final String STREAM_SOCKET_CLIENT = "socket-client";
 
     /**
      * adapter aliases
@@ -164,12 +174,10 @@ public class ExternalDataConstants {
     public static final String ALIAS_TWITTER_PUSH_ADAPTER = "push_twitter";
     public static final String ALIAS_TWITTER_PULL_ADAPTER = "pull_twitter";
     public static final String ALIAS_CNN_ADAPTER = "cnn_feed";
-
-    /**
-     * For backward compatability
-     */
-    public static final String ADAPTER_LOCALFS_CLASSNAME = "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter";
-    public static final String ADAPTER_HDFS_CLASSNAME = "org.apache.asterix.external.dataset.adapter.HDFSAdapter";
+    public static final String ALIAS_FEED_WITH_META_ADAPTER = "feed_with_meta";
+    public static final String ALIAS_CHANGE_FEED_WITH_META_ADAPTER = "change_feed_with_meta";
+    // for testing purposes
+    public static final String ALIAS_TEST_CHANGE_ADAPTER = "test_change_feed";
 
     /**
      * Constant String values
@@ -206,4 +214,16 @@ public class ExternalDataConstants {
      */
     public static final String PARAMETER_OF_SIZE_ONE = "Value of size 1";
     public static final String LARGE_RECORD_ERROR_MESSAGE = "Record is too large";
+    public static final String KEY_RECORD_INDEX = "record-index";
+    public static final String FORMAT_DCP = "dcp";
+    public static final String KEY_KEY_INDEXES = "key-indexes";
+    public static final String KEY_KEY_INDICATORS = "key-indicators";
+    public static final String KEY_STREAM_SOURCE = "stream-source";
+    public static final String EXTERNAL = "external";
+    public static final String KEY_READER_FACTORY = "reader-factory";
+    public static final String READER_KV_TEST = "kv_test";
+    public static final String READER_RSS = "rss";
+    public static final String FORMAT_CSV = "csv";
+    public static final String TEST_RECORD_WITH_PK = "test-record-with-pk";
+
 }
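
To make the new change-feed constants concrete, here is a minimal sketch of a configuration assembled from the key and value strings defined above; it mirrors the change-feed-with-meta test DDL later in this patch, and the class name is illustrative only.

    import java.util.HashMap;
    import java.util.Map;

    public class ChangeFeedConfigSketch {
        public static void main(String[] args) {
            Map<String, String> conf = new HashMap<>();
            conf.put("reader", "kv_test");              // READER_KV_TEST
            conf.put("parser", "record-with-metadata"); // FORMAT_RECORD_WITH_METADATA
            conf.put("format", "dcp");                  // FORMAT_DCP
            conf.put("record-format", "json");          // KEY_RECORD_FORMAT
            conf.put("change-feed", "true");            // KEY_IS_CHANGE_FEED
            conf.put("key-indexes", "0");               // KEY_KEY_INDEXES
            conf.put("key-indicators", "1");            // KEY_KEY_INDICATORS
            System.out.println(conf);
        }
    }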

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index c36b629..32139f1 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -79,22 +79,29 @@ public class ExternalDataUtils {
         return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_HEADER));
     }
 
-    public static DataSourceType getDataSourceType(Map<String, String> configuration) throws AsterixException {
-        if (isDataSourceStreamProvider(configuration)) {
+    public static void validateParameters(Map<String, String> configuration) throws AsterixException {
+        String reader = configuration.get(ExternalDataConstants.KEY_READER);
+        if (reader == null) {
+            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER + " must be specified.");
+        }
+        String parser = configuration.get(ExternalDataConstants.KEY_PARSER);
+        if (parser == null) {
+            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_PARSER + " must be specified.");
+        }
+    }
+
+    public static DataSourceType getDataSourceType(Map<String, String> configuration) {
+        String reader = configuration.get(ExternalDataConstants.KEY_READER);
+        if ((reader != null) && reader.equals(ExternalDataConstants.READER_STREAM)) {
             return DataSourceType.STREAM;
-        } else if (isDataSourceRecordReader(configuration)) {
-            return DataSourceType.RECORDS;
         } else {
-            throw new AsterixException(
-                    "unable to determine whether input is a stream provider or a record reader. parameters: "
-                            + ExternalDataConstants.KEY_STREAM + " or " + ExternalDataConstants.KEY_READER
-                            + " must be specified");
+            return DataSourceType.RECORDS;
         }
     }
 
     public static boolean isExternal(String aString) {
-        return (aString != null && aString.contains(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR)
-                && aString.trim().length() > 1);
+        return ((aString != null) && aString.contains(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR)
+                && (aString.trim().length() > 1));
     }
 
     public static ClassLoader getClassLoader(String dataverse, String library) {
@@ -110,25 +117,21 @@ public class ExternalDataUtils {
     }
 
     public static IInputStreamProviderFactory createExternalInputStreamFactory(String dataverse, String stream)
-            throws InstantiationException, IllegalAccessException, ClassNotFoundException {
-        String libraryName = getLibraryName(stream);
-        String className = getExternalClassName(stream);
-        ClassLoader classLoader = getClassLoader(dataverse, libraryName);
-        return ((IInputStreamProviderFactory) (classLoader.loadClass(className).newInstance()));
+            throws AsterixException {
+        try {
+            String libraryName = getLibraryName(stream);
+            String className = getExternalClassName(stream);
+            ClassLoader classLoader = getClassLoader(dataverse, libraryName);
+            return ((IInputStreamProviderFactory) (classLoader.loadClass(className).newInstance()));
+        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
+            throw new AsterixException("Failed to create stream factory", e);
+        }
     }
 
     public static String getDataverse(Map<String, String> configuration) {
         return configuration.get(ExternalDataConstants.KEY_DATAVERSE);
     }
 
-    public static boolean isDataSourceStreamProvider(Map<String, String> configuration) {
-        return configuration.containsKey(ExternalDataConstants.KEY_STREAM);
-    }
-
-    private static boolean isDataSourceRecordReader(Map<String, String> configuration) {
-        return configuration.containsKey(ExternalDataConstants.KEY_READER);
-    }
-
     public static String getRecordFormat(Map<String, String> configuration) {
         String parserFormat = configuration.get(ExternalDataConstants.KEY_DATA_PARSER);
         return parserFormat != null ? parserFormat : configuration.get(ExternalDataConstants.KEY_FORMAT);
@@ -162,7 +165,7 @@ public class ExternalDataUtils {
             ATypeTag tag = null;
             if (recordType.getFieldTypes()[i].getTypeTag() == ATypeTag.UNION) {
                 List<IAType> unionTypes = ((AUnionType) recordType.getFieldTypes()[i]).getUnionList();
-                if (unionTypes.size() != 2 && unionTypes.get(0).getTypeTag() != ATypeTag.NULL) {
+                if ((unionTypes.size() != 2) && (unionTypes.get(0).getTypeTag() != ATypeTag.NULL)) {
                     throw new NotImplementedException("Non-optional UNION type is not supported.");
                 }
                 tag = unionTypes.get(1).getTypeTag();
@@ -213,24 +216,46 @@ public class ExternalDataUtils {
         return Boolean.parseBoolean(push);
     }
 
-    public static IRecordReaderFactory<?> createExternalRecordReaderFactory(String dataverse, String reader)
-            throws InstantiationException, IllegalAccessException, ClassNotFoundException {
-        String library = reader.substring(0, reader.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR));
-        ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(dataverse, library);
-        return (IRecordReaderFactory<?>) classLoader
-                .loadClass(reader.substring(reader.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR) + 1))
-                .newInstance();
+    public static IRecordReaderFactory<?> createExternalRecordReaderFactory(Map<String, String> configuration)
+            throws AsterixException {
+        String readerFactory = configuration.get(ExternalDataConstants.KEY_READER_FACTORY);
+        if (readerFactory == null) {
+            throw new AsterixException("To use the " + ExternalDataConstants.EXTERNAL + " reader, the parameter "
+                    + ExternalDataConstants.KEY_READER_FACTORY + " must be specified.");
+        }
+        String[] libraryAndFactory = readerFactory.split(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR);
+        if (libraryAndFactory.length != 2) {
+            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER_FACTORY
+                    + " must follow the format \"DataverseName.LibraryName#ReaderFactoryFullyQualifiedName\"");
+        }
+        String[] dataverseAndLibrary = libraryAndFactory[0].split("\\."); // String.split takes a regex; escape the dot
+        if (dataverseAndLibrary.length != 2) {
+            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER_FACTORY
+                    + " must follow the format \"DataverseName.LibraryName#ReaderFactoryFullyQualifiedName\"");
+        }
+
+        ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(dataverseAndLibrary[0],
+                dataverseAndLibrary[1]);
+        try {
+            return (IRecordReaderFactory<?>) classLoader.loadClass(libraryAndFactory[1]).newInstance();
+        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
+            throw new AsterixException("Failed to create record reader factory", e);
+        }
     }
 
     public static IDataParserFactory createExternalParserFactory(String dataverse, String parserFactoryName)
-            throws InstantiationException, IllegalAccessException, ClassNotFoundException {
-        String library = parserFactoryName.substring(0,
-                parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR));
-        ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(dataverse, library);
-        return (IDataParserFactory) classLoader
-                .loadClass(parserFactoryName
-                        .substring(parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR) + 1))
-                .newInstance();
+            throws AsterixException {
+        try {
+            String library = parserFactoryName.substring(0,
+                    parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR));
+            ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(dataverse, library);
+            return (IDataParserFactory) classLoader
+                    .loadClass(parserFactoryName
+                            .substring(parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR) + 1))
+                    .newInstance();
+        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
+            throw new AsterixException("Failed to create an external parser factory", e);
+        }
     }
 
     public static boolean isFeed(Map<String, String> configuration) {
@@ -265,4 +290,53 @@ public class ExternalDataUtils {
                 ? Integer.parseInt(configuration.get(ExternalDataConstants.KEY_QUEUE_SIZE))
                 : ExternalDataConstants.DEFAULT_QUEUE_SIZE;
     }
+
+    public static boolean isRecordWithMeta(Map<String, String> configuration) {
+        return configuration.containsKey(ExternalDataConstants.KEY_META_TYPE_NAME);
+    }
+
+    public static void setRecordWithMeta(Map<String, String> configuration, String booleanString) {
+        configuration.put(ExternalDataConstants.FORMAT_RECORD_WITH_METADATA, booleanString);
+    }
+
+    public static boolean isChangeFeed(Map<String, String> configuration) {
+        return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_IS_CHANGE_FEED));
+    }
+
+    public static int getNumberOfKeys(Map<String, String> configuration) throws AsterixException {
+        String keyIndexes = configuration.get(ExternalDataConstants.KEY_KEY_INDEXES);
+        if (keyIndexes == null) {
+            throw new AsterixException(
+                    "A change feed must have the parameter " + ExternalDataConstants.KEY_KEY_INDEXES);
+        }
+        return keyIndexes.split(",").length;
+    }
+
+    public static void setNumberOfKeys(Map<String, String> configuration, int value) {
+        configuration.put(ExternalDataConstants.KEY_KEY_SIZE, String.valueOf(value));
+    }
+
+    public static void setChangeFeed(Map<String, String> configuration, String booleanString) {
+        configuration.put(ExternalDataConstants.KEY_IS_CHANGE_FEED, booleanString);
+    }
+
+    public static int[] getPKIndexes(Map<String, String> configuration) {
+        String keyIndexes = configuration.get(ExternalDataConstants.KEY_KEY_INDEXES);
+        String[] stringIndexes = keyIndexes.split(",");
+        int[] intIndexes = new int[stringIndexes.length];
+        for (int i = 0; i < stringIndexes.length; i++) {
+            intIndexes[i] = Integer.parseInt(stringIndexes[i]);
+        }
+        return intIndexes;
+    }
+
+    public static int[] getPKSourceIndicators(Map<String, String> configuration) {
+        String keyIndicators = configuration.get(ExternalDataConstants.KEY_KEY_INDICATORS);
+        String[] stringIndicators = keyIndicators.split(",");
+        int[] intIndicators = new int[stringIndicators.length];
+        for (int i = 0; i < stringIndicators.length; i++) {
+            intIndicators[i] = Integer.parseInt(stringIndicators[i]);
+        }
+        return intIndicators;
+    }
 }
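
A standalone sketch of the two parsing steps introduced above: splitting the "DataverseName.LibraryName#ReaderFactoryFullyQualifiedName" value of reader-factory (assuming '#' as the library separator, as the error message states; note that String.split takes a regular expression, so the dot must be escaped), and turning a comma-separated key-indexes value into an int array as getPKIndexes does. All names and values are illustrative only.

    import java.util.Arrays;

    public class ParsingSketch {
        public static void main(String[] args) {
            // reader-factory parsing, as in createExternalRecordReaderFactory
            String readerFactory = "mydataverse.mylib#org.example.MyReaderFactory";
            String[] libraryAndFactory = readerFactory.split("#");
            String[] dataverseAndLibrary = libraryAndFactory[0].split("\\."); // escaped dot
            System.out.println(dataverseAndLibrary[0] + " / " + dataverseAndLibrary[1]
                    + " -> " + libraryAndFactory[1]);

            // key-indexes parsing, as in getPKIndexes
            String[] stringIndexes = "0,2".split(",");
            int[] intIndexes = new int[stringIndexes.length];
            for (int i = 0; i < stringIndexes.length; i++) {
                intIndexes[i] = Integer.parseInt(stringIndexes[i]);
            }
            System.out.println(Arrays.toString(intIndexes)); // prints [0, 2]
        }
    }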

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
index 4737727..fc15d3c 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
@@ -30,15 +30,16 @@ import java.nio.file.StandardOpenOption;
 import java.util.TreeSet;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public class FeedLogManager {
 
     public enum LogEntryType {
-        START,      // partition start
-        END,        // partition end
-        COMMIT,     // a record commit within a partition
-        SNAPSHOT    // an identifier that partitions with identifiers before this one should be
-                    // ignored
+        START, // partition start
+        END, // partition end
+        COMMIT, // a record commit within a partition
+        SNAPSHOT // a marker indicating that partitions with identifiers before this
+                 // one should be ignored
     }
 
     public static final String PROGRESS_LOG_FILE_NAME = "progress.log";
@@ -48,20 +49,24 @@ public class FeedLogManager {
     public static final String END_PREFIX = "e:";
     public static final int PREFIX_SIZE = 2;
     private String currentPartition;
-    private TreeSet<String> completed;
-    private Path dir;
+    private final TreeSet<String> completed;
+    private final Path dir;
     private BufferedWriter progressLogger;
     private BufferedWriter errorLogger;
     private BufferedWriter recordLogger;
-    private StringBuilder stringBuilder = new StringBuilder();
-
-    public FeedLogManager(File file) throws IOException {
-        this.dir = file.toPath();
-        this.completed = new TreeSet<String>();
-        if (!exists()) {
-            create();
+    private final StringBuilder stringBuilder = new StringBuilder();
+
+    public FeedLogManager(File file) throws HyracksDataException {
+        try {
+            this.dir = file.toPath();
+            this.completed = new TreeSet<String>();
+            if (!exists()) {
+                create();
+            }
+            open();
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
         }
-        open();
     }
 
     public void endPartition() throws IOException {


[18/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/pom.xml
----------------------------------------------------------------------
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index 09a4c4c..b4a1a40 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -95,29 +95,48 @@
                 </executions>
             </plugin>
             <plugin>
-    <artifactId>maven-resources-plugin</artifactId>
-    <executions>
-        <execution>
-            <id>copy-external-library</id>
-            <phase>generate-resources</phase>
-            <goals>
-                <goal>copy-resources</goal>
-            </goals>
-            <configuration>
-                <outputDirectory>src/test/resources/externallib</outputDirectory>
-                <overwrite>true</overwrite>
-                <resources>
-                    <resource>
-                        <directory>../asterix-external-data/target</directory>
-                        <includes>
-                            <include>testlib-zip-binary-assembly.zip</include>
-                        </includes>
-                    </resource>
-                </resources>
-            </configuration>
-        </execution>
-    </executions>
-</plugin>
+                <artifactId>maven-resources-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>copy-external-library</id>
+                        <phase>generate-resources</phase>
+                        <goals>
+                            <goal>copy-resources</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>src/test/resources/externallib</outputDirectory>
+                            <overwrite>true</overwrite>
+                            <resources>
+                                <resource>
+                                    <directory>../asterix-external-data/target</directory>
+                                    <includes>
+                                        <include>testlib-zip-binary-assembly.zip</include>
+                                    </includes>
+                                </resource>
+                            </resources>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>copy-beer-csv</id>
+                        <phase>generate-resources</phase>
+                        <goals>
+                            <goal>copy-resources</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>data/csv</outputDirectory>
+                            <overwrite>true</overwrite>
+                            <resources>
+                                <resource>
+                                    <directory>../asterix-external-data/src/test/resources</directory>
+                                    <includes>
+                                        <include>beer.csv</include>
+                                    </includes>
+                                </resource>
+                            </resources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>
     <dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java b/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java
index 2f497f9..2b2aa90 100644
--- a/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java
@@ -142,7 +142,7 @@ public class ExternalIndexingOperations {
 
     public static IBinaryComparatorFactory[] getComparatorFactories(Dataset dataset) {
         ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
-        return IndexingConstants.getComparatorFactories((dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT)));
+        return IndexingConstants.getComparatorFactories(dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT));
     }
 
     public static IBinaryComparatorFactory[] getBuddyBtreeComparatorFactories() {
@@ -266,8 +266,8 @@ public class ExternalIndexingOperations {
             RecordDescriptor indexerDesc, AqlMetadataProvider metadataProvider) throws Exception {
         ExternalDatasetDetails externalDatasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
         Map<String, String> configuration = externalDatasetDetails.getProperties();
-        IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(externalDatasetDetails.getAdapter(),
-                configuration, (ARecordType) itemType, files, true);
+        IAdapterFactory adapterFactory = AdapterFactoryProvider.getIndexingAdapterFactory(
+                externalDatasetDetails.getAdapter(), configuration, (ARecordType) itemType, files, true, null);
         return new Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint>(
                 new ExternalDataScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory),
                 adapterFactory.getPartitionConstraint());

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
index 362be2e..6b54fbc 100644
--- a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
+++ b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
@@ -2401,7 +2401,7 @@ public class QueryTranslator extends AbstractLangTranslator {
         bfs.initialize(metadataProvider.getMetadataTxnContext());
 
         CompiledSubscribeFeedStatement csfs = new CompiledSubscribeFeedStatement(bfs.getSubscriptionRequest(),
-                bfs.getQuery(), bfs.getVarCounter());
+                bfs.getVarCounter());
         metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, "" + Boolean.TRUE);
         metadataProvider.getConfig().put(FeedActivityDetails.FEED_POLICY_NAME, "" + bfs.getPolicy());
         metadataProvider.getConfig().put(FeedActivityDetails.COLLECT_LOCATIONS,

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
index 85ff6be..06d2b71 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
@@ -151,10 +151,16 @@ public class DatasetOperations {
         boolean temp = dataset.getDatasetDetails().isTemp();
         ARecordType itemType = (ARecordType) metadata.findType(dataset.getItemTypeDataverseName(),
                 dataset.getItemTypeName());
+        // get meta item type
+        ARecordType metaItemType = null;
+        if (dataset.hasMetaPart()) {
+            metaItemType = (ARecordType) metadata.findType(dataset.getMetaItemTypeDataverseName(),
+                    dataset.getMetaItemTypeName());
+        }
         JobSpecification spec = JobSpecificationUtils.createJobSpecification();
         IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                 itemType, format.getBinaryComparatorFactoryProvider());
-        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
+        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType, metaItemType);
         int[] bloomFilterKeyFields = DatasetUtils.createBloomFilterKeyFields(dataset);
 
         ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java b/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
index 60a543c..65f71b5 100644
--- a/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
+++ b/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
@@ -71,11 +71,8 @@ public class OptimizerTest {
 
     @BeforeClass
     public static void setUp() throws Exception {
-        // File outdir = new File(PATH_ACTUAL);
-        // outdir.mkdirs();
-
         System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
-        File outdir = new File(PATH_ACTUAL);
+        final File outdir = new File(PATH_ACTUAL);
         outdir.mkdirs();
 
         HDFSCluster.getInstance().setup();
@@ -89,7 +86,6 @@ public class OptimizerTest {
 
     @AfterClass
     public static void tearDown() throws Exception {
-        // _bootstrap.stop();
         File outdir = new File(PATH_ACTUAL);
         File[] files = outdir.listFiles();
         if (files == null || files.length == 0) {
@@ -108,9 +104,7 @@ public class OptimizerTest {
                 suiteBuildPerFile(innerfile, testArgs, subdir);
             }
         }
-        if (file.isFile() && file.getName().endsWith(EXTENSION_QUERY)
-        // && !ignore.contains(path + file.getName())
-        ) {
+        if (file.isFile() && file.getName().endsWith(EXTENSION_QUERY)) {
             String resultFileName = AsterixTestHelper.extToResExt(file.getName(), EXTENSION_RESULT);
             File expectedFile = new File(PATH_EXPECTED + path + resultFileName);
             File actualFile = new File(PATH_ACTUAL + SEPARATOR + path.replace(SEPARATOR, "_") + resultFileName);
@@ -132,11 +126,11 @@ public class OptimizerTest {
         return testArgs;
     }
 
-    private File actualFile;
-    private File expectedFile;
-    private File queryFile;
+    private final File actualFile;
+    private final File expectedFile;
+    private final File queryFile;
 
-    public OptimizerTest(File queryFile, File expectedFile, File actualFile) {
+    public OptimizerTest(final File queryFile, final File expectedFile, final File actualFile) {
         this.queryFile = queryFile;
         this.expectedFile = expectedFile;
         this.actualFile = actualFile;
@@ -186,7 +180,6 @@ public class OptimizerTest {
             try {
                 while ((lineExpected = readerExpected.readLine()) != null) {
                     lineActual = readerActual.readLine();
-                    // Assert.assertEquals(lineExpected, lineActual);
                     if (lineActual == null) {
                         throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
                                 + lineExpected + "\n> ");
@@ -198,7 +191,6 @@ public class OptimizerTest {
                     ++num;
                 }
                 lineActual = readerActual.readLine();
-                // Assert.assertEquals(null, lineActual);
                 if (lineActual != null) {
                     throw new Exception(
                             "Result for " + queryFile + " changed at line " + num + ":\n< \n> " + lineActual);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/optimizerts/results/disjunction-to-join-delete-3.plan
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/optimizerts/results/disjunction-to-join-delete-3.plan b/asterix-app/src/test/resources/optimizerts/results/disjunction-to-join-delete-3.plan
index 37ccaf2..bca0381 100644
--- a/asterix-app/src/test/resources/optimizerts/results/disjunction-to-join-delete-3.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/disjunction-to-join-delete-3.plan
@@ -9,7 +9,7 @@
                 -- INSERT_DELETE  |PARTITIONED|
                   -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                     -- MATERIALIZE  |PARTITIONED|
-                      -- HASH_PARTITION_EXCHANGE [$$10]  |PARTITIONED|
+                      -- HASH_PARTITION_EXCHANGE [$$8]  |PARTITIONED|
                         -- ASSIGN  |PARTITIONED|
                           -- STREAM_PROJECT  |PARTITIONED|
                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
@@ -18,9 +18,9 @@
                                   -- UNNEST  |UNPARTITIONED|
                                     -- EMPTY_TUPLE_SOURCE  |UNPARTITIONED|
                                 -- HASH_PARTITION_EXCHANGE [$$9]  |PARTITIONED|
-                                  -- STREAM_PROJECT  |PARTITIONED|
-                                    -- ASSIGN  |PARTITIONED|
+                                  -- ASSIGN  |PARTITIONED|
+                                    -- STREAM_PROJECT  |PARTITIONED|
                                       -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                         -- DATASOURCE_SCAN  |PARTITIONED|
                                           -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
-                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.1.ddl.aql
new file mode 100644
index 0000000..79b417e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.1.ddl.aql
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Delete from a dataset with meta type
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type RecordType as open{
+};
+
+create type MetaType as open{
+id:string
+};
+
+create dataset DatasetWithMeta(RecordType) with meta(MetaType)primary key meta().id;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.2.update.aql
new file mode 100644
index 0000000..ac9c2f7
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/delete-dataset-with-meta/delete-dataset-with-meta.2.update.aql
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Delete from a dataset with meta type
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+use dataverse test;
+
+delete $record from dataset DatasetWithMeta
+where $record.id="temp";
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.1.ddl.aql
new file mode 100644
index 0000000..4ec814d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.1.ddl.aql
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Insert into a dataset with meta type
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type RecordType as open{
+};
+
+create type MetaType as open{
+id:string
+};
+
+create dataset DatasetWithMeta(RecordType) with meta(MetaType)primary key meta().id;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.2.update.aql
new file mode 100644
index 0000000..27ddfea
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-dataset-with-meta/insert-dataset-with-meta.2.update.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Insert into a dataset with meta type
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+use dataverse test;
+
+insert into dataset DatasetWithMeta (
+{"id": 2, "name": "Person Two", "hobbies": {{"Rock", "Jazz"}}}
+);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.1.ddl.aql
new file mode 100644
index 0000000..b426c50
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.1.ddl.aql
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Upsert into a dataset with meta type
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type RecordType as open{
+};
+
+create type MetaType as open{
+id:string
+};
+
+create dataset DatasetWithMeta(RecordType) with meta(MetaType)primary key meta().id;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.2.update.aql
new file mode 100644
index 0000000..24b9230
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/upsert-dataset-with-meta/upsert-dataset-with-meta.2.update.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Upsert into a dataset with meta type
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+use dataverse test;
+
+upsert into dataset DatasetWithMeta (
+{"id": 2, "name": "Person Two", "hobbies": {{"Rock", "Jazz"}}}
+);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql
index 9d93457..9a7f043 100644
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql
@@ -21,6 +21,7 @@ use dataverse externallibtest;
 
 create external dataset Condor(Classad) using localfs(
 ("path"="asterix_nc1://data/external-parser/jobads.new"),
-("reader"="semi-structured"),
-("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
-("reader-stream"="localfs"));
+("format"="semi-structured"),
+("record-start"="["),
+("record-end"="]"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql
index b47ccc3..5b2d50c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql
@@ -21,6 +21,5 @@ use dataverse externallibtest;
 
 create external dataset Condor(Classad) using localfs(
 ("path"="asterix_nc1://data/external-parser/jobads.old"),
-("reader"="line-separated"),
-("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
-("reader-stream"="localfs"));
+("format"="line-separated"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.1.ddl.aql
new file mode 100644
index 0000000..d3317e4
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.1.ddl.aql
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed with meta-data and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+
+drop dataverse KeyVerse if exists;
+create dataverse KeyVerse;
+use dataverse KeyVerse;
+
+create type DocumentType as open{
+};
+
+create type KVMetaType as open{
+"key":string,
+bucket:string,
+vbucket:int32,
+seq:int64,
+cas:int64,
+creationTime:int64,
+expiration:int32,
+flags:int32,
+revSeq:int64,
+lockTime:int32
+};
+
+create dataset KVStore(DocumentType) with meta(KVMetaType)primary key meta()."key";
+
+create feed KVChangeStream using adapter(
+    ("type-name"="DocumentType"),
+    ("meta-type-name"="KVMetaType"),
+    ("reader"="kv_test"),
+    ("parser"="record-with-metadata"),
+    ("format"="dcp"),
+    ("record-format"="json"),
+    ("change-feed"="true"),
+    ("key-indexes"="0"),
+    ("key-indicators"="1"),
+    ("num-of-records"="1000")
+);
\ No newline at end of file
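
For reference, a minimal sketch of how the adapter parameters in this DDL are consumed by the new ExternalDataUtils helpers: "change-feed"="true" parses to a boolean and "key-indexes"="0" yields a single primary-key index. Constant key strings are inlined and the class name is illustrative only.

    import java.util.HashMap;
    import java.util.Map;

    public class FeedParamsSketch {
        public static void main(String[] args) {
            Map<String, String> conf = new HashMap<>();
            conf.put("change-feed", "true");
            conf.put("key-indexes", "0");
            conf.put("key-indicators", "1");

            boolean isChangeFeed = Boolean.parseBoolean(conf.get("change-feed")); // as isChangeFeed()
            int numberOfKeys = conf.get("key-indexes").split(",").length;         // as getNumberOfKeys()
            System.out.println(isChangeFeed + ", keys=" + numberOfKeys);          // true, keys=1
        }
    }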

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.2.update.aql
new file mode 100644
index 0000000..5951ac8
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.2.update.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed with meta-data and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+use dataverse KeyVerse;
+
+set wait-for-completion-feed "true";
+connect feed KVChangeStream to dataset KVStore;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.3.sleep.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.3.sleep.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.3.sleep.aql
new file mode 100644
index 0000000..db6954e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.3.sleep.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+4000
\ No newline at end of file
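
The bare value above is the test driver's sleep step, presumably 4000
milliseconds of wait so that ingestion can settle before the next statement in
the test runs.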

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.4.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.4.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.4.ddl.aql
new file mode 100644
index 0000000..d282e66
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed-with-meta-pk-in-meta/change-feed-with-meta-pk-in-meta.4.ddl.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+drop dataverse KeyVerse;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.1.ddl.aql
new file mode 100644
index 0000000..2dad901
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.1.ddl.aql
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type FeedRecordType as closed {
+    id: int64,
+    name: string
+}
+
+create dataset FeedDataset(FeedRecordType) primary key id;
+create feed ChangeFeed using test_change_feed(
+    ("type-name"="FeedRecordType"),
+    ("record-format"="adm"),
+    ("format"="test-csv"),
+    ("delimiter"=","),
+    ("key-indexes"="0"),
+    ("reader"="test-record-with-pk"),
+    ("parser"="test-record-with-pk"),
+    ("change-feed"="true")
+);
\ No newline at end of file
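
For contrast with an ordinary insert-only feed, dropping ("change-feed"="true")
and the key-* parameters gives the plain localfs pattern used throughout this
change; a sketch with a hypothetical PlainFeed name (the path, format, and
type-name values are borrowed from the feeds_02 test below):

use dataverse feeds;

create feed PlainFeed using localfs(
    ("path"="asterix_nc1://data/twitter/obamatweets.adm"),
    ("format"="adm"),
    ("type-name"="TweetType")
);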

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.2.update.aql
new file mode 100644
index 0000000..ff6efcb
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.2.update.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+use dataverse feeds;
+
+set wait-for-completion-feed "true";
+connect feed ChangeFeed to dataset FeedDataset;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.3.query.aql
new file mode 100644
index 0000000..471c5a6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.3.query.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+
+use dataverse feeds;
+
+for $x in dataset FeedDataset
+order by $x.id
+return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.4.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.4.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.4.ddl.aql
new file mode 100644
index 0000000..7686b90
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/change-feed/change-feed.4.ddl.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+drop dataverse feeds;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql
index 8f0756f..1ec1f45 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql
@@ -27,7 +27,7 @@ create dataset Condor(Classad) primary key GlobalJobId;
 
 create feed CondorFeed using push_localfs(
 ("path"="asterix_nc1://data/external-parser/dropbox/jobads1.txt,asterix_nc1://data/external-parser/dropbox/jobads2.txt"),
-("reader"="semi-structured"),
+("reader"="localfs"),
+("format"="semi-structured"),
 ("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
-("reader-stream"="localfs"),
 ("type-name"="Classad"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-filtered-dataset/feed-with-filtered-dataset.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-filtered-dataset/feed-with-filtered-dataset.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-filtered-dataset/feed-with-filtered-dataset.1.ddl.aql
index 260dd1c..8bd1e3d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-filtered-dataset/feed-with-filtered-dataset.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-filtered-dataset/feed-with-filtered-dataset.1.ddl.aql
@@ -39,7 +39,7 @@ create type FacebookMessageType as closed {
 create dataset FacebookMessages(FacebookMessageType)
 primary key message-id with filter on send-time;
 
-create feed MessageFeed using push_localfs(
+create feed MessageFeed using localfs(
 ("path"="asterix_nc1://data/fbm-with-send-time.adm"),
 ("format"="adm"),
 ("type-name"="FacebookMessageType"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.ddl.aql
new file mode 100644
index 0000000..ec4a712
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.ddl.aql
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed with meta-data and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+
+drop dataverse KeyVerse if exists;
+create dataverse KeyVerse;
+use dataverse KeyVerse;
+
+create type DocumentType as open{
+};
+
+create type KVMetaType as open{
+id:string,
+flags:int32,
+expiration:int32,
+cas:int64,
+rev:int64,
+vbid:int32,
+dtype:int32
+};
+
+create dataset KVStore(DocumentType) with meta(KVMetaType)primary key meta().id;
+
+create feed KVStream using adapter(
+ ("reader"="localfs"),
+ ("parser"="record-with-metadata"),
+ ("type-name"="DocumentType"),
+ ("meta-type-name"="KVMetaType"),
+ ("path"="asterix_nc1://data/csv/beer.csv"),
+ ("format"="csv"),
+ ("delimiter"=","),
+ ("record-format"="adm"),
+ ("record-index"="4"),
+ ("header"="true")
+);
\ No newline at end of file
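
Here the CSV reader takes the record body from column 4 ("record-index"="4")
while the remaining columns populate KVMetaType. Since the primary key lives in
the meta record, a query would presumably reach it through the meta() function;
a sketch assuming meta($d) is the query-side counterpart of the DDL's
meta().id:

use dataverse KeyVerse;

for $d in dataset KVStore
return { "id": meta($d).id, "doc": $d };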

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.2.update.aql
new file mode 100644
index 0000000..c4748ea
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.2.update.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed with meta-data and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+use dataverse KeyVerse;
+
+set wait-for-completion-feed "true";
+connect feed KVStream to dataset KVStore;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.3.sleep.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.3.sleep.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.3.sleep.aql
new file mode 100644
index 0000000..db6954e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.3.sleep.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+4000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.4.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.4.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.4.ddl.aql
new file mode 100644
index 0000000..d282e66
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.4.ddl.aql
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+drop dataverse KeyVerse;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
index 353b57e..589c93d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
@@ -17,8 +17,8 @@
  * under the License.
  */
 /*
- * Description  : Create a feed dataset that uses the feed simulator adapter. 
-                  Begin ingestion and verify contents of the dataset post completion.  
+ * Description  : Create a feed dataset that uses the feed simulator adapter.
+                  Begin ingestion and verify contents of the dataset post completion.
  * Expected Res : Success
  * Date         : 24th Dec 2012
  */
@@ -27,18 +27,20 @@ drop dataverse feeds if exists;
 create dataverse feeds;
 use dataverse feeds;
 
-
 create type TweetType as closed {
   id: string,
   username : string,
   location : string,
   text : string,
   timestamp : string
-};      
+};
 
 create dataset Tweets(TweetType)
 primary key id;
 
 create feed  TweetFeed
-using file_feed
-(("fs"="localfs"),("path"="asterix_nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
+using localfs
+(("path"="asterix_nc1://data/twitter/obamatweets.adm"),
+("format"="adm"),
+("type-name"="TweetType"),
+("tuple-interval"="10"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
index 10fa5bb..70ea8d6 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
@@ -54,5 +54,12 @@ create dataset SyntheticTweets(TweetMessageType)
 primary key id;
 
 create feed  SyntheticTweetFeed
-using twitter_firehose
-(("duration"="5"),("tps"="50"),("type-name"="TweetMessageType"),("reader"="adm"),("format"="adm"),("reader-stream"="twitter_firehose"),("tput-duration"="5"),("dataverse-dataset"="feeds:SyntheticTweets"),("mode"="controlled"));
+using twitter_firehose(
+("duration"="5"),
+("tps"="50"),
+("type-name"="TweetMessageType"),
+("format"="adm"),
+("reader-stream"="twitter_firehose"),
+("tput-duration"="5"),
+("dataverse-dataset"="feeds:SyntheticTweets"),
+("mode"="controlled"));
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
index 85b4747..af15e99 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
@@ -57,4 +57,10 @@ create index ngram_index on SyntheticTweets(message_text) type ngram(3);
 
 create feed  SyntheticTweetFeed
 using twitter_firehose
-(("duration"="5"),("tps"="50"),("type-name"="TweetMessageType"),("tput-duration"="5"),("dataverse-dataset"="feeds:SyntheticTweets"),("reader"="adm"),("format"="adm"),("reader-stream"="twitter_firehose"),("mode"="controlled"));
+(("duration"="5"),
+("tps"="50"),
+("type-name"="TweetMessageType"),
+("tput-duration"="5"),
+("dataverse-dataset"="feeds:SyntheticTweets"),
+("format"="adm"),
+("mode"="controlled"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
index 8664745..6714850 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
@@ -57,4 +57,11 @@ create index message_text on SyntheticTweets(message_text) type btree;
 
 create feed  SyntheticTweetFeed
 using twitter_firehose
-(("duration"="5"),("tps"="50"),("tput-duration"="5"),("type-name"="TweetMessageType"),("dataverse-dataset"="feeds:SyntheticTweets"),("reader"="adm"),("format"="adm"),("reader-stream"="twitter_firehose"),("mode"="controlled"));
+(("duration"="5"),
+("tps"="50"),
+("tput-duration"="5"),
+("type-name"="TweetMessageType"),
+("dataverse-dataset"="feeds:SyntheticTweets"),
+("format"="adm"),
+("reader-stream"="twitter_firehose"),
+("mode"="controlled"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.1.ddl.aql
index 82f44d4..75410f4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.1.ddl.aql
@@ -36,7 +36,7 @@ create type TweetType as closed {
   location : string,
   text : string,
   timestamp : string
-}      
+}
 
 create dataset Tweets(TweetType)
 primary key id;
@@ -44,5 +44,8 @@ primary key id;
 create index usernameIdx on Tweets(username) type btree;
 
 create feed TweetFeed
-using file_feed
-(("fs"="localfs"),("path"="asterix_nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
+using localfs
+(("path"="asterix_nc1://data/twitter/obamatweets.adm"),
+("format"="adm"),
+("type-name"="TweetType"),
+("tuple-interval"="10"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_11/feeds_11.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_11/feeds_11.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_11/feeds_11.1.ddl.aql
index f8ec383..f139d01 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_11/feeds_11.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_11/feeds_11.1.ddl.aql
@@ -33,13 +33,14 @@ create type TweetType as closed {
   location : string,
   text : string,
   timestamp : string
-}      
+}
 
 create dataset Tweets(TweetType)
 primary key id;
 
 create feed TweetFeed
-using file_feed
-(("fs"="localfs"),("path"="asterix_nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
-
-
+using localfs
+(("path"="asterix_nc1://data/twitter/obamatweets.adm"),
+("format"="adm"),
+("type-name"="TweetType"),
+("tuple-interval"="10"));
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.1.ddl.aql
index cf479fc..dbf77ef 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.1.ddl.aql
@@ -36,13 +36,14 @@ create type TweetType as closed {
   location : string,
   text : string,
   timestamp : string
-}      
+}
 
 create dataset Tweets(TweetType)
 primary key id;
 
 create feed TweetFeed
-using file_feed
-(("fs"="localfs"),("path"="asterix_nc1://data/twitter/obamatweets_duplicate.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
-
-
+using localfs
+(("path"="asterix_nc1://data/twitter/obamatweets_duplicate.adm"),
+("format"="adm"),
+("type-name"="TweetType"),
+("tuple-interval"="10"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
index f8ec383..237c949 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
@@ -33,13 +33,16 @@ create type TweetType as closed {
   location : string,
   text : string,
   timestamp : string
-}      
+}
 
 create dataset Tweets(TweetType)
 primary key id;
 
 create feed TweetFeed
-using file_feed
-(("fs"="localfs"),("path"="asterix_nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
+using localfs
+(("path"="asterix_nc1://data/twitter/obamatweets.adm"),
+("format"="adm"),
+("type-name"="TweetType"),
+("tuple-interval"="10"));
 
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
index 5e86bc5..000ef5b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
@@ -36,5 +36,10 @@ create type EmployeeType as closed {
 
 create external dataset EmployeeDataset(EmployeeType)
 using adapter
-(("reader"="delimited-text"),("reader-stream"="hdfs"),("parser"="delimited-text"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/large-record.txt"),("input-format"="text-input-format"),("delimiter"="|"));
+(("reader"="hdfs"),
+("parser"="delimited-text"),
+("hdfs"="hdfs://127.0.0.1:31888"),
+("path"="/asterix/large-record.txt"),
+("input-format"="text-input-format"),
+("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
index 437980f..460c9b4 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
@@ -33,12 +33,15 @@ create type TweetType as closed {
   location : string,
   text : string,
   timestamp : string
-}      
+}
 
 create dataset Tweets(TweetType)
 primary key id
 hints(cardinality=200);
 
 create feed TweetFeed
-using file_feed
-(("fs"="localfs"),("path"="asterix_nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
+using localfs
+(("path"="asterix_nc1://data/twitter/obamatweets.adm"),
+("format"="adm"),
+("type-name"="TweetType"),
+("tuple-interval"="10"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.1.ddl.aql
new file mode 100644
index 0000000..31639bd
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.1.ddl.aql
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a dataset with meta type and load it
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type RecordType as open{
+};
+
+create type MetaType as open{
+id:string
+};
+
+create dataset DatasetWithMeta(RecordType) with meta(MetaType)primary key meta().id;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.2.update.aql
new file mode 100644
index 0000000..dd5a06d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/load/dataset-with-meta/dataset-with-meta.2.update.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a dataset with meta type and load it
+ * Expected Res : Failure
+ * Date         : 15th Mar 2016
+ */
+
+use dataverse test;
+
+load dataset DatasetWithMeta using localfs(("format"="adm"));
\ No newline at end of file
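
The expected failure is the point of this test: a bulk load supplies only
record bodies, so a dataset whose primary key lives in the meta record
presumably cannot be populated this way; a feed with a record-with-metadata
parser, as exercised above, is the ingestion path.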

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/open-index-enforced/external-indexing/adm-format/adm-format.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-index-enforced/external-indexing/adm-format/adm-format.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/open-index-enforced/external-indexing/adm-format/adm-format.1.ddl.aql
index 4ee1f17..4b43a18 100644
--- a/asterix-app/src/test/resources/runtimets/queries/open-index-enforced/external-indexing/adm-format/adm-format.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/open-index-enforced/external-indexing/adm-format/adm-format.1.ddl.aql
@@ -41,6 +41,6 @@ create type MyRecord as open {
 
 create external dataset MyData(MyRecord)
 using hdfs
-(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/spatialData.json"),("input-format"="text-input-format"),("input-format"="text-input-format"),("format"="adm"));
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/spatialData.json"),("input-format"="text-input-format"),("format"="adm"));
 
 create index idx on MyData(id:int64) enforced;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql
index 4ac89ed..9be3c1b 100644
--- a/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/upsert/nested-index/nested-index.2.update.aql
@@ -20,11 +20,15 @@ use dataverse test;
 
 load dataset UpsertTo
 using localfs
-(("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+(("path"="asterix_nc1://data/tpch0.001/orders.tbl"),
+("format"="delimited-text"),
+("delimiter"="|")) pre-sorted;
 
 load dataset UpsertFrom
 using localfs
-(("path"="asterix_nc1://data/tpch0.001/other-orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+(("path"="asterix_nc1://data/tpch0.001/other-orders.tbl"),
+("format"="delimited-text"),
+("delimiter"="|")) pre-sorted;
 
 insert into dataset Orders
 (
@@ -40,4 +44,4 @@ upsert into dataset Orders
  return {
   "nested" : $c
  }
-);
+);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.sqlpp
index 8efdbd2..15aa43f 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.sqlpp
@@ -41,4 +41,8 @@ create type feeds.TweetType as
 
 create  table Tweets(TweetType) primary key id hints ("CARDINALITY"="200");
 
-create  primary feed TweetFeed using file_feed (("fs"="localfs"),("path"="asterix_nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
+create  primary feed TweetFeed using localfs(
+("path"="asterix_nc1://data/twitter/obamatweets.adm"),
+("format"="adm"),
+("type-name"="TweetType"),
+("tuple-interval"="10"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/distinct_by/distinct_by.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/distinct_by/distinct_by.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/distinct_by/distinct_by.2.update.sqlpp
index 8353a07..ffd9143 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/distinct_by/distinct_by.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/distinct_by/distinct_by.2.update.sqlpp
@@ -20,5 +20,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 

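These SQL++ hunks, like the ones that follow, simply replace the fully
qualified adapter class name
"org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" with its
localfs alias; the load parameters are unchanged.
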
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.2.update.sqlpp
index 24ff26a..8e61b76 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.2.update.sqlpp
@@ -26,15 +26,15 @@
 use tpch;
 
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table SelectedNation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table SelectedNation using localfs (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.2.update.sqlpp
index 24ff26a..8e61b76 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.2.update.sqlpp
@@ -26,15 +26,15 @@
 use tpch;
 
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table SelectedNation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table SelectedNation using localfs (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.2.update.sqlpp
index 8353a07..ffd9143 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.2.update.sqlpp
@@ -20,5 +20,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.2.update.sqlpp
index e27ea4e..07983f0 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.2.update.sqlpp
@@ -20,9 +20,9 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q04_order_priority/q04_order_priority.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q04_order_priority/q04_order_priority.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q04_order_priority/q04_order_priority.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q04_order_priority/q04_order_priority.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q04_order_priority/q04_order_priority.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.2.update.sqlpp
index 42bbece..ed34b74 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.2.update.sqlpp
index 824fc6b..97a0b31 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.2.update.sqlpp
@@ -20,19 +20,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.2.update.sqlpp
index 9d5125c..e3a990e 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.2.update.sqlpp
@@ -26,19 +26,19 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Part using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Part using localfs (("path"="asterix_nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Partsupp using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Partsupp using localfs (("path"="asterix_nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue601/query-issue601.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue601/query-issue601.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue601/query-issue601.2.update.sqlpp
index 4dab63a..8f6ad53 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue601/query-issue601.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue601/query-issue601.2.update.sqlpp
@@ -26,5 +26,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.2.update.sqlpp
index 1c14a94..bc3766e 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.2.update.sqlpp
@@ -26,15 +26,15 @@
 use tpch;
 
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table SelectedNation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table SelectedNation using localfs (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.2.update.sqlpp
index 1c14a94..bc3766e 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.2.update.sqlpp
@@ -26,15 +26,15 @@
 use tpch;
 
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table SelectedNation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table SelectedNation using localfs (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.2.update.sqlpp
index 90e2139..4289c4a 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.2.update.sqlpp
@@ -26,15 +26,15 @@
 use tpch;
 
 
-load  table Orders using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Orders using localfs (("path"="asterix_nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Supplier using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Supplier using localfs (("path"="asterix_nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Region using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Region using localfs (("path"="asterix_nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Nation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Nation using localfs (("path"="asterix_nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table Customer using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table Customer using localfs (("path"="asterix_nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|"));
 
-load  table SelectedNation using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
+load  table SelectedNation using localfs (("path"="asterix_nc1://data/tpch0.001/selectednation.tbl"),("format"="delimited-text"),("delimiter"="|"));
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.2.update.sqlpp
index ab2b193..b315f19 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.2.update.sqlpp
@@ -26,5 +26,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 



[02/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm
new file mode 100644
index 0000000..858285e
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm
@@ -0,0 +1,100 @@
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID
 : 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatchTime": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "Request
 Memory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": 122, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "fabp4-0002+f
 abp4-0002", "PeriodicRelease": "error", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "undefined", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHT
 CSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "ssericksen@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkp
 ts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": 122, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e272.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/dat
 a/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts
 _RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0
 d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e286.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/
 ", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpt
 s_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0
 .0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e457.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared
 /", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "M
 inHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_Job
 Starts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Total
 TimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMon
 itorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_To
 talTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Total
 TimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites":
  0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,
 17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLo
 g": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e445.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/
 data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e372.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/
 data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", 
 "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_J
 obStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tot
 alTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfM
 onitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_
 TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tot
 alTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites
 ": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12
 ,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGM
 anNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_
 RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": 73, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 26620, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c064.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/
 data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e423.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/
 data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCp
 u": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/da
 ta/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu":
  0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e442.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data
 /shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582124.0#1446105525", "StatsLifetimeStarter": 24745, "JobStartDate": 1446107685, "SubmitEventNotes": "DAG Node: 323+323", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 13, "StartdPrincipal": "execute-side@matchsession/128.104.55.89", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107685, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 75000, "RemoteWallClockTime": 24748.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132433, "ResidentSetSize_RAW": 71248, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "Min
 Hosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 21145.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 118000, "BytesSent": 30560.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobSt
 arts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTi
 meUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonit
 orAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_Tota
 lTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTi
 meClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0
 , "JobFinishedHookDone": 1446132434, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 314, "SpooledOutputFiles": "harvest.log,CURLTIME_3853266,ChtcWrapper323.out,AuditLog.323,simu_3_323.txt,323.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107686, "ExitBySignal": false, "LastMatchTime": 1446107685, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 1142, "DAGManJobId": 49581933, "MemoryUsage": 73, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 43788, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24748, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/323/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11
 ,12,13,16,17,24,27", "CumulativeSlotTime": 24748.0d, "LastJobLeaseRenewal": 1446132433, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "323+323", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c070.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 175.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/323/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132433, "StreamErr": false, "RecentBlockReadKbytes": 4224, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1
 , "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582124, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24748.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=323 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.89:32652>#1445371750#1302#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/323", "QDate": 1446105525, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", 
 "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_J
 obStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tot
 alTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfM
 onitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_
 TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tot
 alTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites
 ": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12
 ,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGM
 anNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133562, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_523030,ChtcWrapper333.out,AuditLog.333,simu_3_333.txt,harvest.log,333.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCp
 u": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25876, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/333/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25876.0d, "LastJobLeaseRenewal": 1446133562, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "333+333", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 157.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/333/,/home/xguo23/finally_2/Simulation_condor/d
 ata/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133562, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582154, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25876.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=333 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14242#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/333", "QDate": 1446105553, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582177.0#1446105581", "StatsLifetimeStarter": 25025, "JobStartDate": 1446108665, "SubmitEventNotes": "DAG Node: 145+145", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.57", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108665, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25026.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133691, "ResidentSetSize_RAW": 73308, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_
 RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24770.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 120972, "BytesSent": 28290.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446133691, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 146, "SpooledOutputFiles": "CURLTIME_4179033,ChtcWrapper145.out,AuditLog.145,simu_3_145.txt,harvest.log,145.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108666, "ExitBySignal": false, "LastMatchTime": 1446108665, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 796, "DAGManJobId": 49581933, "MemoryUsage": 73, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 28476, "NumJobMatches": 1, "LocalUse
 rCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25026, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/145/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25026.0d, "LastJobLeaseRenewal": 1446133691, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "145+145", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c038.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 217.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/145/,/home/xguo23/finally_2/Simulation_condo
 r/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133691, "StreamErr": false, "RecentBlockReadKbytes": 1932, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582177, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25026.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=145 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.57:49793>#1445322694#1541#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/145", "QDate": 1446105581, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582178.0#1446105581", "StatsLifetimeStarter": 24871, "JobStartDate": 1446108666, "SubmitEventNotes": "DAG Node: 154+154", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.158", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108666, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24874.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133540, "ResidentSetSize_RAW": 125792, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24626.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30559.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133540, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1382128,ChtcWrapper154.out,AuditLog.154,simu_3_154.txt,harvest.log,154.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108668, "ExitBySignal": false, "LastMatchTime": 1446108666, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24874, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/154/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24874.0d, "LastJobLeaseRenewal": 1446133540, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "154+154", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e358.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 183.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_co

<TRUNCATED>


[16/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.2.update.sqlpp
index ab2b193..b315f19 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.2.update.sqlpp
@@ -26,5 +26,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.2.update.sqlpp
index ab2b193..b315f19 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.2.update.sqlpp
@@ -26,5 +26,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.2.update.sqlpp
index d3a8e68..eb90816 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.2.update.sqlpp
@@ -26,5 +26,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.2.update.sqlpp
index d3a8e68..eb90816 100644
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.2.update.sqlpp
+++ b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.2.update.sqlpp
@@ -26,5 +26,5 @@
 use tpch;
 
 
-load  table LineItem using "org.apache.asterix.external.dataset.adapter.NCFileSystemAdapter" (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 


[12/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.adm
new file mode 100644
index 0000000..8ba1df3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-meta-pk-in-meta/feed-with-meta-pk-in-meta.1.adm
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed with meta-data and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+
+drop dataverse Couchbase if exists;
+create dataverse Couchbase;
+use dataverse Couchbase;
+
+create type DocumentType as open {
+};
+
+create type CouchbaseMetaType as open {
+    id: string,
+    flags: int32,
+    expiration: int32,
+    cas: int64,
+    rev: int64,
+    vbid: int32,
+    dtype: int32
+};
+
+create dataset CouchFeedDataset(DocumentType) with meta(CouchbaseMetaType) primary key meta()."key";
+
+create feed CouchFeedWithMeta using FeedAdapter(
+    ("type-name"="DocumentType"),
+    ("meta-type-name"="CouchbaseMetaType"),
+    ("reader"="csv-with-record"),
+    ("path"="..."),
+    ("format"="record-with-meta")
+);
\ No newline at end of file
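The DDL above reaches the adapter layer as a flat key-value map. Below is a minimal sketch of that hand-off, assuming the three-argument configure() this patch adds to IAdapterFactory; the class and method names are illustrative, the ARecordType arguments are taken as given (building them by hand is out of scope), and the "path" option stays omitted because it is elided ("...") in the test above.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
    import org.apache.asterix.om.types.ARecordType;

    public class FeedConfigSketch {
        // Sketch only: mirrors the parameters of the create-feed statement above.
        static GenericAdapterFactory configure(ARecordType recordType, ARecordType metaType)
                throws AsterixException {
            Map<String, String> conf = new HashMap<>();
            conf.put("type-name", "DocumentType");
            conf.put("meta-type-name", "CouchbaseMetaType");
            conf.put("reader", "csv-with-record");
            conf.put("format", "record-with-meta");
            // "path" omitted: it is elided in the test above.
            GenericAdapterFactory factory = new GenericAdapterFactory();
            factory.configure(conf, recordType, metaType); // new 3-arg overload
            return factory;
        }
    }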

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/testsuite.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index d50a0a6..e6c9d53 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -29,25 +29,29 @@
         QueryOffsetPath="queries"
         QueryFileExtension=".aql">
     <test-group name="feeds">
-        <!-- Fails constantly and not clear what is intended
         <test-case FilePath="feeds">
-          <compilation-unit name="feeds_06">
-            <output-dir compare="Text">feeds_06</output-dir>
-          </compilation-unit>
-        </test-case> -->
+            <compilation-unit name="change-feed-with-meta-pk-in-meta">
+                <output-dir compare="Text">change-feed-with-meta-pk-in-meta</output-dir>
+            </compilation-unit>
+        </test-case>
         <test-case FilePath="feeds">
             <compilation-unit name="feed-with-filtered-dataset">
                 <output-dir compare="Text">feed-with-filtered-dataset</output-dir>
             </compilation-unit>
         </test-case>
         <test-case FilePath="feeds">
-            <compilation-unit name="feed-push-socket">
-                <output-dir compare="Text">feed-push-socket</output-dir>
+            <compilation-unit name="change-feed">
+                <output-dir compare="Text">change-feed</output-dir>
             </compilation-unit>
         </test-case>
         <test-case FilePath="feeds">
-            <compilation-unit name="drop-dataverse-with-disconnected-feed">
-                <output-dir compare="Text">drop-dataverse-with-disconnected-feed</output-dir>
+            <compilation-unit name="feed-with-meta-pk-in-meta">
+                <output-dir compare="Text">feed-with-meta-pk-in-meta</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_07">
+                <output-dir compare="Text">feeds_07</output-dir>
             </compilation-unit>
         </test-case>
         <test-case FilePath="feeds">
@@ -55,9 +59,20 @@
                 <output-dir compare="Text">feed-with-external-parser</output-dir>
             </compilation-unit>
         </test-case>
+        <!-- Fails constantly and not clear what is intended
         <test-case FilePath="feeds">
-            <compilation-unit name="feeds_07">
-                <output-dir compare="Text">feeds_07</output-dir>
+          <compilation-unit name="feeds_06">
+            <output-dir compare="Text">feeds_06</output-dir>
+          </compilation-unit>
+        </test-case> -->
+        <test-case FilePath="feeds">
+            <compilation-unit name="drop-dataverse-with-disconnected-feed">
+                <output-dir compare="Text">drop-dataverse-with-disconnected-feed</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feed-push-socket">
+                <output-dir compare="Text">feed-push-socket</output-dir>
             </compilation-unit>
         </test-case>
         <test-case FilePath="feeds">
@@ -115,12 +130,63 @@
                 <output-dir compare="Text">issue_230_feeds</output-dir>
             </compilation-unit>
         </test-case>
-<!-- 
-        <test-case FilePath="feeds">
+        <!--<test-case FilePath="feeds">
             <compilation-unit name="issue_711_feeds">
                 <output-dir compare="Text">issue_711_feeds</output-dir>
             </compilation-unit>
-        </test-case>  -->
+        </test-case>-->
+    </test-group>
+    <test-group name="upsert">
+        <test-case FilePath="upsert">
+            <compilation-unit name="filtered-dataset">
+                <output-dir compare="Text">filtered-dataset</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="nested-index">
+                <output-dir compare="Text">nested-index</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="primary-secondary-rtree">
+                <output-dir compare="Text">primary-secondary-rtree</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="upsert-with-self-read">
+                <output-dir compare="Text">upsert-with-self-read</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="nullable-index">
+                <output-dir compare="Text">nullable-index</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="open-index">
+                <output-dir compare="Text">open-index</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="primary-index">
+                <output-dir compare="Text">primary-index</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="primary-secondary-btree">
+                <output-dir compare="Text">primary-secondary-btree</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="primary-secondary-inverted">
+                <output-dir compare="Text">primary-secondary-inverted</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="upsert">
+            <compilation-unit name="multiple-secondaries">
+                <output-dir compare="Text">multiple-secondaries</output-dir>
+            </compilation-unit>
+        </test-case>
     </test-group>
     <test-group name="external-library">
         <test-case FilePath="external-library">
@@ -1320,60 +1386,26 @@
         </test-case>
         -->
     </test-group>
-    <test-group name="upsert">
-        <test-case FilePath="upsert">
-            <compilation-unit name="primary-secondary-rtree">
-                <output-dir compare="Text">primary-secondary-rtree</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="upsert-with-self-read">
-                <output-dir compare="Text">upsert-with-self-read</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="filtered-dataset">
-                <output-dir compare="Text">filtered-dataset</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="nullable-index">
-                <output-dir compare="Text">nullable-index</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="nested-index">
-                <output-dir compare="Text">nested-index</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="open-index">
-                <output-dir compare="Text">open-index</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="primary-index">
-                <output-dir compare="Text">primary-index</output-dir>
-            </compilation-unit>
-        </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="primary-secondary-btree">
-                <output-dir compare="Text">primary-secondary-btree</output-dir>
+    <test-group name="dml">
+        <test-case FilePath="dml">
+            <compilation-unit name="insert-dataset-with-meta">
+                <output-dir compare="Text">insert-dataset-with-meta</output-dir>
+                <expected-error>insert into dataset is not supported on Datasets with Meta records</expected-error>
             </compilation-unit>
         </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="primary-secondary-inverted">
-                <output-dir compare="Text">primary-secondary-inverted</output-dir>
+        <test-case FilePath="dml">
+            <compilation-unit name="delete-dataset-with-meta">
+                <output-dir compare="Text">delete-dataset-with-meta</output-dir>
+                <expected-error>delete from dataset is not supported on Datasets with Meta records</expected-error>
             </compilation-unit>
         </test-case>
-        <test-case FilePath="upsert">
-            <compilation-unit name="multiple-secondaries">
-                <output-dir compare="Text">multiple-secondaries</output-dir>
+        <test-case FilePath="dml">
+            <compilation-unit name="upsert-dataset-with-meta">
+                <output-dir compare="Text">upsert-dataset-with-meta</output-dir>
+                <expected-error>upsert into dataset is not supported on Datasets with Meta records</expected-error>
             </compilation-unit>
         </test-case>
-    </test-group>
-    <test-group name="dml">
-         <test-case FilePath="dml">
+        <test-case FilePath="dml">
             <compilation-unit name="load-with-ngram-index">
                 <output-dir compare="Text">load-with-ngram-index</output-dir>
             </compilation-unit>
@@ -6120,6 +6152,12 @@
     </test-group>
     <test-group name="load">
         <test-case FilePath="load">
+            <compilation-unit name="dataset-with-meta">
+                <output-dir compare="Text">dataset-with-meta</output-dir>
+                <expected-error>load dataset is not supported on Datasets with Meta records</expected-error>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="load">
             <compilation-unit name="csv_01">
                 <output-dir compare="Text">csv_01</output-dir>
             </compilation-unit>
@@ -6178,13 +6216,13 @@
         <test-case FilePath="load">
             <compilation-unit name="issue14_query">
                 <output-dir compare="Text">issue14_query</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: The parameter parser must be specified</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">
             <compilation-unit name="issue315_query">
                 <output-dir compare="Text">none</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: The parameter parser must be specified</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 2d994df..23215db 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -5989,13 +5989,13 @@
         <test-case FilePath="load">
             <compilation-unit name="issue14_query">
                 <output-dir compare="Text">issue14_query</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: The parameter parser must be specified</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">
             <compilation-unit name="issue315_query">
                 <output-dir compare="Text">none</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: The parameter parser must be specified</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java b/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
index 77a290d..8ceee62 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
@@ -45,7 +45,7 @@ import org.apache.asterix.common.configuration.TransactionLogDir;
 import org.apache.asterix.common.exceptions.AsterixException;
 
 public class AsterixPropertiesAccessor {
-    private static final Logger LOGGER = Logger.getLogger(AsterixPropertiesAccessor.class.getName());
+    private static Logger LOGGER = Logger.getLogger(AsterixPropertiesAccessor.class.getName());
 
     private final String instanceName;
     private final String metadataNodeName;
@@ -56,7 +56,7 @@ public class AsterixPropertiesAccessor {
     private final Map<String, String> transactionLogDirs;
     private final Map<String, String> asterixBuildProperties;
     private final Map<String, ClusterPartition[]> nodePartitionsMap;
-    private SortedMap<Integer, ClusterPartition> clusterPartitions;
+    private final SortedMap<Integer, ClusterPartition> clusterPartitions;
 
     public AsterixPropertiesAccessor() throws AsterixException {
         String fileName = System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-common/src/test/java/org/apache/asterix/test/server/FileTestServer.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/test/java/org/apache/asterix/test/server/FileTestServer.java b/asterix-common/src/test/java/org/apache/asterix/test/server/FileTestServer.java
index ba32af2..947d7d7 100644
--- a/asterix-common/src/test/java/org/apache/asterix/test/server/FileTestServer.java
+++ b/asterix-common/src/test/java/org/apache/asterix/test/server/FileTestServer.java
@@ -65,6 +65,7 @@ public class FileTestServer implements ITestServer {
                         // This also could be due to the close() call
                     }
                 }
+
             }
         });
         listenerThread.start();
@@ -72,9 +73,11 @@ public class FileTestServer implements ITestServer {
 
     @Override
     public void stop() throws IOException, InterruptedException {
-        serverSocket.close();
-        if (listenerThread.isAlive()) {
-            listenerThread.join();
+        if (serverSocket.isBound()) {
+            serverSocket.close();
+            if (listenerThread.isAlive()) {
+                listenerThread.join();
+            }
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index 851acd4..a03ad1a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.adapter.factory;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IDataParserFactory;
@@ -28,6 +29,8 @@ import org.apache.asterix.external.api.IDataSourceAdapter;
 import org.apache.asterix.external.api.IExternalDataSourceFactory;
 import org.apache.asterix.external.api.IIndexibleExternalDataSource;
 import org.apache.asterix.external.api.IIndexingAdapterFactory;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.asterix.external.dataset.adapter.GenericAdapter;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.provider.DataflowControllerProvider;
@@ -40,6 +43,7 @@ import org.apache.asterix.external.util.FeedUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
 
 public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterFactory {
@@ -53,6 +57,7 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
     private boolean indexingOp;
     private boolean isFeed;
     private FileSplit[] feedLogFileSplits;
+    private ARecordType metaType;
 
     @Override
     public void setSnapshot(List<ExternalFile> files, boolean indexingOp) {
@@ -66,7 +71,7 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
     }
 
     @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception {
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
         return dataSourceFactory.getPartitionConstraint();
     }
 
@@ -74,14 +79,23 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
      * Runs on each node controller (after serialization-deserialization)
      */
     @Override
-    public synchronized IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        restoreExternalObjects();
+    public synchronized IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        try {
+            restoreExternalObjects();
+        } catch (AsterixException e) {
+            throw new HyracksDataException(e);
+        }
         IDataFlowController controller = DataflowControllerProvider.getDataflowController(recordType, ctx, partition,
                 dataSourceFactory, dataParserFactory, configuration, indexingOp, isFeed, feedLogFileSplits);
-        return new GenericAdapter(controller);
+        if (isFeed) {
+            return new FeedAdapter((AbstractFeedDataFlowController) controller);
+        } else {
+            return new GenericAdapter(controller);
+        }
     }
 
-    private void restoreExternalObjects() throws Exception {
+    private void restoreExternalObjects() throws AsterixException {
         if (dataSourceFactory == null) {
             dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(configuration);
             // create and configure parser factory
@@ -94,15 +108,19 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
             // create and configure parser factory
             dataParserFactory = ParserFactoryProvider.getDataParserFactory(configuration);
             dataParserFactory.setRecordType(recordType);
+            dataParserFactory.setMetaType(metaType);
             dataParserFactory.configure(configuration);
         }
     }
 
     @Override
-    public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
+    public void configure(Map<String, String> configuration, ARecordType outputType, ARecordType metaType)
+            throws AsterixException {
         this.recordType = outputType;
+        this.metaType = metaType;
         this.configuration = configuration;
         dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(configuration);
+
         dataParserFactory = ParserFactoryProvider.getDataParserFactory(configuration);
         prepare();
         ExternalDataCompatibilityUtils.validateCompatibility(dataSourceFactory, dataParserFactory);
@@ -110,7 +128,7 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
         nullifyExternalObjects();
     }
 
-    private void configureFeedLogManager() throws Exception {
+    private void configureFeedLogManager() throws AsterixException {
         this.isFeed = ExternalDataUtils.isFeed(configuration);
         if (isFeed) {
             feedLogFileSplits = FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
@@ -127,12 +145,13 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
         }
     }
 
-    private void prepare() throws Exception {
+    private void prepare() throws AsterixException {
         if (dataSourceFactory.isIndexible() && (files != null)) {
             ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
         }
         dataSourceFactory.configure(configuration);
         dataParserFactory.setRecordType(recordType);
+        dataParserFactory.setMetaType(metaType);
         dataParserFactory.configure(configuration);
     }
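On the node controller, the reworked createAdapter() above now hands back a FeedAdapter for feeds and a GenericAdapter otherwise, both behind IDataSourceAdapter. A hedged sketch of the caller's side; the start(int, IFrameWriter) signature is an assumption, since the IDataSourceAdapter interface is not shown in this patch.

    import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
    import org.apache.asterix.external.api.IDataSourceAdapter;
    import org.apache.hyracks.api.comm.IFrameWriter;
    import org.apache.hyracks.api.context.IHyracksTaskContext;

    public class AdapterConsumerSketch {
        // Sketch only: the consumer never branches on feed vs. non-feed itself.
        static void run(GenericAdapterFactory factory, IHyracksTaskContext ctx, int partition,
                IFrameWriter writer) throws Exception {
            IDataSourceAdapter adapter = factory.createAdapter(ctx, partition);
            adapter.start(partition, writer); // assumption: start(int, IFrameWriter)
        }
    }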
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
index f149ed3..49c5943 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.adapter.factory;
 import java.io.Serializable;
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.external.api.ILookupReaderFactory;
 import org.apache.asterix.external.api.ILookupRecordReader;
 import org.apache.asterix.external.api.IRecordDataParser;
@@ -44,12 +45,12 @@ public class LookupAdapterFactory<T> implements Serializable {
     private static final long serialVersionUID = 1L;
     private IRecordDataParserFactory dataParserFactory;
     private ILookupReaderFactory readerFactory;
-    private ARecordType recordType;
-    private int[] ridFields;
+    private final ARecordType recordType;
+    private final int[] ridFields;
     private Map<String, String> configuration;
-    private boolean retainInput;
-    private boolean retainNull;
-    private INullWriterFactory iNullWriterFactory;
+    private final boolean retainInput;
+    private final boolean retainNull;
+    private final INullWriterFactory iNullWriterFactory;
 
     public LookupAdapterFactory(ARecordType recordType, int[] ridFields, boolean retainInput, boolean retainNull,
             INullWriterFactory iNullWriterFactory) {
@@ -64,7 +65,6 @@ public class LookupAdapterFactory<T> implements Serializable {
             ExternalFileIndexAccessor snapshotAccessor, IFrameWriter writer) throws HyracksDataException {
         try {
             IRecordDataParser<T> dataParser = dataParserFactory.createRecordParser(ctx);
-            dataParser.configure(configuration, recordType);
             ILookupRecordReader<? extends T> reader = readerFactory.createRecordReader(ctx, partition,
                     snapshotAccessor);
             reader.configure(configuration);
@@ -76,7 +76,7 @@ public class LookupAdapterFactory<T> implements Serializable {
         }
     }
 
-    public void configure(Map<String, String> configuration) throws Exception {
+    public void configure(Map<String, String> configuration) throws AsterixException {
         this.configuration = configuration;
         readerFactory = LookupReaderFactoryProvider.getLookupReaderFactory(configuration);
         dataParserFactory = (IRecordDataParserFactory<T>) ParserFactoryProvider.getDataParserFactory(configuration);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
index 3965e5e..59a7514 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
@@ -21,9 +21,11 @@ package org.apache.asterix.external.api;
 import java.io.Serializable;
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 /**
  * Base interface for IGenericDatasetAdapterFactory and ITypedDatasetAdapterFactory.
@@ -50,7 +52,7 @@ public interface IAdapterFactory extends Serializable {
      * In the former case, the IP address is translated to a node controller id
      * running on the node with the given IP address.
      */
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception;
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException;
 
     /**
      * Creates an instance of IDatasourceAdapter.
@@ -60,14 +62,21 @@ public interface IAdapterFactory extends Serializable {
      * @return An instance of IDatasourceAdapter.
      * @throws Exception
      */
-    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception;
+    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws HyracksDataException;
 
     /**
      * @param configuration
      * @param outputType
+     * @param metaType
      * @throws Exception
      */
-    public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception;
+    public void configure(Map<String, String> configuration, ARecordType outputType, ARecordType metaType)
+            throws AsterixException;
+
+    public default void configure(final Map<String, String> configuration, final ARecordType outputType)
+            throws AsterixException {
+        configure(configuration, outputType, null);
+    }
 
     /**
      * Gets the record type associated with the output of the adapter
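The default method above keeps existing two-argument call sites source-compatible: it forwards to the new three-argument overload with a null meta type. A minimal sketch of the two equivalent entry points (factory choice and names are illustrative):

    import java.util.Map;

    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
    import org.apache.asterix.external.api.IAdapterFactory;
    import org.apache.asterix.om.types.ARecordType;

    public class ConfigureOverloadSketch {
        static void configureBoth(Map<String, String> configuration, ARecordType outputType,
                ARecordType metaType) throws AsterixException {
            IAdapterFactory factory = new GenericAdapterFactory();
            factory.configure(configuration, outputType);           // forwards as (..., null)
            factory.configure(configuration, outputType, metaType); // meta-aware form
        }
    }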

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java
index 252b43b..e5b22e9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterRuntimeManager.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.api;
 
+import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
 import org.apache.asterix.external.feed.management.FeedId;
 
@@ -57,14 +58,14 @@ public interface IAdapterRuntimeManager {
     public void stop() throws Exception;
 
     /**
-     * @return feedId associated with the feed that is being ingested
+     * @return feedId associated with the feed that is being ingested.
      */
     public FeedId getFeedId();
 
     /**
-     * @return the instance of the feed adapter (an implementation of {@code IFeedAdapter}) in use.
+     * @return an instance of the {@code FeedAdapter} in use.
      */
-    public IFeedAdapter getFeedAdapter();
+    public FeedAdapter getFeedAdapter();
 
     /**
      * @return state associated with the AdapterRuntimeManager. See {@code State}.
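With IFeedAdapter folded away (its removal appears later in this patch), flow control is reached through the concrete FeedAdapter returned above. A hedged sketch, on the assumption that FeedAdapter retains the pause()/resume() pair from the deleted interface:

    import org.apache.asterix.external.api.IAdapterRuntimeManager;
    import org.apache.asterix.external.dataset.adapter.FeedAdapter;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class ThrottleSketch {
        // Sketch only: back off when ingestion overwhelms the system, then resume.
        static void throttle(IAdapterRuntimeManager mgr) throws HyracksDataException {
            FeedAdapter adapter = mgr.getFeedAdapter();
            adapter.pause();  // assumption: FeedAdapter keeps pause()/resume()
            adapter.resume();
        }
    }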

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
index d9ed131..33f262a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataFlowController.java
@@ -18,46 +18,27 @@
  */
 package org.apache.asterix.external.api;
 
-import java.util.Map;
-
 import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IDataFlowController {
 
-    /**
-     * Order of calls:
-     * 1. Constructor()
-     * 2. if record flow controller
-     * |-a. Set record reader
-     * |-b. Set record parser
-     * else
-     * |-a. Set stream parser
-     * 3. setTupleForwarder(forwarder)
-     * 4. configure(configuration,ctx)
-     * 5. start(writer)
-     *
-     * pause(), resume(), and stop() are only used with feeds
-     * pause is called after start when a feed is running and the system is overwhelmed with data.
-     * resume is called after the load goes down and we are ready to receive more data.
-     * stop is called to disconnect the feed. once stop is called, no other method is called.
-     *
-     */
-
+    //TODO: Refactor this interface. Remove writer from start() signature
     public void start(IFrameWriter writer) throws HyracksDataException;
 
-    public boolean stop() throws HyracksDataException;
-
-    public boolean pause() throws HyracksDataException;
-
-    public boolean resume() throws HyracksDataException;
-
-    public boolean handleException(Throwable th);
+    public default boolean pause() throws HyracksDataException {
+        throw new HyracksDataException("Method not implemented");
+    }
 
-    public ITupleForwarder getTupleForwarder();
+    public default boolean resume() throws HyracksDataException {
+        throw new HyracksDataException("Method not implemented");
+    }
 
-    public void setTupleForwarder(ITupleForwarder forwarder);
+    public default void flush() throws HyracksDataException {
+        throw new HyracksDataException("Method not implemented");
+    }
 
-    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) throws HyracksDataException;
+    public default boolean stop() throws HyracksDataException {
+        throw new HyracksDataException("Method not implemented");
+    }
 }
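Because pause(), resume(), flush(), and stop() are now defaults that throw, a plain (non-feed) controller only has to supply start(). A minimal sketch under that assumption; the frame-pushing body is elided:

    import org.apache.asterix.external.api.IDataFlowController;
    import org.apache.hyracks.api.comm.IFrameWriter;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class OneShotDataFlowController implements IDataFlowController {
        @Override
        public void start(IFrameWriter writer) throws HyracksDataException {
            writer.open();
            try {
                // push frames to the writer here (omitted in this sketch)
            } catch (Exception e) {
                writer.fail();
                throw new HyracksDataException(e);
            } finally {
                writer.close();
            }
        }
        // pause()/resume()/flush()/stop() inherit the throwing defaults above.
    }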

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
index e680822..322e51f 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParser.java
@@ -19,15 +19,11 @@
 package org.apache.asterix.external.api;
 
 import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Map;
 
 import org.apache.asterix.builders.IARecordBuilder;
 import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.builders.RecordBuilder;
 import org.apache.asterix.builders.UnorderedListBuilder;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 import org.apache.asterix.om.base.AMutableOrderedList;
 import org.apache.asterix.om.base.AMutableRecord;
@@ -43,24 +39,9 @@ import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 
 public interface IDataParser {
 
-    /**
-     * @return The supported data sources
-     */
-    public DataSourceType getDataSourceType();
-
-    /**
-     * @param configuration
-     *            a set of configurations that comes from two sources.
-     *            1. The create adapter statement.
-     *            2. The query compiler.
-     * @param recordType
-     *            The expected record type
-     * @throws IOException
-     */
-    public void configure(Map<String, String> configuration, ARecordType recordType) throws IOException;
-
     /*
-     * The following two static methods are expensive. right now, they are used by RSSFeeds and Twitter feed
+     * The following two static methods are expensive. Right now, they are used by RSSFeeds and
+     * Twitter feeds.
      * TODO: Get rid of them
      */
     public static void writeRecord(AMutableRecord record, DataOutput dataOutput, IARecordBuilder recordBuilder)

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParserFactory.java
index 5c3845c..1fc97c9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IDataParserFactory.java
@@ -35,18 +35,27 @@ public interface IDataParserFactory extends Serializable {
      *         an instance of IDataParserFactory with STREAM data source type must implement IStreamDataParserFactory
      * @throws AsterixException
      */
-    public DataSourceType getDataSourceType() throws AsterixException;
+    public DataSourceType getDataSourceType();
 
     /**
      * Configure the data parser factory. The passed map contains key value pairs from the
      * submitted AQL statement and any additional pairs added by the compiler
+     *
      * @param configuration
      */
-    public void configure(Map<String, String> configuration) throws Exception;
+    public void configure(Map<String, String> configuration) throws AsterixException;
 
     /**
      * Set the record type expected to be produced by parsers created by this factory
+     *
      * @param recordType
      */
     public void setRecordType(ARecordType recordType);
+
+    /**
+     * Set the meta record type expected to be produced by parsers created by this factory
+     *
+     * @param metaType
+     */
+    public void setMetaType(ARecordType metaType);
 }
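The new setMetaType() slots into the factory hand-shake right next to setRecordType(); GenericAdapterFactory.prepare() earlier in this patch calls them in exactly this order. A small sketch of that wiring (metaType may be null for meta-less datasets):

    import java.util.Map;

    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.api.IDataParserFactory;
    import org.apache.asterix.om.types.ARecordType;

    public class ParserFactoryWiringSketch {
        // Sketch only: same call order GenericAdapterFactory.prepare() uses.
        static void wire(IDataParserFactory f, ARecordType recordType, ARecordType metaType,
                Map<String, String> configuration) throws AsterixException {
            f.setRecordType(recordType);
            f.setMetaType(metaType); // null when the dataset carries no meta record
            f.configure(configuration);
        }
    }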

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
index 1487cf1..b49a719 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
@@ -22,6 +22,7 @@ import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Map;
 
+import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.om.util.AsterixAppContextInfo;
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
@@ -31,7 +32,6 @@ public interface IExternalDataSourceFactory extends Serializable {
     /**
      * The data source type indicates whether the data source produces a continuous stream or
      * a set of records
-     * @author amoudi
      */
     public enum DataSourceType {
         STREAM,
@@ -45,21 +45,24 @@ public interface IExternalDataSourceFactory extends Serializable {
 
     /**
      * Specifies on which locations this data source is expected to run.
+     *
      * @return
-     * @throws Exception
+     * @throws AsterixException
      */
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws Exception;
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException;
 
     /**
      * Configure the data parser factory. The passed map contains key value pairs from the
      * submitted AQL statement and any additional pairs added by the compiler
+     *
      * @param configuration
-     * @throws Exception
+     * @throws AsterixException
      */
-    public void configure(Map<String, String> configuration) throws Exception;
+    public void configure(Map<String, String> configuration) throws AsterixException;
 
     /**
      * Specify whether the external data source can be indexed
+     *
      * @return
      */
     public default boolean isIndexible() {
@@ -93,5 +96,4 @@ public interface IExternalDataSourceFactory extends Serializable {
         }
         return constraints;
     }
-
 }
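Narrowing these signatures to AsterixException leaves callers one checked type to translate at the dataflow boundary; GenericAdapterFactory.createAdapter() earlier in this patch does exactly this. A minimal sketch of the pattern:

    import java.util.Map;

    import org.apache.asterix.common.exceptions.AsterixException;
    import org.apache.asterix.external.api.IExternalDataSourceFactory;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class ExceptionBoundarySketch {
        // Sketch only: configuration errors surface as AsterixException and are
        // rewrapped as HyracksDataException once inside the dataflow layer.
        static void setUp(IExternalDataSourceFactory factory, Map<String, String> configuration)
                throws HyracksDataException {
            try {
                factory.configure(configuration);
                factory.getPartitionConstraint();
            } catch (AsterixException e) {
                throw new HyracksDataException(e);
            }
        }
    }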

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalIndexer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalIndexer.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalIndexer.java
index 0b4277e..01ffd99 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalIndexer.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalIndexer.java
@@ -18,36 +18,39 @@
  */
 package org.apache.asterix.external.api;
 
+import java.io.IOException;
 import java.io.Serializable;
 
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 /**
- * @author amoudi
- *         This Interface represents the component responsible for adding record ids to tuples when indexing external data
+ * This Interface represents the component responsible for adding record IDs to tuples when indexing external data
  */
 public interface IExternalIndexer extends Serializable {
 
     /**
     * This method is called by an indexible datasource when the external source reader has been updated.
     * This gives the indexer a chance to update its reader-specific values (i.e., file name).
+     *
      * @param reader
-     *        the new reader
+     *            the new reader
      * @throws Exception
      */
-    public void reset(IRecordReader<?> reader) throws Exception;
+    public void reset(IRecordReader<?> reader) throws IOException;
 
     /**
     * This method is called by the dataflow controller with each tuple. The indexer is expected to append record IDs to the tuple.
+     *
      * @param tb
      * @throws Exception
      */
-    public void index(ArrayTupleBuilder tb) throws Exception;
+    public void index(ArrayTupleBuilder tb) throws IOException;
 
     /**
     * This method returns the number of fields in the record ID. It is used by the tuple appender at the initialization step.
+     *
      * @return
      * @throws Exception
      */
-    public int getNumberOfFields() throws Exception;
+    public int getNumberOfFields() throws IOException;
 }
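The javadoc above implies a three-step protocol: getNumberOfFields() at appender setup, reset() whenever the reader moves to a new source, and index() once per tuple. A hedged sketch of a loop body honoring it (the reader and tuple builder are taken as given; names are illustrative):

    import java.io.IOException;

    import org.apache.asterix.external.api.IExternalIndexer;
    import org.apache.asterix.external.api.IRecordReader;
    import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

    public class IndexerProtocolSketch {
        static void indexOne(IExternalIndexer indexer, IRecordReader<?> reader,
                ArrayTupleBuilder tb) throws IOException {
            final int ridFields = indexer.getNumberOfFields(); // sizing for the appender
            indexer.reset(reader); // pick up reader state, e.g. the current file name
            indexer.index(tb);     // append the record-id fields to the tuple
        }
    }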

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java
deleted file mode 100644
index 3261556..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IFeedAdapter.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.api;
-
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public interface IFeedAdapter extends IDataSourceAdapter {
-    /**
-     * Pause the ingestion of data.
-     * @throws HyracksDataException
-     * @throws Exception
-     */
-    public boolean pause() throws HyracksDataException;
-
-    /**
-     * Resume the ingestion of data.
-     * @throws HyracksDataException
-     * @throws Exception
-     */
-    public boolean resume() throws HyracksDataException;
-
-    /**
-     * Discontinue the ingestion of data.
-     * @throws Exception
-     */
-    public boolean stop() throws Exception;
-
-    /**
-     * @param e
-     * @return true if the ingestion should continue post the exception else false
-     * @throws Exception
-     */
-    public boolean handleException(Throwable e);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexibleExternalDataSource.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexibleExternalDataSource.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexibleExternalDataSource.java
index fe30b38..accd730 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexibleExternalDataSource.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexibleExternalDataSource.java
@@ -23,7 +23,7 @@ import java.util.List;
 import org.apache.asterix.external.indexing.ExternalFile;
 
 public interface IIndexibleExternalDataSource extends IExternalDataSourceFactory {
-    public void setSnapshot(List<ExternalFile> files, boolean indexingOp) throws Exception;
+    public void setSnapshot(List<ExternalFile> files, boolean indexingOp);
 
     /**
      * Specify whether the external data source is configured for indexing

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingDatasource.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingDatasource.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingDatasource.java
index ed5e7b5..c247ef6 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingDatasource.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IIndexingDatasource.java
@@ -20,6 +20,4 @@ package org.apache.asterix.external.api;
 
 public interface IIndexingDatasource {
     public IExternalIndexer getIndexer();
-
-    public void setIndexer(IExternalIndexer indexer);
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
index 8cc4e27..b10452e 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
@@ -18,15 +18,12 @@
  */
 package org.apache.asterix.external.api;
 
-import java.util.Map;
-
 import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IInputStreamProvider {
-    public AInputStream getInputStream() throws Exception;
-
-    public void configure(Map<String, String> configuration);
+    public AInputStream getInputStream() throws HyracksDataException;
 
     public void setFeedLogManager(FeedLogManager feedLogManager);
 }
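
For orientation, a minimal sketch of what a provider looks like against the slimmed-down interface; the class and its wiring are hypothetical, and only the two methods come from the patched IInputStreamProvider:

    import org.apache.asterix.external.api.IInputStreamProvider;
    import org.apache.asterix.external.input.stream.AInputStream;
    import org.apache.asterix.external.util.FeedLogManager;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    // Hypothetical provider: configuration now happens in the factory, so the
    // provider only hands out a stream built elsewhere and accepts the feed
    // log manager it may need for error reporting.
    public class SingleStreamProviderSketch implements IInputStreamProvider {
        private final AInputStream stream;
        private FeedLogManager feedLogManager;

        public SingleStreamProviderSketch(AInputStream stream) {
            this.stream = stream;
        }

        @Override
        public AInputStream getInputStream() throws HyracksDataException {
            return stream;
        }

        @Override
        public void setFeedLogManager(FeedLogManager feedLogManager) {
            this.feedLogManager = feedLogManager;
        }
    }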

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProviderFactory.java
index 3cc31dc..f52f7d3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProviderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProviderFactory.java
@@ -19,8 +19,15 @@
 package org.apache.asterix.external.api;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IInputStreamProviderFactory extends IExternalDataSourceFactory {
 
-    public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition) throws Exception;
+    public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException;
+
+    @Override
+    public default DataSourceType getDataSourceType() {
+        return DataSourceType.STREAM;
+    }
 }
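
With getDataSourceType() defaulted to STREAM, a stream factory only has to spell out provider creation. A sketch under that assumption, reusing the hypothetical provider above and leaving the rest of the IExternalDataSourceFactory contract abstract since it is not part of this diff:

    import org.apache.asterix.external.api.IInputStreamProvider;
    import org.apache.asterix.external.api.IInputStreamProviderFactory;
    import org.apache.asterix.external.input.stream.AInputStream;
    import org.apache.hyracks.api.context.IHyracksTaskContext;
    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public abstract class StreamProviderFactorySketch implements IInputStreamProviderFactory {

        // The unshown parts of the factory contract (configuration,
        // partition constraints) stay abstract in this sketch.
        protected abstract AInputStream openStream(IHyracksTaskContext ctx, int partition)
                throws HyracksDataException;

        @Override
        public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition)
                throws HyracksDataException {
            // No getDataSourceType() override needed: the interface default
            // already answers DataSourceType.STREAM.
            return new SingleStreamProviderSketch(openStream(ctx, partition));
        }
    }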

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordConverter.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordConverter.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordConverter.java
new file mode 100644
index 0000000..0f5ada4
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordConverter.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import java.io.IOException;
+
+public interface IRecordConverter<I, O> {
+
+    public O convert(IRawRecord<? extends I> input) throws IOException;
+}
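
The converter is a pure record-to-record mapping with no configuration of its own. A minimal sketch, assuming only that a raw record's toString() yields its payload; the class name and the String output type are illustrative:

    import java.io.IOException;

    import org.apache.asterix.external.api.IRawRecord;
    import org.apache.asterix.external.api.IRecordConverter;

    // Hypothetical converter: maps any raw record to its upper-cased string form.
    public class UpperCaseConverterSketch<I> implements IRecordConverter<I, String> {

        @Override
        public String convert(IRawRecord<? extends I> input) throws IOException {
            return input.toString().toUpperCase();
        }
    }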

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
index 3cb8f37..bc97ed0 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParser.java
@@ -29,9 +29,4 @@ public interface IRecordDataParser<T> extends IDataParser {
     * @throws IOException
      */
     public void parse(IRawRecord<? extends T> record, DataOutput out) throws IOException;
-
-    /**
-     * @return the record class
-     */
-    public Class<? extends T> getRecordClass();
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParserFactory.java
index 993d947..2ddbbcd 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordDataParserFactory.java
@@ -18,15 +18,17 @@
  */
 package org.apache.asterix.external.api;
 
-import java.io.IOException;
-
-import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IRecordDataParserFactory<T> extends IDataParserFactory {
-    public IRecordDataParser<T> createRecordParser(IHyracksTaskContext ctx)
-            throws HyracksDataException, AsterixException, IOException;
+    public IRecordDataParser<T> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException;
+
+    public Class<?> getRecordClass();
 
-    public Class<? extends T> getRecordClass();
+    @Override
+    public default DataSourceType getDataSourceType() {
+        return DataSourceType.RECORDS;
+    }
 }
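
As on the stream side, RECORDS now comes from an interface default, and getRecordClass() is no longer tied to the factory's type parameter. A skeleton under those assumptions, with parser creation and the inherited IDataParserFactory methods left to subclasses:

    import org.apache.asterix.external.api.IRecordDataParserFactory;

    // Hypothetical factory for parsers over char[] records; getDataSourceType()
    // is inherited as RECORDS, so only the record class is spelled out here.
    public abstract class CharArrayParserFactorySketch implements IRecordDataParserFactory<char[]> {

        @Override
        public Class<?> getRecordClass() {
            return char[].class;
        }
    }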

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
index 769db19..b4d67d4 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
@@ -20,7 +20,6 @@ package org.apache.asterix.external.api;
 
 import java.io.Closeable;
 import java.io.IOException;
-import java.util.Map;
 
 import org.apache.asterix.external.util.FeedLogManager;
 
@@ -31,15 +30,6 @@ import org.apache.asterix.external.util.FeedLogManager;
 public interface IRecordReader<T> extends Closeable {
 
     /**
-     * Configure the reader with the set of key/value pairs passed by the compiler
-     * @param configuration
-     *        the set of key/value pairs
-     * @throws Exception
-     *         when the reader can't be configured (i,e. due to incorrect configuration, unreachable source, etc.)
-     */
-    public void configure(Map<String, String> configuration) throws Exception;
-
-    /**
     * @return true if the reader has more records remaining, false otherwise.
      * @throws Exception
      *         if an error takes place

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReaderFactory.java
index fdc54d6..c6adbc4 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReaderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReaderFactory.java
@@ -19,10 +19,17 @@
 package org.apache.asterix.external.api;
 
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IRecordReaderFactory<T> extends IExternalDataSourceFactory {
 
-    public IRecordReader<? extends T> createRecordReader(IHyracksTaskContext ctx, int partition) throws Exception;
+    public IRecordReader<? extends T> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException;
 
-    public Class<? extends T> getRecordClass();
+    public Class<?> getRecordClass();
+
+    @Override
+    public default DataSourceType getDataSourceType() {
+        return DataSourceType.RECORDS;
+    }
 }
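
On the consuming side, the reader produced by this factory is driven roughly the way FeedRecordDataFlowController.start() does further down in this change set. A hypothetical drain loop to make that contract concrete:

    import org.apache.asterix.external.api.IRawRecord;
    import org.apache.asterix.external.api.IRecordReader;

    public final class ReaderLoopSketch {
        // Hypothetical consumption loop; `reader` would come from
        // IRecordReaderFactory.createRecordReader(ctx, partition).
        static <T> void drain(IRecordReader<T> reader) throws Exception {
            try {
                while (reader.hasNext()) {
                    IRawRecord<? extends T> record = reader.next();
                    if (record == null) {
                        Thread.sleep(1000); // transient gap; the controller waits and retries
                        continue;
                    }
                    // parse the record into an ArrayTupleBuilder and forward (not shown)
                }
            } finally {
                reader.close();
            }
        }
    }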

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataAndPKParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataAndPKParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataAndPKParser.java
new file mode 100644
index 0000000..23c5bdd
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataAndPKParser.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public interface IRecordWithMetaDataAndPKParser<T> extends IRecordDataParser<RecordWithMetadataAndPK<T>> {
+
+    public void parseMeta(RecordWithMetadataAndPK<? extends T> record, DataOutput out) throws IOException;
+
+    public void appendKeys(RecordWithMetadataAndPK<T> record, ArrayTupleBuilder tb) throws IOException;
+}
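
The intended assembly order for this parser mirrors FeedRecordDataFlowController below: value field, then metadata, then primary keys. A sketch of that sequence; passing the record in both its raw and typed forms, and closing a field after the meta part, are simplifying assumptions for illustration:

    import java.io.IOException;

    import org.apache.asterix.external.api.IRawRecord;
    import org.apache.asterix.external.api.IRecordWithMetaDataAndPKParser;
    import org.apache.asterix.external.input.record.RecordWithMetadataAndPK;
    import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

    public final class MetaAndPKTupleSketch {
        static <T> void buildTuple(IRecordWithMetaDataAndPKParser<T> parser,
                IRawRecord<RecordWithMetadataAndPK<T>> raw, RecordWithMetadataAndPK<T> record,
                ArrayTupleBuilder tb) throws IOException {
            tb.reset();
            parser.parse(raw, tb.getDataOutput());        // field 1: the record value
            tb.addFieldEndOffset();
            parser.parseMeta(record, tb.getDataOutput()); // field 2: the metadata part (assumed)
            tb.addFieldEndOffset();
            parser.appendKeys(record, tb);                // trailing fields: the primary keys
        }
    }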

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataParser.java
new file mode 100644
index 0000000..4b97e8d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithMetaDataParser.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+public interface IRecordWithMetaDataParser<T> extends IRecordDataParser<T> {
+    public void parseMeta(DataOutput out) throws IOException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithPKDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithPKDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithPKDataParser.java
new file mode 100644
index 0000000..e6c114d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordWithPKDataParser.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+import java.io.IOException;
+
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public interface IRecordWithPKDataParser<T> extends IRecordDataParser<T> {
+
+    public void appendKeys(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException;
+}
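
appendKeys() is expected to add one tuple field per primary key through the ArrayTupleBuilder. A hypothetical fragment; real implementations serialize typed AsterixDB values rather than the hash used here:

    import java.io.IOException;

    import org.apache.asterix.external.api.IRawRecord;
    import org.apache.asterix.external.api.IRecordWithPKDataParser;
    import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

    public abstract class IntKeyedParserSketch<T> implements IRecordWithPKDataParser<T> {

        @Override
        public void appendKeys(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
            tb.getDataOutput().writeInt(record.toString().hashCode()); // illustrative key only
            tb.addFieldEndOffset(); // one field per primary key
        }
    }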

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
index f596efa..ca274e8 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParser.java
@@ -25,6 +25,8 @@ import java.io.InputStream;
 public interface IStreamDataParser extends IDataParser {
     /**
     * Sets the input stream for the parser. Called only for parsers that support input streams.
+     *
+     * @throws IOException
      */
     public void setInputStream(InputStream in) throws IOException;
 
@@ -34,6 +36,7 @@ public interface IStreamDataParser extends IDataParser {
      *
      * @param out
     *            DataOutput instance for writing the parser output.
+     * @throws IOException
      */
     public boolean parse(DataOutput out) throws IOException;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParserFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParserFactory.java
index 828f71e..ad9acc6 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParserFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamDataParserFactory.java
@@ -18,12 +18,11 @@
  */
 package org.apache.asterix.external.api;
 
-import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public interface IStreamDataParserFactory extends IDataParserFactory {
 
     public IStreamDataParser createInputStreamParser(IHyracksTaskContext ctx, int partition)
-            throws HyracksDataException, AsterixException;
+            throws HyracksDataException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamFlowController.java
deleted file mode 100644
index d368c48..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamFlowController.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.api;
-
-public interface IStreamFlowController extends IDataFlowController {
-    public void setStreamParser(IStreamDataParser dataParser);
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITupleForwarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITupleForwarder.java
index c0add02..22d0d6b 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITupleForwarder.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/ITupleForwarder.java
@@ -18,8 +18,6 @@
  */
 package org.apache.asterix.external.api;
 
-import java.util.Map;
-
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -36,8 +34,6 @@ public interface ITupleForwarder {
         FEED
     }
 
-    public void configure(Map<String, String> configuration) throws HyracksDataException;
-
     public void initialize(IHyracksTaskContext ctx, IFrameWriter frameWriter) throws HyracksDataException;
 
     public void addTuple(ArrayTupleBuilder tb) throws HyracksDataException;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
index a5aaac4..bbd93c2 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractDataFlowController.java
@@ -18,37 +18,17 @@
  */
 package org.apache.asterix.external.dataflow;
 
-import java.util.Map;
-
-import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.asterix.external.api.IDataFlowController;
-import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 public abstract class AbstractDataFlowController implements IDataFlowController {
 
-    protected ITupleForwarder tupleForwarder;
-    protected IHyracksTaskContext ctx;
-    protected Map<String, String> configuration;
+    protected final ITupleForwarder tupleForwarder;
+    protected final IHyracksTaskContext ctx;
 
-    @Override
-    public ITupleForwarder getTupleForwarder() {
-        return tupleForwarder;
-    }
-
-    @Override
-    public void setTupleForwarder(ITupleForwarder tupleForwarder) {
-        this.tupleForwarder = tupleForwarder;
-    }
-
-    protected void initializeTupleForwarder(IFrameWriter writer) throws HyracksDataException {
-        tupleForwarder.initialize(ctx, writer);
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) {
-        this.configuration = configuration;
+    public AbstractDataFlowController(IHyracksTaskContext ctx, ITupleForwarder tupleForwarder) {
         this.ctx = ctx;
+        this.tupleForwarder = tupleForwarder;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
index 0c58ee3..cf4ed19 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
@@ -18,50 +18,28 @@
  */
 package org.apache.asterix.external.dataflow;
 
-import java.util.Map;
-
-
 import javax.annotation.Nonnull;
 
 import org.apache.asterix.external.api.IDataFlowController;
-import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 public abstract class AbstractFeedDataFlowController implements IDataFlowController {
-    protected FeedTupleForwarder tupleForwarder;
-    protected IHyracksTaskContext ctx;
-    protected Map<String, String> configuration;
-    protected static final int NUMBER_OF_TUPLE_FIELDS = 1;
-    protected ArrayTupleBuilder tb = new ArrayTupleBuilder(NUMBER_OF_TUPLE_FIELDS);
-    protected FeedLogManager feedLogManager;
+    protected final FeedTupleForwarder tupleForwarder;
+    protected final IHyracksTaskContext ctx;
+    protected final int numOfFields;
+    protected final ArrayTupleBuilder tb;
+    protected final FeedLogManager feedLogManager;
 
-    public AbstractFeedDataFlowController(@Nonnull FeedLogManager feedLogManager) {
+    public AbstractFeedDataFlowController(IHyracksTaskContext ctx, FeedTupleForwarder tupleForwarder,
+            @Nonnull FeedLogManager feedLogManager, int numOfFields) {
         this.feedLogManager = feedLogManager;
-    }
-
-    @Override
-    public ITupleForwarder getTupleForwarder() {
-        return tupleForwarder;
-    }
-
-    @Override
-    public void setTupleForwarder(ITupleForwarder tupleForwarder) {
-        this.tupleForwarder = (FeedTupleForwarder) tupleForwarder;
-    }
-
-    protected void initializeTupleForwarder(IFrameWriter writer) throws HyracksDataException {
-        tupleForwarder.configure(configuration);
-        tupleForwarder.initialize(ctx, writer);
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration, IHyracksTaskContext ctx) {
-        this.configuration = configuration;
+        this.numOfFields = numOfFields;
         this.ctx = ctx;
+        this.tupleForwarder = tupleForwarder;
+        this.tb = new ArrayTupleBuilder(numOfFields);
     }
 
     @Override
@@ -76,7 +54,13 @@ public abstract class AbstractFeedDataFlowController implements IDataFlowControl
         return true;
     }
 
+    @Override
     public void flush() throws HyracksDataException {
         tupleForwarder.flush();
     }
+
+    @Override
+    public abstract boolean stop() throws HyracksDataException;
+
+    public abstract boolean handleException(Throwable th);
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
new file mode 100644
index 0000000..8ec422f
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataflow;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.IRecordWithPKDataParser;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class ChangeFeedDataFlowController<T> extends FeedRecordDataFlowController<T> {
+
+    private final IRecordWithPKDataParser<T> dataParser;
+
+    public ChangeFeedDataFlowController(final IHyracksTaskContext ctx, final FeedTupleForwarder tupleForwarder,
+            final FeedLogManager feedLogManager, final int numOfOutputFields,
+            final IRecordWithPKDataParser<T> dataParser, final IRecordReader<T> recordReader) {
+        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
+        this.dataParser = dataParser;
+    }
+
+    @Override
+    protected void addPrimaryKeys(final ArrayTupleBuilder tb, final IRawRecord<? extends T> record) throws IOException {
+        dataParser.appendKeys(tb, record);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
new file mode 100644
index 0000000..370eec0
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.dataflow;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.parser.RecordWithMetadataParser;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+
+public class ChangeFeedWithMetaDataFlowController<T, O> extends FeedWithMetaDataFlowController<T, O> {
+
+    public ChangeFeedWithMetaDataFlowController(final IHyracksTaskContext ctx, final FeedTupleForwarder tupleForwarder,
+            final FeedLogManager feedLogManager, final int numOfOutputFields,
+            final RecordWithMetadataParser<T, O> dataParser, final IRecordReader<T> recordReader) {
+        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
+    }
+
+    @Override
+    protected void addPrimaryKeys(final ArrayTupleBuilder tb, final IRawRecord<? extends T> record) throws IOException {
+        dataParser.appendPK(tb);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java
index 93f866c..db95a6a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/CounterTimerTupleForwarder.java
@@ -52,20 +52,27 @@ public class CounterTimerTupleForwarder implements ITupleForwarder {
     private Object lock = new Object();
     private boolean activeTimer = false;
 
-    @Override
-    public void configure(Map<String, String> configuration) {
+    private CounterTimerTupleForwarder(int batchSize, long batchInterval) {
+        this.batchSize = batchSize;
+        this.batchInterval = batchInterval;
+        if (batchInterval > 0L) {
+            activeTimer = true;
+        }
+    }
+
+    // Factory method
+    public static CounterTimerTupleForwarder create(Map<String, String> configuration) {
+        int batchSize = -1;
+        long batchInterval = 0L;
         String propValue = configuration.get(BATCH_SIZE);
         if (propValue != null) {
             batchSize = Integer.parseInt(propValue);
-        } else {
-            batchSize = -1;
         }
-
         propValue = configuration.get(BATCH_INTERVAL);
         if (propValue != null) {
             batchInterval = Long.parseLong(propValue);
-            activeTimer = true;
         }
+        return new CounterTimerTupleForwarder(batchSize, batchInterval);
     }
 
     @Override
@@ -152,6 +159,5 @@ public class CounterTimerTupleForwarder implements ITupleForwarder {
                 e.printStackTrace();
             }
         }
-
     }
 }
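
Callers now obtain a fully initialized forwarder from the factory method instead of mutating one through configure(). A usage sketch; the literal key strings are placeholders for the BATCH_SIZE and BATCH_INTERVAL constants read inside create():

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.asterix.external.dataflow.CounterTimerTupleForwarder;

    public final class ForwarderConfigSketch {
        public static void main(String[] args) {
            Map<String, String> configuration = new HashMap<>();
            configuration.put("batch-size", "100");      // hypothetical key string
            configuration.put("batch-interval", "5000"); // hypothetical key; > 0 activates the timer
            CounterTimerTupleForwarder forwarder = CounterTimerTupleForwarder.create(configuration);
            // `forwarder` is now fixed in its batching policy and ready to be
            // handed to a data flow controller.
        }
    }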

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
index 3408af9..2cc3c66 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.external.dataflow;
 
+import java.io.IOException;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import javax.annotation.Nonnull;
@@ -29,7 +30,9 @@ import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataExceptionUtils;
 import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import org.apache.log4j.Logger;
 
 public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowController {
@@ -37,12 +40,13 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
     protected final IRecordDataParser<T> dataParser;
     protected final IRecordReader<? extends T> recordReader;
     protected final AtomicBoolean closed = new AtomicBoolean(false);
-    protected long interval;
+    protected final long interval = 1000;
     protected boolean failed = false;
 
-    public FeedRecordDataFlowController(@Nonnull FeedLogManager feedLogManager,
-            @Nonnull IRecordDataParser<T> dataParser, @Nonnull IRecordReader<T> recordReader) {
-        super(feedLogManager);
+    public FeedRecordDataFlowController(IHyracksTaskContext ctx, FeedTupleForwarder tupleForwarder,
+            @Nonnull FeedLogManager feedLogManager, int numOfOutputFields, @Nonnull IRecordDataParser<T> dataParser,
+            @Nonnull IRecordReader<T> recordReader) {
+        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields);
         this.dataParser = dataParser;
         this.recordReader = recordReader;
         recordReader.setFeedLogManager(feedLogManager);
@@ -54,7 +58,7 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
         HyracksDataException hde = null;
         try {
             failed = false;
-            initializeTupleForwarder(writer);
+            tupleForwarder.initialize(ctx, writer);
             while (recordReader.hasNext()) {
                 IRawRecord<? extends T> record = recordReader.next();
                 if (record == null) {
@@ -65,6 +69,8 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
                 tb.reset();
                 dataParser.parse(record, tb.getDataOutput());
                 tb.addFieldEndOffset();
+                addMetaPart(tb, record);
+                addPrimaryKeys(tb, record);
                 if (tb.getSize() > tupleForwarder.getMaxRecordSize()) {
                     // log
                     feedLogManager.logRecord(record.toString(), ExternalDataConstants.LARGE_RECORD_ERROR_MESSAGE);
@@ -96,6 +102,12 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
         }
     }
 
+    protected void addMetaPart(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
+    }
+
+    protected void addPrimaryKeys(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
+    }
+
     private void closeSignal() {
         synchronized (closed) {
             closed.set(true);
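
All collaborators are now injected through the constructor rather than set via configure()/setTupleForwarder(). A wiring sketch built only from the signature above; the single output field matches a plain feed, while change feeds pass extra fields for meta and keys:

    import org.apache.asterix.external.api.IRecordDataParser;
    import org.apache.asterix.external.api.IRecordReader;
    import org.apache.asterix.external.dataflow.FeedRecordDataFlowController;
    import org.apache.asterix.external.dataflow.FeedTupleForwarder;
    import org.apache.asterix.external.util.FeedLogManager;
    import org.apache.hyracks.api.context.IHyracksTaskContext;

    public final class ControllerWiringSketch {
        static <T> FeedRecordDataFlowController<T> wire(IHyracksTaskContext ctx, FeedTupleForwarder forwarder,
                FeedLogManager logManager, IRecordDataParser<T> parser, IRecordReader<T> reader) {
            int numOfOutputFields = 1; // value only; change feeds add meta and PK fields
            return new FeedRecordDataFlowController<>(ctx, forwarder, logManager, numOfOutputFields, parser, reader);
        }
    }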

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
index 580e350..f233971 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
@@ -19,25 +19,28 @@
 package org.apache.asterix.external.dataflow;
 
 import org.apache.asterix.external.api.IStreamDataParser;
-import org.apache.asterix.external.api.IStreamFlowController;
 import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
-public class FeedStreamDataFlowController extends AbstractFeedDataFlowController implements IStreamFlowController {
+public class FeedStreamDataFlowController extends AbstractFeedDataFlowController {
 
-    private IStreamDataParser dataParser;
-    private AInputStream stream;
+    private final IStreamDataParser dataParser;
+    private final AInputStream stream;
 
-    public FeedStreamDataFlowController(FeedLogManager feedLogManager) {
-        super(feedLogManager);
+    public FeedStreamDataFlowController(IHyracksTaskContext ctx, FeedTupleForwarder tupleForwarder,
+            FeedLogManager feedLogManager, int numOfFields, IStreamDataParser streamParser, AInputStream inputStream) {
+        super(ctx, tupleForwarder, feedLogManager, numOfFields);
+        this.dataParser = streamParser;
+        this.stream = inputStream;
     }
 
     @Override
     public void start(IFrameWriter writer) throws HyracksDataException {
         try {
-            initializeTupleForwarder(writer);
+            tupleForwarder.initialize(ctx, writer);
             while (true) {
                 tb.reset();
                 if (!dataParser.parse(tb.getDataOutput())) {
@@ -80,13 +83,4 @@ public class FeedStreamDataFlowController extends AbstractFeedDataFlowController
         }
         return handled;
     }
-
-    @Override
-    public void setStreamParser(IStreamDataParser dataParser) {
-        this.dataParser = dataParser;
-    }
-
-    public void setStream(AInputStream stream) {
-        this.stream = stream;
-    }
 }



[05/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/resources/beer.csv
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/beer.csv b/asterix-external-data/src/test/resources/beer.csv
new file mode 100644
index 0000000..cc41549
--- /dev/null
+++ b/asterix-external-data/src/test/resources/beer.csv
@@ -0,0 +1,7308 @@
+id,flags,expiration,cas,value,rev,vbid,dtype
+alameda_brewhouse,0,0,244368670720,"{""name"":""Alameda Brewhouse"",""city"":""Portland"",""state"":""Oregon"",""code"":""97213"",""country"":""United States"",""phone"":""1-503-460-9025"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""4765 NE Fremont""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":45.5484,""lon"":-122.619}}",1,30,1
+abbey_wright_brewing_valley_inn,0,0,244371881984,"{""name"":""Abbey Wright Brewing/Valley Inn"",""city"":""Williamsport"",""state"":""Pennsylvania"",""code"":""17702"",""country"":""United States"",""phone"":""570.326.3383"",""website"":""http://www.valleyinnonline.com/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""204 Valley Street""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":41.2225,""lon"":-77.0369}}",1,36,1
+allguer_brauhaus_ag_kempten,0,0,244377518080,"{""name"":""Allguer Brauhaus AG Kempten"",""city"":""Kempten"",""state"":""Bayern"",""code"":"""",""country"":""Germany"",""phone"":""49-(0)831-/-2050-0"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Beethovenstrae 7""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":47.7487,""lon"":10.5694}}",1,51,1
+ali_i_brewing,0,0,244368670721,"{""name"":""Ali`i Brewing"",""city"":""Honolulu"",""state"":""Hawaii"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":21.3069,""lon"":-157.858}}",1,30,1
+asheville_pizza_and_brewing_co,0,0,244371947520,"{""name"":""Asheville Pizza and Brewing Co."",""city"":""Asheville"",""state"":""North Carolina"",""code"":""28804"",""country"":""United States"",""phone"":""(828) 254-1281"",""website"":""http://www.ashevillepizza.com/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Asheville Brewpub, Brewery, Movie Theater and Pizzeria all rolled into 2 great places to meet and eat. Award winning beer and award winning pizza."",""address"":[""675 Merrimon Avenue""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":35.6221,""lon"":-82.5536}}",1,36,1
+amstel_brouwerij-amstel_light,0,0,244377583616,"{""name"":""Amstel Light"",""abv"":3.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""amstel_brouwerij"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Light Lager"",""category"":""North American Lager""}",1,51,1
+allagash_brewing-allagash_fluxus_09,0,0,244368736256,"{""name"":""Allagash Fluxus 09"",""abv"":8.3,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""allagash_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""French & Belgian-Style Saison"",""category"":""Belgian and French Ale""}",1,30,1
+atwater_block_brewing-dirty_blond,0,0,244372013056,"{""name"":""Dirty Blond"",""abv"":4.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""atwater_block_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""Made with unmalted wheat, coriander and orange peel to help you live smart and enjoy everyday!"",""style"":""Light American Wheat Ale or Lager"",""category"":""Other Style""}",1,36,1
+andechser_klosterbrauerei,0,0,244377649152,"{""name"":""Andechser Klosterbrauerei"",""city"":""Andechs"",""state"":""Bayern"",""code"":"""",""country"":""Germany"",""phone"":""49-(0)8152-/-376-0"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Bergstrae 2""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":47.9775,""lon"":11.185}}",1,51,1
+allentown_brew_works,0,0,244368736257,"{""name"":""Allentown Brew Works"",""city"":""Allentown"",""state"":""Pennsylvania"",""code"":""18101"",""country"":""United States"",""phone"":""610.433.7777"",""website"":""http://www.thebrewworks.com/allentown-brewworks/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""The Allentown Brew Works is housed inside the historic Harold's Furniture Building at 812 W Hamilton Street. Years in the making, the new Allentown restaurant and brewery are part of the continuing vision of the Fegley family that established the original Bethlehem Brew Works in 1998. Proud to be part of the resurging downtown, the 400 seat restaurant is a testament to the vibrant heartbeat that thrives in this Queen City. Offering two and a half levels of dining, plus state of the art banquet facilities in The Hamilton Room, and multiple bars offering different atmospheres, as well as an outdoor casual Biergarten and a new seductive lower level lounge – Silk."",""address"":[""814 W Hamilton St""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":40.6016,""lon"":-75.474}}",1,30,1
+atwater_block_brewing-dunkel,0,0,244372013057,"{""name"":""Dunkel"",""abv"":5.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""atwater_block_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""Our malty, sweet dark lager is a hometown favorite. Our Dunkel is packed with subtle roasted malt flavors without the excessive bitterness and heaviness of many dark beers and has a balanced hop finish.\r\n\r\nGABF Gold Winner"",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,36,1
+anheuser_busch-budweiser,0,0,244377649153,"{""name"":""Budweiser"",""abv"":5.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""anheuser_busch"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Light Lager"",""category"":""North American Lager""}",1,51,1
+allguer_brauhaus_ag_kempten-bayrisch_hell,0,0,244368801792,"{""name"":""Bayrisch Hell"",""abv"":4.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""allguer_brauhaus_ag_kempten"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+atwater_block_brewing-pilsner,0,0,244372078592,"{""name"":""Pilsner"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""atwater_block_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+arcadia_brewing-india_pale_ale,0,0,244377649154,"{""name"":""India Pale Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""arcadia_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,51,1
+american_river_brewing,0,0,244368801793,"{""name"":""American River Brewing"",""city"":""Auburn"",""state"":""California"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":38.8966,""lon"":-121.077}}",1,30,1
+atwater_block_brewing-x_line,0,0,244372078593,"{""name"":""X-Line"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""atwater_block_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+august_schell_brewing-caramel_bock,0,0,244377714688,"{""name"":""Caramel Bock"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""august_schell_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+anderson_valley_brewing-summer_solstice_cerveza_crema,0,0,244368867328,"{""name"":""Summer Solstice Cerveza Crema"",""abv"":5.6,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""anderson_valley_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""\""This copper colored ale is smooth, malty, and lightly sweet, with a delicate hint of spice for that oh-so-drinkable, extra velvety flavor.  The character is lighter in body than its cousin our wildly popular Winter Solstice Seasonal Ale.  This is a silky, creamy dream, perfect as a warm weather beer.\"""",""style"":""American-Style Cream Ale or Lager"",""category"":""Other Style""}",1,30,1
+avery_brewing_company-czar_imperial_stout,0,0,244372078594,"{""name"":""Czar Imperial Stout"",""abv"":11.9,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""avery_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,36,1
+bbc_brewing_co_llc-dark_star,0,0,244377714689,"{""name"":""Dark Star"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bbc_brewing_co_llc"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Porter"",""category"":""Irish Ale""}",1,51,1
+augusta_brewing-tannhauser,0,0,244368867329,"{""name"":""Tannhauser"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""augusta_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,30,1
+bear_republic_brewery-apex,0,0,244372078595,"{""name"":""Apex"",""abv"":7.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bear_republic_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Crafted with a blend of American and English malts and aggressively hopped with Pacific Northwest hops, this beer reflects what our brewers believe to be the Apex of IPA."",""style"":""Imperial or Double India Pale Ale"",""category"":""North American Ale""}",1,36,1
+big_time_brewing-trombipulator,0,0,244377714690,"{""name"":""Trombipulator"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""big_time_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+augustiner_brau_munchen-weissbier,0,0,244368932864,"{""name"":""Weißbier"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""augustiner_brau_munchen"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Light American Wheat Ale or Lager"",""category"":""Other Style""}",1,30,1
+bierbrouwerij_st_christoffel-robertus,0,0,244372078596,"{""name"":""Robertus"",""abv"":6.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bierbrouwerij_st_christoffel"",""updated"":""2010-07-22 20:00:20"",""description"":""Christoffel Robertus is a low-fermenting ruby-red beer, brewed in the Münchener-style. It is a malty, fresh beer with a light sweetness. The typical hop bitterness found in Blond, is very lightly present in Robertus. The use of an extensive amount of selected barley gives Robertus the special malty taste and aroma."",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,36,1
+boston_beer_company-samuel_adams_irish_red,0,0,244377780224,"{""name"":""Samuel Adams Irish Red"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""boston_beer_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Malty and slightly sweet, balanced by earthy notes from the hops.  The gentle rain and fertile soil of Ireland helped inspire this style of ale, known for being remarkably balanced. Pale and Caramel malts give the beer its rich, deep red color and distinctive caramel flavor. The sweetness of the malt is pleasantly balanced by a pronounced hop bitterness and an earthy note from the East Kent Goldings hops. Samuel Adams® Irish Red finishes smooth and leaves you wanting to take another sip."",""style"":""Irish-Style Red Ale"",""category"":""Irish Ale""}",1,51,1
+baron_brewing_company-baron_pilsner,0,0,244368932865,"{""name"":""Baron Pilsner"",""abv"":4.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""baron_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Our pilsner is a traditional Northern German Style Pilsner. It has a fantastic malty aroma with a slight spice from the hops. The head is brilliant white and floats on the clean pale lager. The sparkling mouthfeel gives way to a soft malt sweetness that is followed by a long, dry, crisp finish. The balanced clean finish taunts the mouth to take another drink. Lagered for a minimum of 8-12 weeks to ensure smoothness and drinkability.\r\n\r\nAll ingredients for the beer are imported from Germany. Brewed in accordance to the German Beer Purity Law (Reinheitsgebot) of 1516."",""style"":""German-Style Pilsener"",""category"":""German Lager""}",1,30,1
+big_buck_brewery_and_steakhouse_2-stout,0,0,244372144128,"{""name"":""Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""big_buck_brewery_and_steakhouse_2"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,36,1
+brasserie_de_saint_sylvestre-3_monts,0,0,244377780225,"{""name"":""3 Monts"",""abv"":8.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_de_saint_sylvestre"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+bavaria-club_colombia,0,0,244368932866,"{""name"":""Club Colombia"",""abv"":4.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bavaria"",""updated"":""2010-07-22 20:00:20"",""description"":""La mejor cerveza de Colombia.""}",1,30,1
+bootleggers_steakhouse_and_brewery-34th_street_porter,0,0,244372144129,"{""name"":""34th Street Porter"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bootleggers_steakhouse_and_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Porter"",""category"":""Irish Ale""}",1,36,1
+brasserie_des_gants,0,0,244377780226,"{""name"":""Brasserie des Gants"",""city"":""Irchonwelz"",""state"":""Hainaut"",""code"":"""",""country"":""Belgium"",""phone"":""32-068-28-79-36"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""rue du Castel, 19""],""geo"":{""accuracy"":""GEOMETRIC_CENTER"",""lat"":50.6204,""lon"":3.7592}}",1,51,1
+bear_republic_brewery-big_bear_black_stout,0,0,244368998400,"{""name"":""Big Bear Black Stout"",""abv"":8.1,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bear_republic_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Big Bear, as the name implies, is a hefty, black, Russian Imperial-style stout. This bold stout boasts a rich, caramel sweetness lavished by a robust, deep-roasted heartiness you can sink your teeth into. ...Big Bear's bold flavors are produced using a blend of Belgian and English roasted barley and crystal malts. Some unique flavors come forth in the malt character. ...Louisiana sweet molasses and dark brown sugar. This dark brew is well hopped with Chinook and Cascade hops, which are somewhat, masked by the malt. This is a balanced bold brew boasting an A.V.B. of 8.1% that can creep up on you, \""so don't get mauled\"". It has a dry roasted quality that masks its' high alchohol content, so drink responsibly. 2004 California State Fair, Silver Medal Winner; 2002 World Beer Cup, Gold Medal Winner; \r\n2002 Annual Bistro Beer Festival, Hayward, Gold Medal Winner; 2001 North American Brewers' Award, Honorable Mention - og 1.076, ABV 8.1%, IBU 68."",""style"":""American-Style Imperial Stout"",""category"":""North American Ale""}",1,30,1
+bootleggers_steakhouse_and_brewery,0,0,244372144130,"{""name"":""Bootleggers Steakhouse and Brewery"",""city"":""Bakersfield"",""state"":""California"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":35.3733,""lon"":-119.019}}",1,36,1
+brasserie_du_benin-ngoma_awooyo_special,0,0,244377780227,"{""name"":""Ngoma Awooyo Special"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_du_benin"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+berliner_kindl_brauerei_ag,0,0,244368998401,"{""name"":""Berliner Kindl Brauerei AG"",""city"":""Berlin"",""state"":""Berlin"",""code"":"""",""country"":""Germany"",""phone"":""49-(0)30-/-68992-0"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Werbellinstrasse 50""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":52.4793,""lon"":13.4293}}",1,30,1
+boston_beer_company-samuel_adams_scotch_ale,0,0,244372144131,"{""name"":""Samuel Adams Scotch Ale"",""abv"":5.4,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""boston_beer_company"",""updated"":""2010-07-22 20:00:20"",""description"":""This is a brew for adventurous beer drinkers. It is brewed with four malts: two row pale Harrington, Munich malt, chocolate malt, and a rare peat smoked malt commonly used by distillers of Scotch malt whiskey. This unique malt gives Samuel Adams® Scotch Ale its distinct, subtle smoky character and deep amber hue. Samuel Adams® Scotch Ale is brewed using traditional English hops, Goldings and Fuggles. This is a big brew dominated by malt flavors and aromas, rich and full bodied, slightly sweet. Its layered malt complexity lingers to a smooth and silky finish."",""style"":""Scotch Ale"",""category"":""British Ale""}",1,36,1
+brauerei_schwelm-hefe_weizen,0,0,244495941632,"{""name"":""Hefe-Weizen"",""abv"":5.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brauerei_schwelm"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""South German-Style Hefeweizen"",""category"":""German Ale""}",1,51,1
+21st_amendment_brewery_cafe-amendment_pale_ale,0,0,244375420928,"{""name"":""Amendment Pale Ale"",""abv"":5.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""21st_amendment_brewery_cafe"",""updated"":""2010-07-22 20:00:20"",""description"":""Rich golden hue color. Floral hop with sweet malt aroma. Medium mouth feel with malt sweetness, hop quenching flavor and well-balanced bitterness."",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,45,1
+bill_s_tavern_brewhouse-thundermuck_stout,0,0,244369063936,"{""name"":""Thundermuck Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bill_s_tavern_brewhouse"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,30,1
+boulder_beer_company-gabf_25th_year_beer,0,0,244372209664,"{""name"":""GABF 25th Year Beer"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""boulder_beer_company"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Extra Special Bitter"",""category"":""British Ale""}",1,36,1
+brewer_s_art-proletary,0,0,244495941633,"{""name"":""Proletary"",""abv"":5.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brewer_s_art"",""updated"":""2010-07-22 20:00:20"",""description"":""Black, smooth and easy to drink, this is a beer for the people!""}",1,51,1
+alesmith_brewing-old_numbskull_2003,0,0,244375420929,"{""name"":""Old Numbskull 2003"",""abv"":10.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""alesmith_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,45,1
+birrificia_le_baladin-nora,0,0,244369063937,"{""name"":""Nora"",""abv"":7.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""birrificia_le_baladin"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+brasserie_des_cimes-yeti,0,0,244372209665,"{""name"":""Yeti"",""abv"":8.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_des_cimes"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+brewmasters_restaurant_and_brewery_south,0,0,244495941634,"{""name"":""Brewmasters Restaurant and Brewery South"",""city"":""Kenosha"",""state"":""Wisconsin"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":42.5847,""lon"":-87.8212}}",1,51,1
+amherst_brewing_company-boltwood_bock,0,0,244375420930,"{""name"":""Boltwood Bock"",""abv"":5.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""amherst_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Light in color, full bodied and very malty, this lager has a toasted malt flavor. Brewed in February and usually on tap by May with a keg of the previous year's batch."",""style"":""Traditional German-Style Bock"",""category"":""German Lager""}",1,45,1
+bj_s_restaurant_and_brewery-p_m_porter,0,0,244369063938,"{""name"":""P.M. Porter"",""abv"":6.4,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bj_s_restaurant_and_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Porter"",""category"":""Irish Ale""}",1,30,1
+brasserie_du_bocq-triple_moine,0,0,244372275200,"{""name"":""Triple Moine"",""abv"":7.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_du_bocq"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+brouwerij_abdij_saint_sixtus-trappist_westvleteren_12,0,0,244496007168,"{""name"":""Trappist Westvleteren 12"",""abv"":11.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_abdij_saint_sixtus"",""updated"":""2010-07-22 20:00:20"",""description"":""This Belgian beer has an everlasting tast.  It has been choosen as the best beer in the world for several years!"",""style"":""Belgian-Style Quadrupel"",""category"":""Belgian and French Ale""}",1,51,1
+anderson_valley_brewing-nitro_stout,0,0,244375420931,"{""name"":""Nitro Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""anderson_valley_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,45,1
+bj_s_restaurant_and_brewery-piranha_pale_ale,0,0,244369129472,"{""name"":""Piranha Pale Ale"",""abv"":5.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bj_s_restaurant_and_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,30,1
+brasserie_dupont,0,0,244488732672,"{""name"":""Brasserie Dupont"",""city"":""Tourpes"",""state"":""Hainaut"",""code"":"""",""country"":""Belgium"",""phone"":""32-069-67-10-66"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Rue Basse 5""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":50.5718,""lon"":3.6508}}",1,36,1
+brouwerij_bavik_de_brabandere-petrus_aged_pale,0,0,244496007169,"{""name"":""Petrus Aged Pale"",""abv"":7.3,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_bavik_de_brabandere"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+bell_s_brewery_inc-batch_8000,0,0,244375486464,"{""name"":""Batch 8000"",""abv"":9.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bell_s_brewery_inc"",""updated"":""2010-07-22 20:00:20"",""description"":""Batch 8,000 is part of our commemorative series celebrating our progress with special brews. Our 8,000th batch is a special recipe to be brewed only once. It is wheat ale spiced with Coriander, Orange Peel, and Paradise Seed. Best consumed fresh."",""style"":""Belgian-Style White"",""category"":""Belgian and French Ale""}",1,45,1
+boulevard_brewing_company-single_wide_i_p_a,0,0,244369129473,"{""name"":""Single-Wide I.P.A."",""abv"":5.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""boulevard_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""The latest addition to the Boulevard family of year-around beers, Single-Wide I.P.A. is our take on a style that originated in 18th century Great Britain. This American version -- inspired by our Smokestack Series Double-Wide I.P.A. -- boasts a heady combination of six varieties of hops, some of which were employed for dry-hopping."",""style"":""American-Style India Pale Ale"",""category"":""North American Ale""}",1,30,1
+brasserie_fantme,0,0,244488798208,"{""name"":""Brasserie Fantme"",""city"":""Soy"",""state"":""Luxembourg"",""code"":"""",""country"":""Belgium"",""phone"":""32-(0)86-47-70-44"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Rue Pral 8""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":50.286,""lon"":5.5127}}",1,36,1
+brouwerij_de_gouden_boom-blanche_de_bruges,0,0,244496007170,"{""name"":""Blanche de Bruges"",""abv"":5.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_de_gouden_boom"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+boston_beer_company-samuel_adams_honey_porter,0,0,244375486465,"{""name"":""Samuel Adams Honey Porter"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""boston_beer_company"",""updated"":""2010-07-22 20:00:20"",""description"":""A dark, full flavored English porter with Scottish heather honey.  Samuel Adams® Honey Porter is a full-flavored, full-bodied English porter with a substantial roasted malt character, offering a smooth, rounded finish. This beer is brewed with traditional English Ale hops and is dry-hopped with East Kent Goldings, known for their spicy aroma and distinctive, earthy flavor. We brew Honey Porter with Scottish heather honey which balances the spiciness of the hops.\r\n\r\nThis brew is the perfect complement to glazed ham, spicy chili, and roasted vegetables like beets and carrots, which bring out the herbal notes found in the hops and the sweetness of the honey. Samuel Adams® Honey Porter also pairs well with rich desserts such as baklava and molasses cookies."",""style"":""Porter"",""category"":""Irish Ale""}",1,45,1
+brasserie_de_blaugies-biere_darbyste,0,0,244369129474,"{""name"":""Bière Darbyste"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_de_blaugies"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Light American Wheat Ale or Lager"",""category"":""Other Style""}",1,30,1
+brasserie_la_caracole,0,0,244488798209,"{""name"":""Brasserie La Caracole"",""city"":""Falmignoul"",""state"":""Namur"",""code"":"""",""country"":""Belgium"",""phone"":""32-082-74-40-80"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Cte Marie-Thrse 86""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":50.2024,""lon"":4.8914}}",1,36,1
+brouwerij_nacional_balashi,0,0,244496072704,"{""name"":""Brouwerij Nacional Balashi"",""city"":"""",""state"":"""",""code"":"""",""country"":""Aruba"",""phone"":""297 854805"",""website"":""http://www.balashi.com/balashi/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[]}",1,51,1
+boulevard_brewing_company-bully_porter,0,0,244375486466,"{""name"":""Bully! Porter"",""abv"":5.4,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""boulevard_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""The intense flavors of dark-roasted malt in Boulevard’s rendition of the classic English porter are perfectly balanced by a generous and complex hop character. Bully! Porter’s robust nature makes it the ideal companion to a variety of foods, from seafood to chocolate."",""style"":""Porter"",""category"":""Irish Ale""}",1,45,1
+brasserie_de_brunehaut-brasserie_de_brunehaut_bio_biere_ambree_organic,0,0,244369195008,"{""name"":""Brasserie de Brunehaut Bio Bière Ambrée (Organic)"",""abv"":6.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_de_brunehaut"",""updated"":""2010-07-22 20:00:20"",""description"":""Amber copper color with a beige head.\r\nCaramel malt aromas reminiscent of vanilla, along with toffee, butterscotch and ripe fruits. Top-fermented and bottle-conditioned, this is a clean, refreshing regional 'artisan' beer.\r\nHazy amber to brown coloured beer, with a fluffy off-white head. Nice aroma of spices, yeast and oak.  The alcohol subtle. Flavour is moderately spicy and slightly fruity, with balanced hops. \r\nThis beer is certified organic."",""style"":""American-Style Amber/Red Ale"",""category"":""North American Ale""}",1,30,1
+brauerei_beck-st_pauli_girl_beer,0,0,244488863744,"{""name"":""St.Pauli Girl Beer"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brauerei_beck"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,36,1
+brouwerij_sint_jozef,0,0,244496072705,"{""name"":""Brouwerij Sint-Jozef"",""city"":""Opitter"",""state"":""Limburg"",""code"":"""",""country"":""Belgium"",""phone"":""32-089-86-47-11"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Itterplein 19""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":51.1168,""lon"":5.6464}}",1,51,1
+brasserie_de_tahiti-hinano,0,0,244375552000,"{""name"":""Hinano"",""abv"":5.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_de_tahiti"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,45,1
+brasserie_ellezelloise-la_biere_des_collines_van_de_saisis,0,0,244486373376,"{""name"":""La Bière des Collines van de Saisis"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_ellezelloise"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+brauerei_beck,0,0,244488863745,"{""name"":""Brauerei Beck"",""city"":""Bremen"",""state"":""Bremen"",""code"":"""",""country"":""Germany"",""phone"":""49-(0)421-/-50940"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Am Deich 18-19""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":53.0787,""lon"":8.7901}}",1,36,1
+burgerbrau_wolnzach-hell,0,0,244496072706,"{""name"":""Hell"",""abv"":5.1,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""burgerbrau_wolnzach"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+brasserie_pietra,0,0,244492533760,"{""name"":""Brasserie Pietra"",""city"":""Furiani"",""state"":"""",""code"":"""",""country"":""France"",""phone"":""33-04.95.30.14.70"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Route de la Marana""],""geo"":{""accuracy"":""GEOMETRIC_CENTER"",""lat"":42.6483,""lon"":9.4529}}",1,45,1
+brasserie_grain_d_orge,0,0,244486504448,"{""name"":""Brasserie Grain D'Orge"",""city"":""Ronchin"",""state"":"""",""code"":"""",""country"":""France"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Once named Brasserie Jeanne D'Arc, this Brewery had changed their name due to change in management."",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":50.6054,""lon"":3.0775}}",1,30,1
+brauerei_gbr_maisel_kg,0,0,244488863746,"{""name"":""Brauerei Gbr. Maisel KG"",""city"":""Bayreuth"",""state"":"""",""code"":""95445"",""country"":""Germany"",""phone"":""+49 (0) 9 21/4 01-0"",""website"":""http://www.maisel.com/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Hindenburgstrasse 9""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":49.9477,""lon"":11.5659}}",1,36,1
+cains-fa,0,0,244496138240,"{""name"":""FA"",""abv"":4.8,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""cains"",""updated"":""2011-05-17 03:15:54"",""description"":""FA is no small beer: despite its deceptively pale golden colour, it boasts a big, smooth flavour and strong punch. Brewed with the finest English malts, and conditioned in cask with dry hops to produce fresh hop aromas and a fuller flavour, delighting the mouth and stimulating the tongue."",""style"":""Special Bitter or Best Bitter"",""category"":""British Ale""}",1,51,1
+brauhaus_johann_albrecht_konstanz-weizen,0,0,244492599296,"{""name"":""Weizen"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brauhaus_johann_albrecht_konstanz"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""South German-Style Hefeweizen"",""category"":""German Ale""}",1,45,1
+breckenridge_brewery-471_extra_esb,0,0,244486504449,"{""name"":""471 Extra ESB"",""abv"":7.8,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""breckenridge_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+breckenridge_bbq_of_omaha-raspberry_porter,0,0,244488863747,"{""name"":""Raspberry Porter"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""breckenridge_bbq_of_omaha"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+chama_river_brewing-rye_on,0,0,244496138241,"{""name"":""Rye On"",""abv"":4.8,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""chama_river_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,51,1
+brausttte_der_steirerbrau_aktiengesellschaft,0,0,244492664832,"{""name"":""Brausttte der Steirerbrau Aktiengesellschaft"",""city"":""Graz"",""state"":"""",""code"":"""",""country"":""Austria"",""phone"":""43-0316/502-3545"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Reiningshausstrae 1-7""],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":47.0679,""lon"":15.4417}}",1,45,1
+brewery_budweiser_budvar,0,0,244486569984,"{""name"":""Brewery Budweiser Budvar"",""city"":"""",""state"":""Ceske Budejovice"",""code"":"""",""country"":""Czech Republic"",""phone"":"""",""website"":""http://www.budvar.cz/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":48.9739,""lon"":14.475}}",1,30,1
+breckenridge_brewery-autumn_ale,0,0,244488929280,"{""name"":""Autumn Ale"",""abv"":6.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""breckenridge_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Brown Ale"",""category"":""North American Ale""}",1,36,1
+cherryland_brewing,0,0,244496138242,"{""name"":""Cherryland Brewing"",""city"":""Sturgeon Bay"",""state"":""Wisconsin"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":44.8342,""lon"":-87.377}}",1,51,1
+brewdog_ltd-paradox_speyside,0,0,244492664833,"{""name"":""Paradox Speyside"",""abv"":10.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brewdog_ltd"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Imperial Stout"",""category"":""North American Ale""}",1,45,1
+bridgeport_brewing-old_knucklehead_1992,0,0,244486569985,"{""name"":""Old Knucklehead 1992"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bridgeport_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+brewery_de_troch-chapeau_exotic_lambic,0,0,244488994816,"{""name"":""Chapeau Exotic Lambic"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brewery_de_troch"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Belgian-Style Fruit Lambic"",""category"":""Belgian and French Ale""}",1,36,1
+clipper_city_brewing_co-pale_ale,0,0,244496203776,"{""name"":""Pale Ale"",""abv"":5.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""clipper_city_brewing_co"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,51,1
+brewery_creek_brewing-irish_stout,0,0,244492664834,"{""name"":""Irish Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brewery_creek_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""\""Guinness\"" is the prototype of all modern stouts. Many people, however, don't realize that there are different varieties of \""Guinness\"" brewed around the world. \""Draught Guinness* and \""Foreign Extra Stout\"" are the two primary types brewed in Ireland. Foreign Extra is the one I have emulated. It is closer in style to the London Porters of old than to modern stout. Very dark and rich, not as dry as Draught, about 6% abv and around 60 IBUs (that's hop bitterness). I used \""First Gold\"" hops because that's what I could get. Guinness use Nitrogen mixed with carbon dioxide to dispense their stout which adds to the creamy mouth-feel. BTW: The \""Imported\"" Guinness you buy here in the US comes from Canada. It could just as well be brewed in the US but the common wisdom in the brewing world is that Americans prefer \""imported\"" beers and will pay more for them."",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,45,1
+broughton_ales-kinmount_willie_stout,0,0,244486569986,"{""name"":""Kinmount Willie Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""broughton_ales"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,30,1
+bridgeport_brewing-blue_heron_pale_ale,0,0,244488994817,"{""name"":""Blue Heron Pale Ale"",""abv"":4.9,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bridgeport_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+dark_horse_brewing_co-fore_smoked_stout,0,0,244496203777,"{""name"":""Fore Smoked Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""dark_horse_brewing_co"",""updated"":""2010-07-22 20:00:20"",""description"":""Brewed with all malted barley and peat malt (smoked malt). This beer is full bodied with chocolate, roasted barley flavors, and a smokey almost BBQ finish."",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,51,1
+brewmasters_restaurant_and_brewery_south-cherry_ice,0,0,244492730368,"{""name"":""Cherry Ice"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brewmasters_restaurant_and_brewery_south"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Light American Wheat Ale or Lager"",""category"":""Other Style""}",1,45,1
+brouwerij_sterkens,0,0,244486569987,"{""name"":""Brouwerij Sterkens"",""city"":""Hoogstraten-Meer"",""state"":""Antwerpen"",""code"":"""",""country"":""Belgium"",""phone"":""32-03-317-00-50"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Meerdorp 20""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":51.4439,""lon"":4.7386}}",1,30,1
+bridgeport_brewing-bottle_conditioned_porter,0,0,244488994818,"{""name"":""Bottle Conditioned Porter"",""abv"":5.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bridgeport_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Porter"",""category"":""Irish Ale""}",1,36,1
+deschutes_brewery-bachelor_esb,0,0,244628062208,"{""name"":""Bachelor ESB"",""abv"":5.3,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""deschutes_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Standing alone, distant from the Three Sisters mountains nestled to the north, Bachelor Butte was originally called \""Brother Jonathan\"" and then simply \""The Bachelor\"" before becoming widely known today as Mt. Bachelor."",""style"":""Extra Special Bitter"",""category"":""British Ale""}",1,51,1
+bridgeport_brewing-esb,0,0,244492730369,"{""name"":""ESB"",""abv"":6.1,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bridgeport_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,45,1
+browar_okocim-o_k_beer,0,0,244486635520,"{""name"":""O.K. Beer"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""browar_okocim"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+brooklyn_brewery-brooklyn_lager,0,0,244489060352,"{""name"":""Brooklyn Lager"",""abv"":5.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brooklyn_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Brooklyn Lager, the Brewery's flagship label, is New York's \""hometown\"" beer, brewed to a pre-Prohibition recipe that dates back to the days when Brooklyn was the brewing capital of the East Coast. Brooklyn Lager has won numerous awards. Wrote Michael Jackson in the Simon & Schuster Pocket Guide to Beer: \""The dry-hopped, fresh, flowery, firm, flavourful, Brooklyn Lager **-*** started well, in 1988, and has gained in character since.\""""}",1,36,1
+desnoes_geddes_ltd-dragon_stout,0,0,244628127744,"{""name"":""Dragon Stout"",""abv"":7.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""desnoes_geddes_ltd"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,51,1
+brouwerij_oud_beersel-oude_gueuze_vielle,0,0,244492730370,"{""name"":""Oude Gueuze Vielle"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_oud_beersel"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Belgian-Style Fruit Lambic"",""category"":""Belgian and French Ale""}",1,45,1
+bull_bush_pub_brewery-royal_arms_ipa,0,0,244486635521,"{""name"":""Royal Arms IPA"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""bull_bush_pub_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style India Pale Ale"",""category"":""North American Ale""}",1,30,1
+brouwerij_bosteels-triple_karmeliet,0,0,244489060353,"{""name"":""Triple Karmeliet"",""abv"":8.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_bosteels"",""updated"":""2010-07-22 20:00:20"",""description"":""LOOK:\r\nTripel Karmeliet is a very refi ned and complex golden-to-bronze brew with a fantastic creamy head. These characteristics derive not only from the grains used but also from restrained hopping with Styrians and the fruity nature (banana and vanilla) of the house yeast.\r\n\r\nSMELL:\r\nVery refined and complex. Hints of vanilla mixed with citrus aromas.\r\n\r\nTASTE:\r\nTripel Karmeliet has not only the lightness and freshness of wheat, but also the creaminess of oats together with a spicy lemony almost quinine\r\ndryness."",""style"":""Belgian-Style Tripel"",""category"":""Belgian and French Ale""}",1,36,1
+dix_barbecue_brewery-red_truck_ale,0,0,244628127745,"{""name"":""Red Truck Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""dix_barbecue_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Amber/Red Ale"",""category"":""North American Ale""}",1,51,1
+brouwerij_van_den_bossche-kerst_pater_special_christmas_beer,0,0,244492795904,"{""name"":""Kerst Pater Special Christmas Beer"",""abv"":9.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_van_den_bossche"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,45,1
+cains,0,0,244486701056,"{""name"":""Cains"",""city"":""Liverpool"",""state"":""Merseyside"",""code"":""L8 5XJ"",""country"":""United Kingdom"",""phone"":""01517098734"",""website"":""http://www.cains.co.uk/"",""type"":""brewery"",""updated"":""2011-05-17 03:05:10"",""description"":""Cains is a brewery in Liverpool, England, founded in 1858 by Irish immigrant Robert Cain. \n\nCain had begun his brewing career aged 24 when he purchased a pub and brewed his own ales. Within 25 years of founding his brewery, Cain had established over 200 pubs. Robert Cain passed away in 1907 and the brewery still lives on to this day. \n\nOne of the most iconic beer brands in the UK Cain's has gone from strength to strength since being taken over by Indian brothers Sudarghara and Ajmail Dusanj in 2002. Cains now exports it's famous beers worldwide to the USA, Denmark, China, Ireland, India and Australia."",""address"":[""Robert Cain Brewery"",""Stanhope St""]}",1,30,1
+brouwerij_sterkens-bokrijks_kruikenbier,0,0,244489060354,"{""name"":""Bokrijks Kruikenbier"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_sterkens"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+dixie_brewing,0,0,244628193280,"{""name"":""Dixie Brewing"",""city"":""New Orleans"",""state"":""Louisiana"",""code"":""70119"",""country"":""United States"",""phone"":""1-504-822-8711"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""2401 Tulane Avenue""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":29.9606,""lon"":-90.0871}}",1,51,1
+c_b_potts_of_cheyenne-big_horn_fort_collins_stout,0,0,244492795905,"{""name"":""Big Horn Fort Collins Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""c_b_potts_of_cheyenne"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,45,1
+caldera_brewing-caldera_ipa,0,0,244486701057,"{""name"":""Caldera IPA"",""abv"":6.1,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""caldera_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""Available in 12 oz. cans and kegs.  An American-style India Pale Ale brewed with plenty of body and an assertive hop profile."",""style"":""American-Style India Pale Ale"",""category"":""North American Ale""}",1,30,1
+brouwerij_the_musketiers-troubadour,0,0,244489125888,"{""name"":""Troubadour"",""abv"":6.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_the_musketiers"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,36,1
+dogfish_head_craft_brewery-black_blue,0,0,244628258816,"{""name"":""Black & Blue"",""abv"":10.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""dogfish_head_craft_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""A belgian-style Strong ale fermented with blackberries and blueberries."",""style"":""Fruit Beer"",""category"":""Other Style""}",1,51,1
+captain_lawrence_brewing_company,0,0,244492795906,"{""name"":""Captain Lawrence Brewing Company"",""city"":""Pleasantville"",""state"":""New York"",""code"":""10570"",""country"":""United States"",""phone"":""914-741-BEER"",""website"":""http://www.captainlawrencebrewing.com/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Captain Lawrence beers are craft brewed in Pleasantville New York. We offer tastings and tours on Fridays from 4pm to 7pm, and Saturdays from 12pm  6pm."",""address"":[""99 Castleton Street""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":41.126,""lon"":-73.7896}}",1,45,1
+captain_lawrence_brewing_company-captin_lawrence_liquid_gold,0,0,244486701058,"{""name"":""Captin Lawrence Liquid Gold"",""abv"":6.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""captain_lawrence_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Don't let the golden color fool you - this isn't your father's lite beer!\r\n\r\nBrewed with imported German malts and US-grown hops, this beer is a full-flavored introduction to craft-brewed beer. We add the hops late in the boil, allowing you to enjoy the flavor and aroma of the hops without an aggressive bitterness."",""style"":""Belgian-Style Pale Ale"",""category"":""Belgian and French Ale""}",1,30,1
+brouwerij_van_steenberge-ertvelds_wit,0,0,244489125889,"{""name"":""Ertvelds Wit"",""abv"":5.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brouwerij_van_steenberge"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+egan_brewing-1916_irish_stout,0,0,244628258817,"{""name"":""1916 Irish Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""egan_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,51,1
+carlow_brewing_company-o_hara_s_celtic_stout,0,0,244492861440,"{""name"":""O'Hara's Celtic Stout"",""abv"":4.3,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""carlow_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Unlike the image, does not have the creamy head like many Irish stouts (Beamish, Guinness, Murphys) and as a result lighter on the stomach and nice with a meal. Very distinctive taste which may put off those not accustomed to drinking stouts.I would recommend it for the more adventurous pallette.\r\n\r\nRoast barley comes through in the taste."",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,45,1
+carolina_brewery-lager,0,0,244486766592,"{""name"":""Lager"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""carolina_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,30,1
+butterfield_brewing_1,0,0,244489125890,"{""name"":""Butterfield Brewing #1"",""city"":""Fresno"",""state"":""California"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":36.7477,""lon"":-119.772}}",1,36,1
+engel_brauerei_schwbisch_gmnd_lang,0,0,244628258818,"{""name"":""Engel Brauerei Schwbisch Gmnd Lang"",""city"":""Schwbisch Gmnd"",""state"":""Baden-Wrttemberg"",""code"":"""",""country"":""Germany"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""D-73525 Schwbisch Gmnd""]}",1,51,1
+castle_springs_brewery,0,0,244492861441,"{""name"":""Castle Springs Brewery"",""city"":""Moultonborough"",""state"":""New Hampshire"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":43.7548,""lon"":-71.3967}}",1,45,1
+coastal_fog_brewing-brandenburg_gate_marzen,0,0,244486766593,"{""name"":""Brandenburg Gate Märzen"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""coastal_fog_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""German-Style Oktoberfest"",""category"":""German Lager""}",1,30,1
+alaskan_brewing-alaskan_barley_wine_ale,0,0,244368277504,"{""name"":""Alaskan Barley Wine Ale"",""abv"":10.4,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""alaskan_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""Alaskan Barley Wine is produced in small batches each year. Typically this higher alcohol beverage is brewed in the spring, cellared in the tunnels of the Alaska-Juneau Gold Mine for the summer and retrieved in time for its release at the Great Alaska Beer and Barley Wine Festival in January. The cool tunnels of the mine shaft provide natural refrigeration and a prime environment for the aging process. \r\n\r\nLike a fine wine, Alaskan Barley Wine can be aged for years. The bottling of the 2007 vintage of Alaskan Barley Wine will allow individuals to age it to their liking. “We figured we’d leave it up to individuals as to how long to age their Alaskan Barley Wine,” said Quality Assurance Analyst Ryan Harvey. “Some people like barley wines fresh, and others store it for years.”"",""style"":""American-Style Barley Wine Ale"",""category"":""North American Ale""}",1,29,1
+climax_brewing_copmany-climax_ipa,0,0,244489191424,"{""name"":""Climax IPA"",""abv"":6.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""climax_brewing_copmany"",""updated"":""2010-07-22 20:00:20"",""description"":""This American-style, hop-driven beer starts with a citrus flavor, followed by two layers of caramel, and finishes on a hoppy note."",""style"":""American-Style India Pale Ale"",""category"":""North American Ale""}",1,36,1
+flat_earth_brewing_company-angry_planet_pale_ale,0,0,244628324352,"{""name"":""Angry Planet Pale Ale"",""abv"":6.1,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""flat_earth_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,51,1
+cervecera_cuauhtmoc_moctezuma-dos_equis_special_lager,0,0,244492926976,"{""name"":""Dos Equis Special Lager"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""cervecera_cuauhtmoc_moctezuma"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,45,1
+cooperstown_brewing_company-backyard_india_pale_ale,0,0,244486766594,"{""name"":""Backyard India Pale Ale"",""abv"":6.1,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""cooperstown_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""\""Back Yard\"" is a golden India Pale Ale. Historically, ale shipped to India in the 19th Century was brewed to higher gravities so that it could mature during the long sea voyage. English brewers also hopped these ales heavily to protect them from spoiling. The term \""India pale Ale\"" or \""I.P.A.\"" is still used by brewers to denote a super-premium, hoppy pale ale style. Backyard IPA is no exception. English pale barley malt is predominant in this beer with just a small amount of crystal malt. It is well bittered with Cluster and Cascade hops and finished with a mix of local hop and larger amounts of Fuggle hop."",""style"":""English-Style India Pale Ale"",""category"":""British Ale""}",1,30,1
+amherst_brewing_company-north_pleasant_pale_ale,0,0,244368343040,"{""name"":""North Pleasant Pale Ale"",""abv"":5.25,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""amherst_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Light gold in color, slightly malty and hoppy, with a mild flowery hop finish."",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,29,1
+climax_brewing_copmany-climax_oktoberfest,0,0,244489191425,"{""name"":""Climax Oktoberfest"",""abv"":6.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""climax_brewing_copmany"",""updated"":""2010-07-22 20:00:20"",""description"":""Oktoberfest is typically available from August to November and has toasty caramel and malty flavor.  It is made from German Noble Hops and massive amounts of Münich Malt, which give it an orange color."",""style"":""German-Style Oktoberfest"",""category"":""German Lager""}",1,36,1
+flossmoor_station_brewery-kilt_kicker_wee_heavy,0,0,244628324353,"{""name"":""Kilt Kicker Wee Heavy"",""abv"":7.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""flossmoor_station_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Scotch Ale"",""category"":""British Ale""}",1,51,1
+cervecera_hondurea-port_royal_export,0,0,244492926977,"{""name"":""Port Royal Export"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""cervecera_hondurea"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,45,1
+coors_brewing_golden_brewery-blue_moon_abbey_ale,0,0,244486766595,"{""name"":""Blue Moon Abbey Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""coors_brewing_golden_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+anheuser_busch-michelob_ultra,0,0,244368343041,"{""name"":""Michelob Ultra"",""abv"":4.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""anheuser_busch"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,29,1
+clipper_city_brewing_co-heavy_seas_prosit_imperial_oktoberfest_lager,0,0,244489191426,"{""name"":""Heavy Seas Prosit! Imperial Oktoberfest Lager"",""abv"":9.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""clipper_city_brewing_co"",""updated"":""2010-07-22 20:00:20"",""description"":""Malt focused, made with five types of grain including Vienna and Munich malts – plus a secret extra malt that we use only in our Prosit! Consider this bomber to be the burly big brother to our Clipper City MarzHon, a three year in a row winner at the Great American Beer Festival. We’ve balanced the sweetness of the malt with three kinds of hops making this one of the boldest marzen style lagers you’ll ever try.\r\n\r\n5 Kinds of Malt, 3 Kinds of Hops\r\nestimated ABV 9% estimated IBU 25"",""style"":""German-Style Oktoberfest"",""category"":""German Lager""}",1,36,1
+flying_dog_brewery-old_scratch_lager,0,0,244628324354,"{""name"":""Old Scratch Lager"",""abv"":5.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""flying_dog_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""This dog enjoys his days in the sun... Old Scratch Amber Lager is a malty, mellow beer that is fermented at medium temperatures to develop both ale and lager characteristics. \""Gold Scratch\"" raises the standard in the amber lager category."",""style"":""American-Style Amber/Red Ale"",""category"":""North American Ale""}",1,51,1
+cooper_s_cave_ale_company,0,0,244492992512,"{""name"":""Cooper's Cave Ale Company"",""city"":""Glens Falls"",""state"":""New York"",""code"":""12801"",""country"":""United States"",""phone"":""518.792.0007"",""website"":""http://www.cooperscaveale.com/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Cooper's Cave Ale Company, Ltd. Micro Brewery is the realized goal of three ale lovers, Ed, Pat and Adrian Bethel. A passion for great English style ales led us to commercially produce small quantities on a brewing system which we built. We gladly welcome visitors to our brewery and tasting room, and now we welcome you to learn more about Cooper's Cave Ale Company."",""address"":[""2 Sagamore Street""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":43.3177,""lon"":-73.64}}",1,45,1
+coronado_brewing_company-outlet_stout,0,0,244486832128,"{""name"":""Outlet Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""coronado_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Extremely dark in color, with a malty flavor dominated by caramel and chocolate malts and a slight hoppy bitterness. This full-bodied ale has a nice smooth lasting finish."",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,30,1
+appalachian_brewing_company-hoppy_trails_india_pale_ale,0,0,244368408576,"{""name"":""Hoppy Trails India Pale Ale"",""abv"":6.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""appalachian_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""This IPA is an exciting beer with a floral in aroma and an incredible hop flavor. The maltiness is increased (over a regular pale ale) to help balance the aggressive hop usage. This contributes to the higher alcohol content as well. \r\nWhen India was part of the British Empire, pale ale shipped to the troops would often spoil due to the rough voyage and temperature extremes. The brewers had a theory that if they loaded the beer with extra hops, the hops would preserve the beer. Of course, this added significantly to the beer’s flavor and aroma. When the troops returned to Britain, they had become \""hop-heads\"", appreciating the beauty of the hop \""over-influence\"". Regular pale ale was simply not enough anymore! A new beer style, India Pale Ale, had been created: an aggressively hoppy and now quite happy to be home pale ale."",""style"":""American-Style India Pale Ale"",""category"":""North American Ale""}",1,29,1
+coopers_brewery-coopers_best_extra_stout,0,0,244489256960,"{""name"":""Coopers Best Extra Stout"",""abv"":6.3,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""coopers_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Now here's a beer with punch! \r\n\r\nCoopers Best Extra Stout is a beacon for lovers of a hearty brew. With its robust flavour it is everything a stout should be. \r\n\r\nBrewed naturally using a top fermentation method, Coopers Stout's unique rich, dark texture comes from specially roasted black malt. \r\n\r\nCoopers Best Extra Stout contains no additives and no preservatives."",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,36,1
+founders_hill_brewing-founders_light,0,0,244628389888,"{""name"":""Founders Light"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""founders_hill_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,51,1
+coronado_brewing_company-coronado_golden_ale,0,0,244493058048,"{""name"":""Coronado Golden Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""coronado_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""This golden colored ale is smooth, light in flavor, crisp and very similar to a European-style pilsner. Our Golden ale is delicately hopped with traditional pilsner style Czech Saaz hops. It is one of our most popular beers and considered our gateway beer."",""style"":""German-Style Pilsener"",""category"":""German Lager""}",1,45,1
+crabtree_brewery,0,0,244486832129,"{""name"":""Crabtree Brewery"",""city"":""Greeley"",""state"":""Colorado"",""code"":""80631"",""country"":""United States"",""phone"":""[970] 356-0516"",""website"":""http://www.crabtreebrewing.com"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""The Crabtree Brewing Company is dedicated to serving the environment, the community, our customers, and our valued employees.  Requiring the best ingredients to support unique premium beers.  Keeping a dynamic business mind while remaining modestly profitable.   Employing technology, embracing ethics, growing to meet the needs of our customers and employees, and having fun doing what we love�.Making Great Beer."",""address"":[""625 3rd St. #D""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":40.4313,""lon"":-104.688}}",1,30,1
+appalachian_brewing_company-mountain_lager,0,0,244368408577,"{""name"":""Mountain Lager"",""abv"":4.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""appalachian_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""The Dortmunder-Export style of beer was developed in Westfalen, Germany, and is a classic light lager with great character. This style boasts a light golden blonde color and exhibits a moderate hop palate. The finish of our Mountain Lager is rich yet mellow.\r\nOur brewers have developed this beer as a tribute to the Appalachian Mountains where we live and play."",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,29,1
+coronado_brewing_company-uptown_brown,0,0,244489256961,"{""name"":""Uptown Brown"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""coronado_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Brown Ale"",""category"":""North American Ale""}",1,36,1
+frederick_brewing,0,0,244628389889,"{""name"":""Frederick Brewing"",""city"":""Frederick"",""state"":""Maryland"",""code"":""20176"",""country"":""United States"",""phone"":""1-888-258-7434"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""4607 Wedgewood Boulevard""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":39.3628,""lon"":-77.4265}}",1,51,1
+crane_river_brewpub_and_cafe-homestead_pale_ale,0,0,244493058049,"{""name"":""Homestead Pale Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""crane_river_brewpub_and_cafe"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,45,1
+dark_horse_brewing_co,0,0,244618821632,"{""name"":""Dark Horse Brewing Co."",""city"":""Marshall"",""state"":""Michigan"",""code"":""49068"",""country"":""United States"",""phone"":""269-781-9940"",""website"":""http://www.darkhorsebrewery.com/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""511 S. Kalamazoo Ave.""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":42.2667,""lon"":-84.9641}}",1,30,1
+barley_brothers_brewery_and_grill-tripppleberry_wheat,0,0,244368408578,"{""name"":""Tripppleberry Wheat"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""barley_brothers_brewery_and_grill"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Light American Wheat Ale or Lager"",""category"":""Other Style""}",1,29,1
+crane_river_brewpub_and_cafe-good_life_stout,0,0,244489256962,"{""name"":""Good Life Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""crane_river_brewpub_and_cafe"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,36,1
+full_sail_brewing_1,0,0,244628455424,"{""name"":""Full Sail Brewing #1"",""city"":""Hood River"",""state"":""Oregon"",""code"":""97031"",""country"":""United States"",""phone"":""1-541-386-2281"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""506 Columbia Street""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":45.7103,""lon"":-121.515}}",1,51,1
+degroen_s_grill,0,0,244625637376,"{""name"":""DeGroen's Grill"",""city"":""Baltimore"",""state"":""Maryland"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":39.2904,""lon"":-76.6122}}",1,45,1
+de_halve_maan-straffe_hendrik_brugse,0,0,244618887168,"{""name"":""Straffe Hendrik Brugse"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""de_halve_maan"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+barley_island_brewing-flat_belly_american_wheat,0,0,244368474112,"{""name"":""Flat Belly American Wheat"",""abv"":3.6,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""barley_island_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,29,1
+denver_chophouse_and_brewery-lucky_u_denver_special_bitter,0,0,244621705216,"{""name"":""Lucky U Denver Special Bitter"",""abv"":4.46,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""denver_chophouse_and_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+granite_city_food_brewery_saint_cloud-northern_light,0,0,244754808832,"{""name"":""Northern Light"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""granite_city_food_brewery_saint_cloud"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,51,1
+dempsey_s_restaurant_brewery-holiday_wheat_bock,0,0,244625702912,"{""name"":""Holiday Wheat Bock"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""dempsey_s_restaurant_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Traditional German-Style Bock"",""category"":""German Lager""}",1,45,1
+de_proef_brouwerij-lozen_boer_abt,0,0,244618887169,"{""name"":""Lozen Boer Abt"",""abv"":10.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""de_proef_brouwerij"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+belhaven_brewery-st_andrews_ale,0,0,244368474113,"{""name"":""St. Andrews Ale"",""abv"":4.6,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""belhaven_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,29,1
+desnoes_geddes_ltd,0,0,244621770752,"{""name"":""Desnoes & Geddes Ltd"",""city"":""Kingston"",""state"":"""",""code"":"""",""country"":""Jamaica"",""phone"":""1-876-923-9291"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""214 Spanish Town""],""geo"":{""accuracy"":""GEOMETRIC_CENTER"",""lat"":33.9858,""lon"":-96.6515}}",1,36,1
+gray_brewing-honey_ale,0,0,244754874368,"{""name"":""Honey Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""gray_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,51,1
+deschutes_brewery-mirror_mirror,0,0,244625702913,"{""name"":""Mirror Mirror"",""abv"":11.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""deschutes_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""The Reserve Series romance all began with our first release of this limited-edition brew. Mirror Mirror, born of a double batch of Mirror Pond Pale Ale, is an inspired, barrel-aged barley wine layered with intriguing nuances. Explore this latest incarnation and enjoy its delicious complexity in every sip."",""style"":""American-Style Barley Wine Ale"",""category"":""North American Ale""}",1,45,1
+delafield_brewhaus-hops_and_glory_american_ale,0,0,244618887170,"{""name"":""Hops and Glory American Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""delafield_brewhaus"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,30,1
+big_river_brewing-dry_stout,0,0,244368539648,"{""name"":""Dry Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""big_river_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,29,1
+dock_street_beer,0,0,244621770753,"{""name"":""Dock Street Beer"",""city"":""West Philly"",""state"":""Pennsylvania"",""code"":""19143"",""country"":""United States"",""phone"":""(215)-726-2337"",""website"":""http://www.dockstreetbeer.com"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""701 S. 50th Street""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":39.9478,""lon"":-75.2229}}",1,36,1
+green_bay_brewing-hinterland_pale_ale,0,0,244754939904,"{""name"":""Hinterland Pale Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""green_bay_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,51,1
+diamond_knot_brewery_alehouse-possession_porter,0,0,244625768448,"{""name"":""Possession Porter"",""abv"":5.6,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""diamond_knot_brewery_alehouse"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Porter"",""category"":""Irish Ale""}",1,45,1
+dempsey_s_restaurant_brewery-golden_eagle,0,0,244618952704,"{""name"":""Golden Eagle"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""dempsey_s_restaurant_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+boss_browar_witnica_s_a,0,0,244368539649,"{""name"":""BOSS Browar Witnica S.A."",""city"":""Witnica"",""state"":"""",""code"":"""",""country"":""Poland"",""phone"":""48-(95)-751-51-14"",""website"":""http://www.browar-witnica.pl/"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Ul. Marii Konopnickiej 1""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":52.6739,""lon"":14.9004}}",1,29,1
+drake_s_brewing-maibock,0,0,244621836288,"{""name"":""Maibock"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""drake_s_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Traditional German-Style Bock"",""category"":""German Lager""}",1,36,1
+greenshields_brewery_and_pub-greenshields_pale_ale,0,0,244755005440,"{""name"":""Greenshields Pale Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""greenshields_brewery_and_pub"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,51,1
+diebels_privatbrauerei-german_premium_dark,0,0,244625768449,"{""name"":""German Premium Dark"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""diebels_privatbrauerei"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Brown Ale"",""category"":""North American Ale""}",1,45,1
+diamond_knot_brewery_alehouse-golden_ale,0,0,244618952705,"{""name"":""Golden Ale"",""abv"":5.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""diamond_knot_brewery_alehouse"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,30,1
+boston_beer_company-samuel_adams_imperial_white,0,0,244368605184,"{""name"":""Samuel Adams Imperial White"",""abv"":10.3,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""boston_beer_company"",""updated"":""2010-07-22 20:00:20"",""description"":""Samuel Adams® Imperial White is a new perspective on the classic witbier style.  Witbiers are normally light and refreshing with a fruity finish and we wanted to see how these characteristics would stand up when we amped up the recipe.  We were totally blown away by the flavors that were created by this beer.\r\n\r\nThis is not just a more intense version of our spring seasonal Samuel Adams® White Ale.  Imperial White is a new recipe that stands on it own merits.  In fact, it is more of a wine substitute than just another refreshing witbier.  This is a beer that should be sipped and savored and you"",""style"":""Belgian-Style White"",""category"":""Belgian and French Ale""}",1,29,1
+egan_brewing-nitro_pale,0,0,244621836289,"{""name"":""Nitro Pale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""egan_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Pale Ale"",""category"":""North American Ale""}",1,36,1
+hacker_pschorr_bru-alt_munich_dark,0,0,244755005441,"{""name"":""Alt Munich Dark"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""hacker_pschorr_bru"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+egan_brewing-abbot_pennings_trippel,0,0,244625768450,"{""name"":""Abbot Pennings Trippel"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""egan_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,45,1
+duck_rabbit_craft_brewery-duck_rabbit_schwarzbier,0,0,244618952706,"{""name"":""Duck-Rabbit Schwarzbier"",""abv"":5.8,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""duck_rabbit_craft_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""German-Style Schwarzbier"",""category"":""German Lager""}",1,30,1
+brasserie_de_brunehaut-abbaye_de_st_amand,0,0,244368605185,"{""name"":""Abbaye de St Amand"",""abv"":7.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_de_brunehaut"",""updated"":""2010-07-22 20:00:20"",""description"":""A unique 7% Belgian blonde ale, flavoured with juniper berries. The beer has bittersweet malt flavours, and a heady perfumed aroma of orange and juniper."",""style"":""Other Belgian-Style Ales"",""category"":""Belgian and French Ale""}",1,29,1
+egan_brewing-o_tay_bockwheat,0,0,244621836290,"{""name"":""O-Tay Bockwheat"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""egan_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+hausbrauerei_zum_schlssel-altbier,0,0,244755070976,"{""name"":""Altbier"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""hausbrauerei_zum_schlssel"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Brown Ale"",""category"":""North American Ale""}",1,51,1
+f_x_matt_brewing,0,0,244625833984,"{""name"":""F.X. Matt Brewing"",""city"":""Utica"",""state"":""New York"",""code"":""13502"",""country"":""United States"",""phone"":""1-800-690-3181-x222"",""website"":""http://www.saranac.com"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""811 Edward Street""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":43.1045,""lon"":-75.2452}}",1,45,1
+egan_brewing,0,0,244619018240,"{""name"":""Egan Brewing"",""city"":""De Pere"",""state"":""Wisconsin"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":44.4489,""lon"":-88.0604}}",1,30,1
+brasserie_de_l_abbaye_val_dieu-winter,0,0,244368605186,"{""name"":""Winter"",""abv"":10.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_de_l_abbaye_val_dieu"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,29,1
+ej_phair_brewing_company_and_alehouse-marzen,0,0,244621836291,"{""name"":""Marzen"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""ej_phair_brewing_company_and_alehouse"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""German-Style Oktoberfest"",""category"":""German Lager""}",1,36,1
+industrias_la_constancia_ilc-suprema,0,0,244755070977,"{""name"":""Suprema"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""industrias_la_constancia_ilc"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,51,1
+firehouse_brewing-smoke_jump_stout,0,0,244625833985,"{""name"":""Smoke Jump Stout"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""firehouse_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Stout"",""category"":""North American Ale""}",1,45,1
+el_ahram_company,0,0,244619018241,"{""name"":""el ahram company"",""city"":"""",""state"":"""",""code"":"""",""country"":""Egypt"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[]}",1,30,1
+brasserie_mcauslan-st_ambroise_pale_ale,0,0,244485914624,"{""name"":""St-Ambroise Pale Ale"",""abv"":5.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasserie_mcauslan"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,29,1
+elk_creek_cafe_and_aleworks-elk_creek_copper_ale,0,0,244621901824,"{""name"":""Elk Creek Copper Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""elk_creek_cafe_and_aleworks"",""updated"":""2010-07-22 20:00:20"",""description"":""This beautifully colored ale strikes an agreeable balance between malt and hops. Caramel + toffee notes from the use of more highly kilned malts step up to complement this beers assertive hop profile."",""style"":""American-Style Amber/Red Ale"",""category"":""North American Ale""}",1,36,1
+iron_springs_pub_brewery-fairfax_coffee_porter,0,0,244755070978,"{""name"":""Fairfax Coffee Porter"",""abv"":5.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""iron_springs_pub_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,51,1
+fish_brewing_company_fish_tail_brewpub-mudshark_porter,0,0,244625833986,"{""name"":""Mudshark Porter"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""fish_brewing_company_fish_tail_brewpub"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""Porter"",""category"":""Irish Ale""}",1,45,1
+emery_pub,0,0,244619018242,"{""name"":""Emery Pub"",""city"":""Emeryville"",""state"":""California"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":37.8313,""lon"":-122.285}}",1,30,1
+brasseries_kronenbourg-1664,0,0,244485914625,"{""name"":""1664"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brasseries_kronenbourg"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,29,1
+empyrean_brewing_company,0,0,244621901825,"{""name"":""Empyrean Brewing Company"",""city"":""Lincoln"",""state"":""Nebraska"",""code"":""68508"",""country"":""United States"",""phone"":""1-402-434-5959"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""729 Q Street""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":40.8155,""lon"":-96.7105}}",1,36,1
+island_brewing_company,0,0,244755136512,"{""name"":""Island Brewing Company"",""city"":""Carpinteria"",""state"":""California"",""code"":""93013"",""country"":""United States"",""phone"":""1-805-745-8272"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""5049 Sixth Street""]}",1,51,1
+3_fonteinen_brouwerij_ambachtelijke_geuzestekerij,0,0,244380663808,"{""name"":""3 Fonteinen Brouwerij Ambachtelijke Geuzestekerij"",""city"":""Beersel"",""state"":""Vlaams Brabant"",""code"":"""",""country"":""Belgium"",""phone"":""32-02-/-306-71-03"",""website"":""http://www.3fonteinen.be/index.htm"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""Hoogstraat 2A""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":50.7668,""lon"":4.3081}}",1,60,1
+fitger_s_brewhouse_brewery_and_grill-habanero,0,0,244625899520,"{""name"":""Habañero"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""fitger_s_brewhouse_brewery_and_grill"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,45,1
+esser_s_cross_plains_brewery-esser_s_best,0,0,244619083776,"{""name"":""Esser's Best"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""esser_s_cross_plains_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""German Style Lager"",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,30,1
+bricktown_brewery,0,0,244485914626,"{""name"":""Bricktown Brewery"",""city"":""Dubuque"",""state"":""Iowa"",""code"":""52001"",""country"":""United States"",""phone"":""1-563-582-0608"",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""299 Main Street""],""geo"":{""accuracy"":""ROOFTOP"",""lat"":42.4965,""lon"":-90.6652}}",1,29,1
+engine_house_9-barleywine,0,0,244621967360,"{""name"":""Barleywine"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""engine_house_9"",""updated"":""2010-07-22 20:00:20"",""description"":""""}",1,36,1
+jacob_leinenkugel_brewing_company,0,0,244755136513,"{""name"":""Jacob Leinenkugel Brewing Company"",""city"":""Chippewa Falls"",""state"":""Wisconsin"",""code"":""54729"",""country"":""United States"",""phone"":""1-715-723-5557"",""website"":""http://www.leinie.com/welcome.html"",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[""1 Jefferson Avenue""],""geo"":{""accuracy"":""RANGE_INTERPOLATED"",""lat"":44.9449,""lon"":-91.3968}}",1,51,1
+512_brewing_company-512_alt,0,0,244380663809,"{""name"":""(512) ALT"",""abv"":6.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""512_brewing_company"",""updated"":""2010-07-22 20:00:20"",""description"":""(512) ALT is a German-style amber ale that is fermented cooler than typical ales and cold conditioned like a lager. ALT means “old” in German and refers to a beer style made using ale yeast after many German brewers had switched to newly discovered lager yeast. This ale has a very smooth, yet pronounced, hop bitterness with a malty backbone and a characteristic German yeast character. Made with 98% Organic 2-row and Munch malts and US noble hops."",""style"":""German-Style Brown Ale/Altbier"",""category"":""German Ale""}",1,60,1
+flatlander_s_restaurant_brewery-prairie_wheat_beer,0,0,244625899521,"{""name"":""Prairie Wheat Beer"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""flatlander_s_restaurant_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Lager"",""category"":""North American Lager""}",1,45,1
+f_x_matt_brewing-pomegranate_wheat,0,0,244619083777,"{""name"":""Pomegranate Wheat"",""abv"":4.7,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""f_x_matt_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""This wholesome combination of unfiltered wheat beer and real pomegranate juice makes a smooth refreshing beer with all the goodness of pomegranate. Look for a light body with a slight tart finish and golden straw color."",""style"":""Fruit Beer"",""category"":""Other Style""}",1,30,1
+brooklyn_brewery-brooklyn_summer_ale,0,0,244485980160,"{""name"":""Brooklyn Summer Ale"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""brooklyn_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""It is light with a bit of a citrus flavor.  A pretty standard summer seasonal."",""style"":""Golden or Blonde Ale"",""category"":""North American Ale""}",1,29,1
+esser_s_cross_plains_brewery-esser_s_cross_plains_special,0,0,244621967361,"{""name"":""Esser's Cross Plains Special"",""abv"":0.0,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""esser_s_cross_plains_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""German-Style Pilsener"",""category"":""German Lager""}",1,36,1
+john_harvard_s_brewhouse_wilmington,0,0,244755202048,"{""name"":""John Harvard's Brewhouse - Wilmington"",""city"":""Wilmington"",""state"":""Delaware"",""code"":"""",""country"":""United States"",""phone"":"""",""website"":"""",""type"":""brewery"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""address"":[],""geo"":{""accuracy"":""APPROXIMATE"",""lat"":39.7458,""lon"":-75.5467}}",1,51,1
+adnams_co-nut_brown_ale,0,0,244380729344,"{""name"":""Nut Brown Ale"",""abv"":3.2,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""adnams_co"",""updated"":""2010-07-22 20:00:20"",""description"":"""",""style"":""American-Style Brown Ale"",""category"":""North American Ale""}",1,60,1
+flying_dog_brewery-garde_dog,0,0,244625965056,"{""name"":""Garde Dog"",""abv"":5.8,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""flying_dog_brewery"",""updated"":""2010-07-22 20:00:20"",""description"":""Proving the French may actually know something we don't... Garde Dog is a traditional French Biere de Garde or \""beer for keeping\"". This classic farmhouse ale was brewed in March for drinking during the spring and summer months. With it's toasted aroma and spicy, malty sweetness Garde Dog will liberate you from the winter doldrum.""}",1,45,1
+f_x_matt_brewing-roggen_bock,0,0,244619083778,"{""name"":""Roggen Bock"",""abv"":5.5,""ibu"":0.0,""srm"":0.0,""upc"":0,""type"":""beer"",""brewery_id"":""f_x_matt_brewing"",""updated"":""2010-07-22 20:00:20"",""description"":""Brewed in the tradition of great German Bock beers with only German malts and hops, using 20% rye, malt to give it a distinctive spicy, yet smooth character. You'll love the rich flavors and deep red color of this unique German Brew."",""style"":""Traditional German-Style Bock"",""category"":""German Lager""}",1,30,1
+broughton_ales-black_douglas,0,0,244486045696,"{""name"":""Black Douglas"","

<TRUNCATED>


[19/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
Support Change Feeds and Ingestion of Records with MetaData

This change allows feeds to perform upserts and deletes so that they can
replicate an external data source. It does so in two steps:
1. The adapter produces [PK][Record] pairs. (Record == null --> delete)
2. The insert operator is replaced by an upsert operator.
A rough sketch of the convention in step 1 follows below.
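
The sketch below is illustrative only -- hypothetical class and method
names, not the adapter API in this patch:

    // Hedged sketch of the [PK][Record] convention: a change record is a
    // primary key plus an optional payload, where null means "delete".
    final class ChangeRecordSketch {
        final String pk;      // primary key produced by the adapter
        final String record;  // payload; null --> delete

        ChangeRecordSketch(String pk, String record) {
            this.pk = pk;
            this.record = record;
        }

        boolean isDelete() {
            return record == null;
        }

        public static void main(String[] args) {
            ChangeRecordSketch upsert = new ChangeRecordSketch("k1", "{\"name\":\"v1\"}");
            ChangeRecordSketch delete = new ChangeRecordSketch("k2", null);
            System.out.println(upsert.pk + " delete? " + upsert.isDelete()); // false
            System.out.println(delete.pk + " delete? " + delete.isDelete()); // true
        }
    }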

Change-Id: If136a03d424970132dfb09f0dda56e160d4c0078
Reviewed-on: https://asterix-gerrit.ics.uci.edu/621
Reviewed-by: Yingyi Bu <bu...@gmail.com>
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/d3338f66
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/d3338f66
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/d3338f66

Branch: refs/heads/master
Commit: d3338f6659f7f97919ed42d5207970b47bc04fff
Parents: 205e490
Author: Abdullah Alamoudi <ba...@gmail.com>
Authored: Wed Mar 16 02:12:11 2016 +0300
Committer: abdullah alamoudi <ba...@gmail.com>
Committed: Tue Mar 15 16:30:21 2016 -0700

----------------------------------------------------------------------
 .gitignore                                      |     1 +
 .../optimizer/rules/ConstantFoldingRule.java    |     2 +-
 .../rules/IntroduceDynamicTypeCastRule.java     |     6 +-
 .../IntroduceStaticTypeCastForInsertRule.java   |    29 +-
 .../rules/MetaFunctionToMetaVariableRule.java   |   129 +-
 .../rules/ReplaceSinkOpWithCommitOpRule.java    |    20 +-
 .../rules/SetClosedRecordConstructorsRule.java  |     2 +-
 .../optimizer/rules/UnnestToDataScanRule.java   |    89 +-
 .../asterix/translator/CompiledStatements.java  |    89 +-
 .../LangExpressionToPlanTranslator.java         |   176 +-
 .../translator/util/PlanTranslationUtil.java    |    90 +
 asterix-app/pom.xml                             |    65 +-
 .../external/ExternalIndexingOperations.java    |     6 +-
 .../asterix/aql/translator/QueryTranslator.java |     2 +-
 .../apache/asterix/file/DatasetOperations.java  |     8 +-
 .../asterix/test/optimizer/OptimizerTest.java   |    20 +-
 .../results/disjunction-to-join-delete-3.plan   |     8 +-
 .../delete-dataset-with-meta.1.ddl.aql          |    36 +
 .../delete-dataset-with-meta.2.update.aql       |    28 +
 .../insert-dataset-with-meta.1.ddl.aql          |    36 +
 .../insert-dataset-with-meta.2.update.aql       |    29 +
 .../upsert-dataset-with-meta.1.ddl.aql          |    36 +
 .../upsert-dataset-with-meta.2.update.aql       |    29 +
 .../classad-parser/classad-parser.3.ddl.aql     |     7 +-
 .../classad-parser2/classad-parser2.3.ddl.aql   |     5 +-
 .../change-feed-with-meta-pk-in-meta.1.ddl.aql  |    58 +
 ...hange-feed-with-meta-pk-in-meta.2.update.aql |    27 +
 ...change-feed-with-meta-pk-in-meta.3.sleep.aql |    24 +
 .../change-feed-with-meta-pk-in-meta.4.ddl.aql  |    24 +
 .../feeds/change-feed/change-feed.1.ddl.aql     |    43 +
 .../feeds/change-feed/change-feed.2.update.aql  |    27 +
 .../feeds/change-feed/change-feed.3.query.aql   |    29 +
 .../feeds/change-feed/change-feed.4.ddl.aql     |    24 +
 .../feed-with-external-parser.3.ddl.aql         |     4 +-
 .../feed-with-filtered-dataset.1.ddl.aql        |     2 +-
 .../feed-with-meta-pk-in-meta.1.ddl.aql         |    55 +
 .../feed-with-meta-pk-in-meta.2.update.aql      |    27 +
 .../feed-with-meta-pk-in-meta.3.sleep.aql       |    24 +
 .../feed-with-meta-pk-in-meta.4.ddl.aql         |    24 +
 .../queries/feeds/feeds_02/feeds_02.1.ddl.aql   |    14 +-
 .../queries/feeds/feeds_07/feeds_07.1.ddl.aql   |    11 +-
 .../queries/feeds/feeds_08/feeds_08.1.ddl.aql   |     8 +-
 .../queries/feeds/feeds_09/feeds_09.1.ddl.aql   |     9 +-
 .../queries/feeds/feeds_10/feeds_10.1.ddl.aql   |     9 +-
 .../queries/feeds/feeds_11/feeds_11.1.ddl.aql   |    11 +-
 .../queries/feeds/feeds_12/feeds_12.1.ddl.aql   |    11 +-
 .../issue_230_feeds/issue_230_feeds.1.ddl.aql   |     9 +-
 .../hdfs/large-record/large-record.1.ddl.aql    |     7 +-
 .../issue_251_dataset_hint_7.1.ddl.aql          |     9 +-
 .../dataset-with-meta.1.ddl.aql                 |    36 +
 .../dataset-with-meta.2.update.aql              |    27 +
 .../adm-format/adm-format.1.ddl.aql             |     2 +-
 .../nested-index/nested-index.2.update.aql      |    10 +-
 .../issue_251_dataset_hint_7.1.ddl.sqlpp        |     6 +-
 .../distinct_by/distinct_by.2.update.sqlpp      |     2 +-
 .../nest_aggregate.2.update.sqlpp               |    12 +-
 .../nest_aggregate2.2.update.sqlpp              |    12 +-
 ...q01_pricing_summary_report_nt.2.update.sqlpp |     2 +-
 .../q02_minimum_cost_supplier.2.update.sqlpp    |    16 +-
 .../q03_shipping_priority_nt.2.update.sqlpp     |     6 +-
 .../q04_order_priority.2.update.sqlpp           |    16 +-
 .../q05_local_supplier_volume.2.update.sqlpp    |    16 +-
 .../q06_forecast_revenue_change.2.update.sqlpp  |    16 +-
 .../q07_volume_shipping.2.update.sqlpp          |    16 +-
 .../q08_national_market_share.2.update.sqlpp    |    16 +-
 .../q09_product_type_profit_nt.2.update.sqlpp   |    16 +-
 .../q10_returned_item.2.update.sqlpp            |    16 +-
 .../q10_returned_item_int64.2.update.sqlpp      |    16 +-
 .../q11_important_stock.2.update.sqlpp          |    16 +-
 .../q12_shipping/q12_shipping.2.update.sqlpp    |    16 +-
 .../q13_customer_distribution.2.update.sqlpp    |    16 +-
 .../q14_promotion_effect.2.update.sqlpp         |    16 +-
 .../q15_top_supplier.2.update.sqlpp             |    16 +-
 ...6_parts_supplier_relationship.2.update.sqlpp |    16 +-
 .../q17_large_gby_variant.2.update.sqlpp        |    16 +-
 ..._small_quantity_order_revenue.2.update.sqlpp |    16 +-
 .../q18_large_volume_customer.2.update.sqlpp    |    16 +-
 .../q19_discounted_revenue.2.update.sqlpp       |    16 +-
 .../q20_potential_part_promotion.2.update.sqlpp |    16 +-
 ...liers_who_kept_orders_waiting.2.update.sqlpp |    16 +-
 .../q22_global_sales_opportunity.2.update.sqlpp |    16 +-
 .../query-issue562.2.update.sqlpp               |    16 +-
 .../query-issue601.2.update.sqlpp               |     2 +-
 .../query-issue785-2.2.update.sqlpp             |    12 +-
 .../query-issue785.2.update.sqlpp               |    12 +-
 .../query-issue786.2.update.sqlpp               |    12 +-
 .../query-issue810-2.2.update.sqlpp             |     2 +-
 .../query-issue810-3.2.update.sqlpp             |     2 +-
 .../query-issue810.2.update.sqlpp               |     2 +-
 .../query-issue827-2.2.update.sqlpp             |     2 +-
 .../query-issue827.2.update.sqlpp               |     2 +-
 .../classad-parser/classad-parser.1.adm         |   200 +-
 .../results/feeds/change-feed/change-feed.1.adm |    10 +
 .../feed-with-external-parser.1.adm             |   198 +-
 .../feed-with-meta-pk-in-meta.1.adm             |    50 +
 .../src/test/resources/runtimets/testsuite.xml  |   166 +-
 .../resources/runtimets/testsuite_sqlpp.xml     |     4 +-
 .../config/AsterixPropertiesAccessor.java       |     4 +-
 .../asterix/test/server/FileTestServer.java     |     9 +-
 .../adapter/factory/GenericAdapterFactory.java  |    35 +-
 .../adapter/factory/LookupAdapterFactory.java   |    14 +-
 .../asterix/external/api/IAdapterFactory.java   |    15 +-
 .../external/api/IAdapterRuntimeManager.java    |     7 +-
 .../external/api/IDataFlowController.java       |    45 +-
 .../asterix/external/api/IDataParser.java       |    23 +-
 .../external/api/IDataParserFactory.java        |    13 +-
 .../api/IExternalDataSourceFactory.java         |    14 +-
 .../asterix/external/api/IExternalIndexer.java  |    15 +-
 .../asterix/external/api/IFeedAdapter.java      |    50 -
 .../api/IIndexibleExternalDataSource.java       |     2 +-
 .../external/api/IIndexingDatasource.java       |     2 -
 .../external/api/IInputStreamProvider.java      |     7 +-
 .../api/IInputStreamProviderFactory.java        |     9 +-
 .../asterix/external/api/IRecordConverter.java  |    26 +
 .../asterix/external/api/IRecordDataParser.java |     5 -
 .../external/api/IRecordDataParserFactory.java  |    14 +-
 .../asterix/external/api/IRecordReader.java     |    10 -
 .../external/api/IRecordReaderFactory.java      |    11 +-
 .../api/IRecordWithMetaDataAndPKParser.java     |    32 +
 .../external/api/IRecordWithMetaDataParser.java |    26 +
 .../external/api/IRecordWithPKDataParser.java   |    28 +
 .../asterix/external/api/IStreamDataParser.java |     3 +
 .../external/api/IStreamDataParserFactory.java  |     3 +-
 .../external/api/IStreamFlowController.java     |    23 -
 .../asterix/external/api/ITupleForwarder.java   |     4 -
 .../dataflow/AbstractDataFlowController.java    |    30 +-
 .../AbstractFeedDataFlowController.java         |    48 +-
 .../dataflow/ChangeFeedDataFlowController.java  |    45 +
 .../ChangeFeedWithMetaDataFlowController.java   |    42 +
 .../dataflow/CounterTimerTupleForwarder.java    |    20 +-
 .../dataflow/FeedRecordDataFlowController.java  |    22 +-
 .../dataflow/FeedStreamDataFlowController.java  |    26 +-
 .../external/dataflow/FeedTupleForwarder.java   |     7 +-
 .../FeedWithMetaDataFlowController.java         |    47 +
 .../dataflow/FrameFullTupleForwarder.java       |     7 -
 .../dataflow/IndexingDataFlowController.java    |    24 +-
 .../dataflow/RateControlledTupleForwarder.java  |    13 +-
 .../dataflow/RecordDataFlowController.java      |    33 +-
 .../dataflow/StreamDataFlowController.java      |    43 +-
 .../external/dataset/adapter/FeedAdapter.java   |    54 +
 .../dataset/adapter/GenericAdapter.java         |    24 +-
 .../feed/dataflow/FeedExceptionHandler.java     |     7 +-
 .../feed/dataflow/FeedRuntimeInputHandler.java  |    22 +-
 .../external/feed/runtime/AdapterExecutor.java  |     6 +-
 .../feed/runtime/AdapterRuntimeManager.java     |     8 +-
 .../external/indexing/FileOffsetIndexer.java    |    37 +-
 .../indexing/RecordColumnarIndexer.java         |    54 +-
 .../external/input/HDFSDataSourceFactory.java   |    95 +-
 .../external/input/record/CharArrayRecord.java  |    10 +-
 .../input/record/RecordWithMetadata.java        |   138 -
 .../input/record/RecordWithMetadataAndPK.java   |   186 +
 .../external/input/record/RecordWithPK.java     |    98 +
 .../CSVToRecordWithMetadataAndPKConverter.java  |    72 +
 .../CSVWithRecordConverterFactory.java          |   124 +
 .../record/converter/DCPConverterFactory.java   |    61 +
 ...questToRecordWithMetadataAndPKConverter.java |   122 +
 .../converter/IRecordConverterFactory.java      |    42 +
 ...ecordToRecordWithMetadataAndPKConverter.java |    26 +
 .../input/record/reader/EmptyRecordReader.java  |    56 -
 .../reader/RecordWithPKTestReaderFactory.java   |    55 +
 .../record/reader/TestAsterixMembersReader.java |    88 +
 .../reader/couchbase/CouchbaseReader.java       |   265 -
 .../couchbase/CouchbaseReaderFactory.java       |   154 -
 .../record/reader/hdfs/EmptyRecordReader.java   |    56 +
 .../reader/hdfs/HDFSLookupReaderFactory.java    |    27 +-
 .../record/reader/hdfs/HDFSRecordReader.java    |    31 +-
 .../input/record/reader/kv/KVReader.java        |   193 +
 .../input/record/reader/kv/KVReaderFactory.java |   149 +
 .../input/record/reader/kv/KVTestReader.java    |   178 +
 .../record/reader/kv/KVTestReaderFactory.java   |    78 +
 .../record/reader/rss/RSSRecordReader.java      |     5 -
 .../reader/rss/RSSRecordReaderFactory.java      |    20 +-
 .../stream/AbstractStreamRecordReader.java      |    27 +-
 .../AbstractStreamRecordReaderFactory.java      |    23 +-
 .../stream/EmptyLineSeparatedRecordReader.java  |     9 +-
 .../EmptyLineSeparatedRecordReaderFactory.java  |    18 +-
 .../record/reader/stream/LineRecordReader.java  |    47 +-
 .../reader/stream/LineRecordReaderFactory.java  |    22 +-
 .../reader/stream/QuotedLineRecordReader.java   |    18 +-
 .../stream/SemiStructuredRecordReader.java      |    21 +-
 .../SemiStructuredRecordReaderFactory.java      |    25 +-
 .../reader/twitter/TwitterPullRecordReader.java |    21 +-
 .../reader/twitter/TwitterPushRecordReader.java |    32 +-
 .../twitter/TwitterRecordReaderFactory.java     |    29 +-
 .../external/input/stream/AInputStream.java     |     4 -
 .../input/stream/AInputStreamReader.java        |    49 +-
 .../external/input/stream/BasicInputStream.java |     5 -
 .../stream/LocalFileSystemInputStream.java      |    16 +-
 .../input/stream/SocketInputStream.java         |   175 -
 .../input/stream/SocketServerInputStream.java   |   170 +
 .../LocalFSInputStreamProviderFactory.java      |     9 +-
 .../SocketClientInputStreamProviderFactory.java |    80 +
 .../SocketInputStreamProviderFactory.java       |   129 -
 .../SocketServerInputStreamProviderFactory.java |   141 +
 .../TwitterFirehoseStreamProviderFactory.java   |     8 +-
 .../provider/HDFSInputStreamProvider.java       |    16 +-
 .../provider/LocalFSInputStreamProvider.java    |    26 +-
 .../SocketClientInputStreamProvider.java        |    98 +
 .../provider/SocketInputStreamProvider.java     |    48 -
 .../SocketServerInputStreamProvider.java        |    43 +
 .../TwitterFirehoseInputStreamProvider.java     |    45 +-
 ...lDatasetIndexesCommitOperatorDescriptor.java |    32 +-
 .../operators/FeedIntakeOperatorDescriptor.java |    12 +-
 .../FeedIntakeOperatorNodePushable.java         |    20 +-
 .../operators/FeedMetaOperatorDescriptor.java   |    15 +-
 .../operators/FeedMetaStoreNodePushable.java    |    25 +-
 .../asterix/external/parser/ADMDataParser.java  |   122 +-
 .../external/parser/DelimitedDataParser.java    |   104 +-
 .../external/parser/HiveRecordParser.java       |    51 +-
 .../asterix/external/parser/RSSParser.java      |    18 +-
 .../parser/RecordWithMetadataParser.java        |   109 +-
 .../external/parser/RecordWithPKDataParser.java |    50 +
 .../external/parser/TestRecordWithPKParser.java |    49 +
 .../asterix/external/parser/TweetParser.java    |    18 +-
 .../parser/factory/ADMDataParserFactory.java    |    12 +-
 .../AbstractRecordStreamParserFactory.java      |     3 +-
 .../factory/DelimitedDataParserFactory.java     |    31 +-
 .../parser/factory/HiveDataParserFactory.java   |    21 +-
 .../parser/factory/RSSParserFactory.java        |    22 +-
 .../RecordWithMetadataParserFactory.java        |    89 +-
 .../factory/TestRecordWithPKParserFactory.java  |    77 +
 .../parser/factory/TweetParserFactory.java      |    21 +-
 .../provider/AdapterFactoryProvider.java        |    90 +-
 .../provider/DataflowControllerProvider.java    |   136 +-
 .../provider/DatasourceFactoryProvider.java     |   145 +-
 .../provider/LookupReaderFactoryProvider.java   |     3 +-
 .../provider/ParserFactoryProvider.java         |    28 +-
 .../RecordConverterFactoryProvider.java         |    46 +
 .../asterix/external/util/DataflowUtils.java    |    19 +-
 .../util/ExternalDataCompatibilityUtils.java    |    87 +-
 .../external/util/ExternalDataConstants.java    |    58 +-
 .../external/util/ExternalDataUtils.java        |   152 +-
 .../asterix/external/util/FeedLogManager.java   |    35 +-
 .../apache/asterix/external/util/FeedUtils.java |    20 +-
 .../external/util/FileSystemWatcher.java        |    42 +-
 .../apache/asterix/external/util/HDFSUtils.java |     2 +-
 .../asterix/external/util/TweetGenerator.java   |    15 +-
 .../classad/BuiltinClassAdFunctions.java        |     5 +-
 .../asterix/external/classad/ClassAd.java       |    82 +-
 .../classad/test/ClassAdParserTest.java         |     2 +-
 .../external/classad/test/ClassAdToADMTest.java |    16 +-
 .../external/classad/test/ClassAdUnitTest.java  |     2 +-
 .../classad/test/ClassAdUnitTester.java         |     6 +-
 .../external/classad/test/FunctionalTester.java |     4 +-
 .../generator/test/DCPGeneratorTest.java        |    54 +
 .../asterix/external/library/ClassAdParser.java |    76 +-
 .../external/library/ClassAdParserFactory.java  |    63 +-
 .../library/adapter/TestTypedAdapter.java       |    10 +-
 .../adapter/TestTypedAdapterFactory.java        |    23 +-
 .../external/parser/test/ADMDataParserTest.java |    14 +-
 .../parser/test/RecordWithMetaTest.java         |   149 +
 .../src/test/resources/beer.csv                 |  7308 +++++++++++
 .../src/test/resources/results/beer.txt         | 10995 +++++++++++++++++
 .../installer/test/AbstractExecutionIT.java     |    24 +-
 .../installer/test/ClusterExecutionIT.java      |     5 +-
 .../typed_adapter/typed_adapter.1.ddl.aql       |     4 +-
 .../feed_ingest/feed_ingest.1.ddl.aql           |     9 +-
 .../record-parser/record-parser.1.ddl.aql       |     5 +-
 .../record-parser/record-parser.1.adm           |   100 +
 .../backupRestore/backupRestore.1.adm           |     2 +-
 .../node_failback/node_failback.2.update.aql    |     3 +-
 .../aql/statement/SubscribeFeedStatement.java   |    20 +-
 .../metadata/declared/AqlDataSource.java        |    16 +-
 .../metadata/declared/AqlMetadataProvider.java  |   115 +-
 .../metadata/declared/FeedDataSource.java       |    90 +-
 .../metadata/feeds/FeedMetadataUtil.java        |   108 +-
 .../asterix/metadata/utils/DatasetUtils.java    |    32 +-
 .../om/functions/AsterixBuiltinFunctions.java   |     3 +
 .../apache/asterix/om/types/ARecordType.java    |    30 +-
 ...rixLSMPrimaryUpsertOperatorNodePushable.java |    76 +-
 ...xLSMSecondaryUpsertOperatorNodePushable.java |     9 +-
 271 files changed, 24969 insertions(+), 3618 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index f8e0b2d..d802def 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,6 +24,7 @@ bin/
 *-coredump
 *.pyc
 *.iml
+asterix-app/data/csv/beer.csv
 .idea/
 asterix.ipr
 asterix.iws

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
index c534454..fb48395 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -90,7 +90,7 @@ public class ConstantFoldingRule implements IAlgebraicRewriteRule {
             AsterixBuiltinFunctions.GET_RECORD_FIELD_VALUE, AsterixBuiltinFunctions.FIELD_ACCESS_NESTED,
             AsterixBuiltinFunctions.GET_ITEM, AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR,
             AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX, AsterixBuiltinFunctions.CAST_RECORD,
-            AsterixBuiltinFunctions.CAST_LIST, AsterixBuiltinFunctions.META);
+            AsterixBuiltinFunctions.CAST_LIST, AsterixBuiltinFunctions.META, AsterixBuiltinFunctions.META_KEY);
 
     /** Throws exceptions in substituteProducedVariable, setVarType, and one getVarType method. */
     private static final IVariableTypeEnvironment _emptyTypeEnv = new IVariableTypeEnvironment() {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
index de6eebf..f8bbba8 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
@@ -99,8 +99,9 @@ public class IntroduceDynamicTypeCastRule implements IAlgebraicRewriteRule {
                 AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
                 if (op2.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE_UPSERT) {
                     InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op2;
-                    if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE)
+                    if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE) {
                         return false;
+                    }
 
                     // Remember this is the operator we need to modify
                     op = insertDeleteOp;
@@ -108,8 +109,7 @@ public class IntroduceDynamicTypeCastRule implements IAlgebraicRewriteRule {
                     // Derive the required ARecordType based on the schema of the AqlDataSource
                     InsertDeleteUpsertOperator insertDeleteOperator = (InsertDeleteUpsertOperator) op2;
                     AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
-                    IAType[] schemaTypes = dataSource.getSchemaTypes();
-                    requiredRecordType = (ARecordType) schemaTypes[schemaTypes.length - 1];
+                    requiredRecordType = (ARecordType) dataSource.getItemType();
 
                     // Derive the Variable which we will potentially wrap with cast/null functions
                     ILogicalExpression expr = insertDeleteOperator.getPayloadExpression().getValue();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
index 0c847e6..223fab3 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceStaticTypeCastForInsertRule.java
@@ -22,12 +22,11 @@ package org.apache.asterix.optimizer.rules;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.lang3.mutable.Mutable;
-
 import org.apache.asterix.metadata.declared.AqlDataSource;
 import org.apache.asterix.om.typecomputer.base.TypeComputerUtilities;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.optimizer.rules.typecast.StaticTypeCastUtil;
+import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
@@ -67,7 +66,8 @@ import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
 public class IntroduceStaticTypeCastForInsertRule implements IAlgebraicRewriteRule {
 
     @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+            throws AlgebricksException {
         return false;
     }
 
@@ -78,29 +78,32 @@ public class IntroduceStaticTypeCastForInsertRule implements IAlgebraicRewriteRu
          * pattern match: sink/insert/assign record type is propagated from
          * insert data source to the record-constructor expression
          */
-        if (context.checkIfInDontApplySet(this, opRef.getValue()))
+        if (context.checkIfInDontApplySet(this, opRef.getValue())) {
             return false;
+        }
         context.addToDontApplySet(this, opRef.getValue());
 
         AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
         List<LogicalVariable> producedVariables = new ArrayList<LogicalVariable>();
         LogicalVariable oldRecordVariable;
 
-        if (op1.getOperatorTag() != LogicalOperatorTag.SINK)
+        if (op1.getOperatorTag() != LogicalOperatorTag.SINK) {
             return false;
+        }
         AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
-        if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT)
+        if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE_UPSERT) {
             return false;
+        }
         InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op2;
-        if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE)
+        if (insertDeleteOp.getOperation() == InsertDeleteUpsertOperator.Kind.DELETE) {
             return false;
+        }
         /**
          * get required record type
          */
         InsertDeleteUpsertOperator insertDeleteOperator = (InsertDeleteUpsertOperator) op2;
         AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
-        IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
-        IAType requiredRecordType = schemaTypes[schemaTypes.length - 1];
+        IAType requiredRecordType = dataSource.getItemType();
 
         List<LogicalVariable> usedVariables = new ArrayList<LogicalVariable>();
         insertDeleteOperator.getPayloadExpression().getValue().getUsedVariables(usedVariables);
@@ -108,8 +111,9 @@ public class IntroduceStaticTypeCastForInsertRule implements IAlgebraicRewriteRu
         // the used variable should contain the record that will be inserted
         // but it will not fail in many cases even if the used variable set is
         // empty
-        if (usedVariables.size() == 0)
+        if (usedVariables.size() == 0) {
             return false;
+        }
 
         oldRecordVariable = usedVariables.get(0);
         LogicalVariable inputRecordVar = usedVariables.get(0);
@@ -150,10 +154,11 @@ public class IntroduceStaticTypeCastForInsertRule implements IAlgebraicRewriteRu
                     context.computeAndSetTypeEnvironmentForOperator(originalAssign);
                 }
             }
-            if (currentOperator.getInputs().size() > 0)
+            if (currentOperator.getInputs().size() > 0) {
                 currentOperator = (AbstractLogicalOperator) currentOperator.getInputs().get(0).getValue();
-            else
+            } else {
                 break;
+            }
         } while (currentOperator != null);
         return true;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java
index cd06303..605ddb4 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/MetaFunctionToMetaVariableRule.java
@@ -21,9 +21,15 @@ package org.apache.asterix.optimizer.rules;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.asterix.lang.common.util.FunctionUtil;
 import org.apache.asterix.metadata.declared.AqlDataSource;
+import org.apache.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
+import org.apache.asterix.metadata.declared.FeedDataSource;
+import org.apache.asterix.om.constants.AsterixConstantValue;
 import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.asterix.om.types.IAType;
 import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
@@ -32,7 +38,11 @@ import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
 import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
 import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
 import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
@@ -65,28 +75,48 @@ public class MetaFunctionToMetaVariableRule implements IAlgebraicRewriteRule {
         if (op.getInputs().size() == 0) {
             return NoOpExpressionReferenceTransform.INSTANCE;
         }
-
         // Datascan returns a useful transform if the meta part is present in the dataset.
         if (op.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
             DataSourceScanOperator scanOp = (DataSourceScanOperator) op;
             ILogicalExpressionReferenceTransformWithCondition inputTransfomer = visit(op.getInputs().get(0));
             AqlDataSource dataSource = (AqlDataSource) scanOp.getDataSource();
-            if (!dataSource.hasMeta()) {
-                return inputTransfomer;
-            };
+            List<ILogicalExpressionReferenceTransformWithCondition> transformers = null;
             List<LogicalVariable> allVars = scanOp.getVariables();
             LogicalVariable dataVar = dataSource.getDataRecordVariable(allVars);
             LogicalVariable metaVar = dataSource.getMetaVariable(allVars);
-            LogicalExpressionReferenceTransform currentTransformer = new LogicalExpressionReferenceTransform(dataVar,
-                    metaVar);
-            if (inputTransfomer.equals(NoOpExpressionReferenceTransform.INSTANCE)) {
+            LogicalExpressionReferenceTransform currentTransformer = null;
+            if (dataSource.getDatasourceType() == AqlDataSourceType.FEED) {
+                FeedDataSource fds = (FeedDataSource) dataSource;
+                if (fds.isChange()) {
+                    transformers = new ArrayList<>();
+                    transformers.add(new MetaKeyExpressionReferenceTransform(fds.getPkVars(allVars),
+                            fds.getKeyAccessExpression()));
+                } else if (metaVar != null) {
+                    transformers = new ArrayList<>();
+                    transformers.add(new MetaKeyToFieldAccessTransform(metaVar));
+                }
+            }
+            if (!dataSource.hasMeta() && transformers == null) {
+                return inputTransfomer;
+            };
+            if (metaVar != null) {
+                currentTransformer = new LogicalExpressionReferenceTransform(dataVar, metaVar);
+            }
+            if (inputTransfomer.equals(NoOpExpressionReferenceTransform.INSTANCE) && transformers == null) {
                 return currentTransformer;
+            } else if (inputTransfomer.equals(NoOpExpressionReferenceTransform.INSTANCE)
+                    && currentTransformer == null) {
+                return transformers.get(0);
             } else {
                 // Requires an argument variable to resolve ambiguity.
-                List<ILogicalExpressionReferenceTransformWithCondition> transformers = new ArrayList<>();
-                inputTransfomer.setVariableRequired();
+                if (transformers == null) {
+                    transformers = new ArrayList<>();
+                }
+                if (!inputTransfomer.equals(NoOpExpressionReferenceTransform.INSTANCE)) {
+                    inputTransfomer.setVariableRequired();
+                    transformers.add(inputTransfomer);
+                }
                 currentTransformer.setVariableRequired();
-                transformers.add(inputTransfomer);
                 transformers.add(currentTransformer);
                 return new CompositeExpressionReferenceTransform(transformers);
             }
@@ -124,7 +154,7 @@ interface ILogicalExpressionReferenceTransformWithCondition extends ILogicalExpr
 }
 
 class NoOpExpressionReferenceTransform implements ILogicalExpressionReferenceTransformWithCondition {
-    static NoOpExpressionReferenceTransform INSTANCE = new NoOpExpressionReferenceTransform();
+    static final NoOpExpressionReferenceTransform INSTANCE = new NoOpExpressionReferenceTransform();
 
     private NoOpExpressionReferenceTransform() {
 
@@ -216,5 +246,82 @@ class CompositeExpressionReferenceTransform implements ILogicalExpressionReferen
         }
         return false;
     }
+}
+
+class MetaKeyToFieldAccessTransform implements ILogicalExpressionReferenceTransformWithCondition {
+    private final LogicalVariable metaVar;
+
+    MetaKeyToFieldAccessTransform(LogicalVariable recordVar) {
+        this.metaVar = recordVar;
+    }
+
+    @Override
+    public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
+        ILogicalExpression expr = exprRef.getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        if (!funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.META_KEY)) {
+            return false;
+        }
+        // Get arguments
+        // first argument : Resource key
+        // second argument: field
+        List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+        ConstantExpression fieldNameExpression = (ConstantExpression) args.get(1).getValue();
+        AsterixConstantValue fieldNameValue = (AsterixConstantValue) fieldNameExpression.getValue();
+        IAType fieldNameType = fieldNameValue.getObject().getType();
+        FunctionIdentifier functionIdentifier;
+        switch (fieldNameType.getTypeTag()) {
+            case ORDEREDLIST:
+                // Field access nested
+                functionIdentifier = AsterixBuiltinFunctions.FIELD_ACCESS_NESTED;
+                break;
+            case STRING:
+                // field access by name
+                functionIdentifier = AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME;
+                break;
+            default:
+                throw new AlgebricksException("Unsupported field name type " + fieldNameType.getTypeTag());
+        }
+        IFunctionInfo finfoAccess = FunctionUtil.getFunctionInfo(functionIdentifier);
+        ArrayList<Mutable<ILogicalExpression>> argExprs = new ArrayList<Mutable<ILogicalExpression>>(2);
+        argExprs.add(new MutableObject<>(new VariableReferenceExpression(metaVar)));
+        argExprs.add(new MutableObject<>(fieldNameExpression));
+        exprRef.setValue(new ScalarFunctionCallExpression(finfoAccess, argExprs));
+        return true;
+    }
+}
+
+class MetaKeyExpressionReferenceTransform implements ILogicalExpressionReferenceTransformWithCondition {
+    private final List<LogicalVariable> keyVars;
+    private final List<ScalarFunctionCallExpression> metaKeyAccessExpressions;
+
+    MetaKeyExpressionReferenceTransform(List<LogicalVariable> keyVars,
+            List<ScalarFunctionCallExpression> metaKeyAccessExpressions) {
+        this.keyVars = keyVars;
+        this.metaKeyAccessExpressions = metaKeyAccessExpressions;
+    }
+
+    @Override
+    public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
+        ILogicalExpression expr = exprRef.getValue();
+        if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
+            return false;
+        }
+        AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+        if (!funcExpr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.META_KEY)) {
+            return false;
+        }
 
+        // Function is meta key access
+        for (int i = 0; i < metaKeyAccessExpressions.size(); i++) {
+            if (metaKeyAccessExpressions.get(i).equals(funcExpr)) {
+                exprRef.setValue(new VariableReferenceExpression(keyVars.get(i)));
+                return true;
+            }
+        }
+        return false;
+    }
 }
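
For context, the two transforms added above rewrite calls to the builtin
meta-key() function: MetaKeyToFieldAccessTransform turns the call into a
field access on the meta variable, and MetaKeyExpressionReferenceTransform
substitutes the primary-key variable that a change feed already carries.
A minimal standalone sketch of the field-access rewrite, with hypothetical
names and a plain map standing in for the meta record:

    import java.util.Map;

    // Hedged sketch: meta-key($r, "f") is rewritten to a field access on
    // the meta record, modeled here as a simple map lookup.
    final class MetaKeySketch {
        static Object fieldAccessByName(Map<String, Object> metaRecord, String field) {
            return metaRecord.get(field);
        }

        public static void main(String[] args) {
            Map<String, Object> meta = Map.of("key", "user-123", "cas", 42L);
            // meta-key($r, "key") --> field-access-by-name($meta, "key")
            System.out.println(fieldAccessByName(meta, "key")); // user-123
        }
    }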

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
index ef8b4a3..0628681 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ReplaceSinkOpWithCommitOpRule.java
@@ -100,15 +100,22 @@ public class ReplaceSinkOpWithCommitOpRule implements IAlgebraicRewriteRule {
                     if (insertDeleteUpsertOperator.getOperation() == Kind.UPSERT) {
                         //we need to add a function that checks if previous record was found
                         upsertVar = context.newVar();
-                        //introduce casting to enforced type
-                        AbstractFunctionCallExpression isNullFunc = new ScalarFunctionCallExpression(
+                        AbstractFunctionCallExpression orFunc = new ScalarFunctionCallExpression(
+                                FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.OR));
+                        // is new value null? -> this means that the expected operation is delete
+                        AbstractFunctionCallExpression isNewNullFunc = new ScalarFunctionCallExpression(
                                 FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.IS_NULL));
-                        // The first argument is the record
-                        isNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(
+                        isNewNullFunc.getArguments().add(insertDeleteUpsertOperator.getPayloadExpression());
+                        AbstractFunctionCallExpression isPrevNullFunc = new ScalarFunctionCallExpression(
+                                FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.IS_NULL));
+                        // argument is the previous record
+                        isPrevNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(
                                 new VariableReferenceExpression(insertDeleteUpsertOperator.getPrevRecordVar())));
+                        orFunc.getArguments().add(new MutableObject<ILogicalExpression>(isPrevNullFunc));
+                        orFunc.getArguments().add(new MutableObject<ILogicalExpression>(isNewNullFunc));
+
                         // AssignOperator puts in the cast var the casted record
-                        upsertFlagAssign = new AssignOperator(upsertVar,
-                                new MutableObject<ILogicalExpression>(isNullFunc));
+                        upsertFlagAssign = new AssignOperator(upsertVar, new MutableObject<ILogicalExpression>(orFunc));
                         // Connect the current top of the plan to the cast operator
                         upsertFlagAssign.getInputs()
                                 .add(new MutableObject<ILogicalOperator>(sinkOperator.getInputs().get(0).getValue()));
@@ -156,5 +163,4 @@ public class ReplaceSinkOpWithCommitOpRule implements IAlgebraicRewriteRule {
         opRef.setValue(extensionOperator);
         return true;
     }
-
 }

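The replacement flag reads as or(is-null($prev), is-null($new)), which lets the commit runtime distinguish an upsert that acted as an insert (no previous record found) from one that is effectively a delete (null new value). A minimal sketch of building that expression on its own, under the same imports as the hunk; prevRecordVar and payloadExprRef are hypothetical stand-ins for the operator's fields:

    // Hypothetical inputs: the previous-record variable and the new payload expression.
    LogicalVariable prevRecordVar = new LogicalVariable(0);
    Mutable<ILogicalExpression> payloadExprRef = new MutableObject<ILogicalExpression>(
            new VariableReferenceExpression(new LogicalVariable(1)));

    // is-null($new): the incoming value is null, i.e. the operation is a delete.
    AbstractFunctionCallExpression isNewNull = new ScalarFunctionCallExpression(
            FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.IS_NULL));
    isNewNull.getArguments().add(payloadExprRef);

    // is-null($prev): no previous record was found, i.e. the upsert acted as an insert.
    AbstractFunctionCallExpression isPrevNull = new ScalarFunctionCallExpression(
            FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.IS_NULL));
    isPrevNull.getArguments().add(new MutableObject<ILogicalExpression>(
            new VariableReferenceExpression(prevRecordVar)));

    // or(...) folds both cases into the single flag consumed downstream.
    AbstractFunctionCallExpression orFunc = new ScalarFunctionCallExpression(
            FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.OR));
    orFunc.getArguments().add(new MutableObject<ILogicalExpression>(isPrevNull));
    orFunc.getArguments().add(new MutableObject<ILogicalExpression>(isNewNull));

    // The flag is then bound to a fresh variable, as in the hunk:
    // upsertFlagAssign = new AssignOperator(upsertVar, new MutableObject<ILogicalExpression>(orFunc));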
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
index bcaaaeb..1af312d 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -50,7 +50,7 @@ import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
 
 public class SetClosedRecordConstructorsRule implements IAlgebraicRewriteRule {
 
-    private SettingClosedRecordVisitor recordVisitor;
+    final private SettingClosedRecordVisitor recordVisitor;
 
     public SetClosedRecordConstructorsRule() {
         this.recordVisitor = new SettingClosedRecordVisitor();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
index 1f965d6..d1c86f0 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/UnnestToDataScanRule.java
@@ -24,6 +24,8 @@ import java.util.List;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
 import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.external.util.FeedUtils;
 import org.apache.asterix.metadata.declared.AqlDataSource;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.declared.AqlSourceId;
@@ -32,6 +34,7 @@ import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Dataverse;
 import org.apache.asterix.metadata.entities.Feed;
 import org.apache.asterix.metadata.entities.FeedPolicyEntity;
+import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
 import org.apache.asterix.metadata.utils.DatasetUtils;
 import org.apache.asterix.om.base.AString;
@@ -41,6 +44,7 @@ import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.optimizer.rules.util.EquivalenceClassUtils;
+import org.apache.asterix.translator.util.PlanTranslationUtil;
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
@@ -53,6 +57,7 @@ import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.IAlgebricksConstantValue;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
@@ -147,7 +152,7 @@ public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
 
             if (fid.equals(AsterixBuiltinFunctions.FEED_COLLECT)) {
                 if (unnest.getPositionalVariable() != null) {
-                    throw new AlgebricksException("No positional variables are allowed over datasets.");
+                    throw new AlgebricksException("No positional variables are allowed over feeds.");
                 }
 
                 String dataverse = getStringArgument(f, 0);
@@ -169,21 +174,31 @@ public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
                     }
                 }
 
-                ArrayList<LogicalVariable> v = new ArrayList<LogicalVariable>();
-                v.add(unnest.getVariable());
+                ArrayList<LogicalVariable> feedDataScanOutputVariables = new ArrayList<LogicalVariable>();
 
                 String csLocations = metadataProvider.getConfig().get(FeedActivityDetails.COLLECT_LOCATIONS);
-                AqlDataSource dataSource = createFeedDataSource(asid, targetDataset, sourceFeedName,
-                        subscriptionLocation, metadataProvider, policy, outputType,
-                        null /* TODO(Abdullah): to figure out the meta type name*/, csLocations);
-                DataSourceScanOperator scan = new DataSourceScanOperator(v, dataSource);
+                List<LogicalVariable> pkVars = new ArrayList<>();
+                FeedDataSource ds = createFeedDataSource(asid, targetDataset, sourceFeedName, subscriptionLocation,
+                        metadataProvider, policy, outputType, csLocations, unnest.getVariable(), context, pkVars);
+                // The order for feeds is <Record-Meta-PK>
+                feedDataScanOutputVariables.add(unnest.getVariable());
+                // Does it produce a meta record?
+                if (ds.hasMeta()) {
+                    feedDataScanOutputVariables.add(context.newVar());
+                }
+                // Does it produce PKs? Change feeds carry one variable per primary key.
+                if (ds.isChange()) {
+                    feedDataScanOutputVariables.addAll(pkVars);
+                }
 
+                DataSourceScanOperator scan = new DataSourceScanOperator(feedDataScanOutputVariables, ds);
                 List<Mutable<ILogicalOperator>> scanInpList = scan.getInputs();
                 scanInpList.addAll(unnest.getInputs());
                 opRef.setValue(scan);
-                addPrimaryKey(v, dataSource, context);
                 context.computeAndSetTypeEnvironmentForOperator(scan);
-
                 return true;
             }
 
@@ -201,20 +216,59 @@ public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
         context.addPrimaryKey(pk);
     }
 
-    private AqlDataSource createFeedDataSource(AqlSourceId aqlId, String targetDataset, String sourceFeedName,
+    private FeedDataSource createFeedDataSource(AqlSourceId aqlId, String targetDataset, String sourceFeedName,
             String subscriptionLocation, AqlMetadataProvider metadataProvider, FeedPolicyEntity feedPolicy,
-            String outputType, String outputMetaType, String locations) throws AlgebricksException {
+            String outputType, String locations, LogicalVariable recordVar, IOptimizationContext context,
+            List<LogicalVariable> pkVars) throws AlgebricksException {
         if (!aqlId.getDataverseName().equals(metadataProvider.getDefaultDataverse() == null ? null
                 : metadataProvider.getDefaultDataverse().getDataverseName())) {
             return null;
         }
-        IAType feedOutputType = metadataProvider.findType(aqlId.getDataverseName(), outputType);
-        IAType feedOutputMetaType = metadataProvider.findType(aqlId.getDataverseName(), outputMetaType);
+        Dataset dataset = metadataProvider.findDataset(aqlId.getDataverseName(), targetDataset);
+        ARecordType feedOutputType = (ARecordType) metadataProvider.findType(aqlId.getDataverseName(), outputType);
         Feed sourceFeed = metadataProvider.findFeed(aqlId.getDataverseName(), sourceFeedName);
-
-        FeedDataSource feedDataSource = new FeedDataSource(aqlId, targetDataset, feedOutputType, feedOutputMetaType,
-                AqlDataSource.AqlDataSourceType.FEED, sourceFeed.getFeedId(), sourceFeed.getFeedType(),
-                ConnectionLocation.valueOf(subscriptionLocation), locations.split(","));
+        ARecordType metaType = null;
+        // Does dataset have meta?
+        if (dataset.hasMetaPart()) {
+            String metaTypeName = FeedUtils.getFeedMetaTypeName(sourceFeed.getAdapterConfiguration());
+            if (metaTypeName == null) {
+                throw new AlgebricksException("Feed to a dataset with metadata doesn't have meta type specified");
+            }
+            metaType = (ARecordType) metadataProvider.findType(aqlId.getDataverseName(), metaTypeName);
+        }
+        // Is a change feed?
+        List<IAType> pkTypes = null;
+        List<List<String>> partitioningKeys = null;
+        List<Integer> keySourceIndicator = null;
+        List<Mutable<ILogicalExpression>> keyAccessExpression = null;
+        List<ScalarFunctionCallExpression> keyAccessScalarFunctionCallExpression;
+        if (ExternalDataUtils.isChangeFeed(sourceFeed.getAdapterConfiguration())) {
+            keyAccessExpression = new ArrayList<>();
+            keyAccessScalarFunctionCallExpression = new ArrayList<>();
+            pkTypes = ((InternalDatasetDetails) dataset.getDatasetDetails()).getPrimaryKeyType();
+            partitioningKeys = ((InternalDatasetDetails) dataset.getDatasetDetails()).getPartitioningKey();
+            if (dataset.hasMetaPart()) {
+                keySourceIndicator = ((InternalDatasetDetails) dataset.getDatasetDetails()).getKeySourceIndicator();
+            }
+            for (int i = 0; i < partitioningKeys.size(); i++) {
+                List<String> key = partitioningKeys.get(i);
+                if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
+                    PlanTranslationUtil.prepareVarAndExpression(key, recordVar, pkVars, keyAccessExpression, null,
+                            context);
+                } else {
+                    PlanTranslationUtil.prepareMetaKeyAccessExpression(key, recordVar, keyAccessExpression, pkVars,
+                            null, context);
+                }
+            }
+            keyAccessExpression.forEach(
+                    expr -> keyAccessScalarFunctionCallExpression.add((ScalarFunctionCallExpression) expr.getValue()));
+        } else {
+            keyAccessExpression = null;
+            keyAccessScalarFunctionCallExpression = null;
+        }
+        FeedDataSource feedDataSource = new FeedDataSource(sourceFeed, aqlId, targetDataset, feedOutputType, metaType,
+                pkTypes, partitioningKeys, keyAccessScalarFunctionCallExpression, sourceFeed.getFeedId(),
+                sourceFeed.getFeedType(), ConnectionLocation.valueOf(subscriptionLocation), locations.split(","));
         feedDataSource.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy);
         return feedDataSource;
     }
@@ -256,5 +310,4 @@ public class UnnestToDataScanRule implements IAlgebraicRewriteRule {
         String argument = ((AString) acv2.getObject()).getStringValue();
         return argument;
     }
-
 }

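After this change the scan's output schema is driven by the feed's capabilities: the record variable always comes first, a fresh meta variable follows when the data source has a meta part, and the primary-key variables close the list for change feeds. A minimal sketch of the assembly, with ds, recordVar, pkVars, and context as hypothetical stand-ins for the rule's locals:

    // Output order for feeds is <record, meta, PK...>.
    List<LogicalVariable> outputVars = new ArrayList<>();
    outputVars.add(recordVar);                 // 1. the ingested record
    if (ds.hasMeta()) {
        outputVars.add(context.newVar());      // 2. the meta record, when present
    }
    if (ds.isChange()) {
        outputVars.addAll(pkVars);             // 3. one variable per primary key
    }
    DataSourceScanOperator scan = new DataSourceScanOperator(outputVars, ds);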
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
index 5dc9f18..9d092a7 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/CompiledStatements.java
@@ -66,8 +66,8 @@ public class CompiledStatements {
 
     // added by yasser
     public static class CompiledCreateDataverseStatement implements ICompiledStatement {
-        private String dataverseName;
-        private String format;
+        private final String dataverseName;
+        private final String format;
 
         public CompiledCreateDataverseStatement(String dataverseName, String format) {
             this.dataverseName = dataverseName;
@@ -89,7 +89,7 @@ public class CompiledStatements {
     }
 
     public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
-        private String nodeGroupName;
+        private final String nodeGroupName;
 
         public CompiledNodeGroupDropStatement(String nodeGroupName) {
             this.nodeGroupName = nodeGroupName;
@@ -106,9 +106,9 @@ public class CompiledStatements {
     }
 
     public static class CompiledIndexDropStatement implements ICompiledStatement {
-        private String dataverseName;
-        private String datasetName;
-        private String indexName;
+        private final String dataverseName;
+        private final String datasetName;
+        private final String indexName;
 
         public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
             this.dataverseName = dataverseName;
@@ -135,8 +135,8 @@ public class CompiledStatements {
     }
 
     public static class CompiledDataverseDropStatement implements ICompiledStatement {
-        private String dataverseName;
-        private boolean ifExists;
+        private final String dataverseName;
+        private final boolean ifExists;
 
         public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
             this.dataverseName = dataverseName;
@@ -158,7 +158,7 @@ public class CompiledStatements {
     }
 
     public static class CompiledTypeDropStatement implements ICompiledStatement {
-        private String typeName;
+        private final String typeName;
 
         public CompiledTypeDropStatement(String typeName) {
             this.typeName = typeName;
@@ -247,11 +247,11 @@ public class CompiledStatements {
     }
 
     public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private boolean alreadySorted;
-        private String adapter;
-        private Map<String, String> properties;
+        private final String dataverseName;
+        private final String datasetName;
+        private final boolean alreadySorted;
+        private final String adapter;
+        private final Map<String, String> properties;
 
         public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
                 Map<String, String> properties, boolean alreadySorted) {
@@ -340,12 +340,12 @@ public class CompiledStatements {
     }
 
     public static class CompiledConnectFeedStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String feedName;
-        private String datasetName;
-        private String policyName;
-        private Query query;
-        private int varCounter;
+        private final String dataverseName;
+        private final String feedName;
+        private final String datasetName;
+        private final String policyName;
+        private final Query query;
+        private final int varCounter;
 
         public CompiledConnectFeedStatement(String dataverseName, String feedName, String datasetName,
                 String policyName, Query query, int varCounter) {
@@ -379,10 +379,6 @@ public class CompiledStatements {
             return query;
         }
 
-        public void setQuery(Query query) {
-            this.query = query;
-        }
-
         @Override
         public Kind getKind() {
             return Kind.CONNECT_FEED;
@@ -395,13 +391,11 @@ public class CompiledStatements {
 
     public static class CompiledSubscribeFeedStatement implements ICompiledDmlStatement {
 
-        private final FeedConnectionRequest request;
-        private Query query;
+        private final FeedConnectionRequest request;
         private final int varCounter;
 
-        public CompiledSubscribeFeedStatement(FeedConnectionRequest request, Query query, int varCounter) {
+        public CompiledSubscribeFeedStatement(FeedConnectionRequest request, int varCounter) {
             this.request = request;
-            this.query = query;
             this.varCounter = varCounter;
         }
 
@@ -410,6 +404,10 @@ public class CompiledStatements {
             return request.getReceivingFeedId().getDataverse();
         }
 
+        public String getFeedName() {
+            return request.getReceivingFeedId().getFeedName();
+        }
+
         @Override
         public String getDatasetName() {
             return request.getTargetDataset();
@@ -419,27 +417,16 @@ public class CompiledStatements {
             return varCounter;
         }
 
-        public Query getQuery() {
-            return query;
-        }
-
-        public void setQuery(Query query) {
-            this.query = query;
-        }
-
         @Override
         public Kind getKind() {
             return Kind.SUBSCRIBE_FEED;
         }
-
     }
 
     public static class CompiledDisconnectFeedStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private String feedName;
-        private Query query;
-        private int varCounter;
+        private final String dataverseName;
+        private final String datasetName;
+        private final String feedName;
 
         public CompiledDisconnectFeedStatement(String dataverseName, String feedName, String datasetName) {
             this.dataverseName = dataverseName;
@@ -461,14 +448,6 @@ public class CompiledStatements {
             return feedName;
         }
 
-        public int getVarCounter() {
-            return varCounter;
-        }
-
-        public Query getQuery() {
-            return query;
-        }
-
         @Override
         public Kind getKind() {
             return Kind.DISCONNECT_FEED;
@@ -477,11 +456,11 @@ public class CompiledStatements {
     }
 
     public static class CompiledDeleteStatement implements ICompiledDmlStatement {
-        private String dataverseName;
-        private String datasetName;
-        private Expression condition;
-        private int varCounter;
-        private Query query;
+        private final String dataverseName;
+        private final String datasetName;
+        private final Expression condition;
+        private final int varCounter;
+        private final Query query;
 
         public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName, Expression condition,
                 int varCounter, Query query) {

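One consequence of this cleanup worth noting: CompiledSubscribeFeedStatement no longer carries the Query (the subscribe query is supplied elsewhere), so callers construct it from the connection request alone. A minimal usage sketch, with request and varCounter as hypothetical stand-ins:

    // Hypothetical usage: the statement is now built without a Query object.
    CompiledSubscribeFeedStatement stmt = new CompiledSubscribeFeedStatement(request, varCounter);
    String dataverse = stmt.getDataverseName(); // derived from request.getReceivingFeedId()
    String feedName = stmt.getFeedName();       // newly exposed accessor
    String dataset = stmt.getDatasetName();     // still request.getTargetDataset()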
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 4739b71..c7975c6 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -77,9 +77,10 @@ import org.apache.asterix.metadata.declared.ResultSetDataSink;
 import org.apache.asterix.metadata.declared.ResultSetSinkId;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.entities.Function;
+import org.apache.asterix.metadata.entities.InternalDatasetDetails;
+import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
 import org.apache.asterix.metadata.functions.ExternalFunctionCompilerUtil;
 import org.apache.asterix.metadata.utils.DatasetUtils;
-import org.apache.asterix.om.base.AOrderedList;
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.constants.AsterixConstantValue;
 import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
@@ -89,7 +90,9 @@ import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.om.util.AsterixAppContextInfo;
 import org.apache.asterix.runtime.formats.FormatUtils;
 import org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
+import org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
 import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
+import org.apache.asterix.translator.util.PlanTranslationUtil;
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -99,6 +102,7 @@ import org.apache.hyracks.algebricks.core.algebra.base.Counter;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
+import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
 import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
 import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
 import org.apache.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
@@ -157,9 +161,8 @@ class LangExpressionToPlanTranslator
 
     protected final AqlMetadataProvider metadataProvider;
     protected final TranslationContext context;
-    private static AtomicLong outputFileID = new AtomicLong(0);
+    private static final AtomicLong outputFileID = new AtomicLong(0);
     private static final String OUTPUT_FILE_PREFIX = "OUTPUT_";
-    private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
 
     public LangExpressionToPlanTranslator(AqlMetadataProvider metadataProvider, int currentVarCounter)
             throws AlgebricksException {
@@ -188,6 +191,10 @@ class LangExpressionToPlanTranslator
         DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
                 stmt.getDatasetName());
         List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
+        if (targetDatasource.getDataset().hasMetaPart()) {
+            throw new AlgebricksException(targetDatasource.getDataset().getDatasetName()
+                    + ": load is not supported on datasets with meta records");
+        }
 
         LoadableDataSource lds;
         try {
@@ -218,7 +225,8 @@ class LangExpressionToPlanTranslator
         List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
         LogicalVariable payloadVar = payloadVars.get(0);
         for (List<String> keyFieldName : partitionKeys) {
-            prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading);
+            PlanTranslationUtil.prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading,
+                    context);
         }
 
         AssignOperator assign = new AssignOperator(pkVars, pkExprs);
@@ -242,8 +250,8 @@ class LangExpressionToPlanTranslator
             additionalFilteringVars = new ArrayList<LogicalVariable>();
             additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
             additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
-            prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars,
-                    additionalFilteringAssignExpressions, additionalFilteringExpressions);
+            PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars,
+                    additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
             additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
                     additionalFilteringAssignExpressions);
         }
@@ -264,6 +272,7 @@ class LangExpressionToPlanTranslator
         return new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(leafOperator));
     }
 
+    @SuppressWarnings("unchecked")
     @Override
     public ILogicalPlan translate(Query expr, String outputDatasetName, ICompiledDmlStatement stmt)
             throws AlgebricksException, AsterixException {
@@ -272,6 +281,7 @@ class LangExpressionToPlanTranslator
         ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
         ILogicalOperator topOp = p.first;
         ProjectOperator project = (ProjectOperator) topOp;
+        LogicalVariable unnestVar = project.getVariables().get(0);
         LogicalVariable resVar = project.getVariables().get(0);
 
         if (outputDatasetName == null) {
@@ -296,11 +306,10 @@ class LangExpressionToPlanTranslator
             }
         } else {
             /**
-             * add the collection-to-sequence right before the final project,
+             * add the collection-to-sequence right before the project,
              * because dataset only accept non-collection records
              */
             LogicalVariable seqVar = context.newVar();
-            @SuppressWarnings("unchecked")
             /** This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest it; otherwise do nothing. */
             AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
                     new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
@@ -311,17 +320,28 @@ class LangExpressionToPlanTranslator
             project.getInputs().get(0).setValue(assignCollectionToSequence);
             project.getVariables().set(0, seqVar);
             resVar = seqVar;
-
             DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
                     stmt.getDatasetName());
+            List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset()
+                    .getDatasetDetails()).getKeySourceIndicator();
             ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
             ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
             List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
             List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
-            for (List<String> keyFieldName : partitionKeys) {
-                prepareVarAndExpression(keyFieldName, resVar, vars, exprs, varRefsForLoading);
+            int numOfPrimaryKeys = partitionKeys.size();
+            for (int i = 0; i < numOfPrimaryKeys; i++) {
+                if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
+                    // record part
+                    PlanTranslationUtil.prepareVarAndExpression(partitionKeys.get(i), resVar, vars, exprs,
+                            varRefsForLoading, context);
+                } else {
+                    // meta part
+                    PlanTranslationUtil.prepareMetaKeyAccessExpression(partitionKeys.get(i), unnestVar, exprs, vars,
+                            varRefsForLoading, context);
+                }
             }
 
+            AssignOperator assign = new AssignOperator(vars, exprs);
             List<String> additionalFilteringField = DatasetUtils.getFilterField(targetDatasource.getDataset());
             List<LogicalVariable> additionalFilteringVars = null;
             List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
@@ -332,16 +352,11 @@ class LangExpressionToPlanTranslator
                 additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
                 additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
 
-                prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars,
-                        additionalFilteringAssignExpressions, additionalFilteringExpressions);
+                PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars,
+                        additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
 
                 additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
                         additionalFilteringAssignExpressions);
-            }
-
-            AssignOperator assign = new AssignOperator(vars, exprs);
-
-            if (additionalFilteringAssign != null) {
                 additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(project));
                 assign.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
             } else {
@@ -354,6 +369,10 @@ class LangExpressionToPlanTranslator
 
             switch (stmt.getKind()) {
                 case INSERT: {
+                    if (targetDatasource.getDataset().hasMetaPart()) {
+                        throw new AlgebricksException(targetDatasource.getDataset().getDatasetName()
+                                + ": insert into dataset is not supported on Datasets with Meta records");
+                    }
                     InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
                             varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
                     insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
@@ -363,6 +382,10 @@ class LangExpressionToPlanTranslator
                     break;
                 }
                 case UPSERT: {
+                    if (targetDatasource.getDataset().hasMetaPart()) {
+                        throw new AlgebricksException(targetDatasource.getDataset().getDatasetName()
+                                + ": upsert into dataset is not supported on Datasets with Meta records");
+                    }
                     InsertDeleteUpsertOperator upsertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
                             varRefsForLoading, InsertDeleteUpsertOperator.Kind.UPSERT, false);
                     upsertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
@@ -380,6 +403,10 @@ class LangExpressionToPlanTranslator
                     break;
                 }
                 case DELETE: {
+                    if (targetDatasource.getDataset().hasMetaPart()) {
+                        throw new AlgebricksException(targetDatasource.getDataset().getDatasetName()
+                                + ": delete from dataset is not supported on Datasets with Meta records");
+                    }
                     InsertDeleteUpsertOperator deleteOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
                             varRefsForLoading, InsertDeleteUpsertOperator.Kind.DELETE, false);
                     deleteOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
@@ -398,12 +425,77 @@ class LangExpressionToPlanTranslator
                     break;
                 }
                 case SUBSCRIBE_FEED: {
-                    InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
-                            varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
-                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
-                    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
+                    // if the feed is a change feed (i.e., it may insert, upsert, or delete), we also need to project the meta and PK variables
+                    CompiledSubscribeFeedStatement sfs = (CompiledSubscribeFeedStatement) stmt;
+                    InsertDeleteUpsertOperator feedModificationOp;
+                    AssignOperator metaAndKeysAssign = null;
+                    List<LogicalVariable> metaAndKeysVars = null;
+                    List<Mutable<ILogicalExpression>> metaAndKeysExprs = null;
+                    List<Mutable<ILogicalExpression>> metaExpSingletonList = null;
+                    boolean isChangeFeed = FeedMetadataUtil.isChangeFeed(metadataProvider, sfs.getDataverseName(),
+                            sfs.getFeedName());
+                    if (targetDatasource.getDataset().hasMetaPart() || isChangeFeed) {
+                        metaAndKeysVars = new ArrayList<>();
+                        metaAndKeysExprs = new ArrayList<>();
+                    }
+                    if (targetDatasource.getDataset().hasMetaPart()) {
+                        // add the meta function
+                        IFunctionInfo finfoMeta = FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.META);
+                        ScalarFunctionCallExpression metaFunction = new ScalarFunctionCallExpression(finfoMeta,
+                                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(unnestVar)));
+                        // create assign for the meta part
+                        LogicalVariable metaVar = context.newVar();
+                        metaExpSingletonList = new ArrayList<>(1);
+                        metaExpSingletonList
+                                .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(metaVar)));
+                        metaAndKeysVars.add(metaVar);
+                        metaAndKeysExprs.add(new MutableObject<ILogicalExpression>(metaFunction));
+                        project.getVariables().add(metaVar);
+                    }
+                    if (isChangeFeed) {
+                        varRefsForLoading.clear();
+                        for (Mutable<ILogicalExpression> assignExpr : exprs) {
+                            if (assignExpr.getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+                                AbstractFunctionCallExpression funcCall = (AbstractFunctionCallExpression) assignExpr
+                                        .getValue();
+                                funcCall.substituteVar(resVar, unnestVar);
+                                LogicalVariable pkVar = context.newVar();
+                                metaAndKeysVars.add(pkVar);
+                                metaAndKeysExprs.add(new MutableObject<ILogicalExpression>(assignExpr.getValue()));
+                                project.getVariables().add(pkVar);
+                                varRefsForLoading.add(
+                                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pkVar)));
+                            }
+                        }
+                        // For a change feed, we don't need the assign to access the PKs
+                        feedModificationOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
+                                metaExpSingletonList, InsertDeleteUpsertOperator.Kind.UPSERT, false);
+                        // Create and add a new variable used for representing the original record
+                        feedModificationOp.setPrevRecordVar(context.newVar());
+                        feedModificationOp.setPrevRecordType(targetDatasource.getItemType());
+                        if (additionalFilteringField != null) {
+                            feedModificationOp.setPrevFilterVar(context.newVar());
+                            feedModificationOp.setPrevFilterType(((ARecordType) targetDatasource.getItemType())
+                                    .getFieldType(additionalFilteringField.get(0)));
+                            additionalFilteringAssign.getInputs().clear();
+                            additionalFilteringAssign.getInputs().add(assign.getInputs().get(0));
+                            feedModificationOp.getInputs().add(new MutableObject<>(additionalFilteringAssign));
+                        } else {
+                            feedModificationOp.getInputs().add(assign.getInputs().get(0));
+                        }
+                    } else {
+                        feedModificationOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
+                                metaExpSingletonList, InsertDeleteUpsertOperator.Kind.INSERT, false);
+                        feedModificationOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
+                    }
+                    if (targetDatasource.getDataset().hasMetaPart() || isChangeFeed) {
+                        metaAndKeysAssign = new AssignOperator(metaAndKeysVars, metaAndKeysExprs);
+                        metaAndKeysAssign.getInputs().add(project.getInputs().get(0));
+                        project.getInputs().set(0, new MutableObject<ILogicalOperator>(metaAndKeysAssign));
+                    }
+                    feedModificationOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
                     leafOperator = new SinkOperator();
-                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
+                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(feedModificationOp));
                     break;
                 }
                 default:
@@ -417,33 +509,6 @@ class LangExpressionToPlanTranslator
         return plan;
     }
 
-    @SuppressWarnings("unchecked")
-    private void prepareVarAndExpression(List<String> field, LogicalVariable resVar,
-            List<LogicalVariable> additionalFilteringVars,
-            List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions,
-            List<Mutable<ILogicalExpression>> varRefs) {
-        IFunctionInfo finfoAccess;
-        ScalarFunctionCallExpression f;
-        if (field.size() > 1) {
-            finfoAccess = FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED);
-            f = new ScalarFunctionCallExpression(finfoAccess,
-                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
-                    new MutableObject<ILogicalExpression>(
-                            new ConstantExpression(new AsterixConstantValue(new AOrderedList(field)))));
-        } else {
-            finfoAccess = FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME);
-            f = new ScalarFunctionCallExpression(finfoAccess,
-                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
-                    new MutableObject<ILogicalExpression>(
-                            new ConstantExpression(new AsterixConstantValue(new AString(field.get(0))))));
-        }
-        f.substituteVar(METADATA_DUMMY_VAR, resVar);
-        additionalFilteringAssignExpressions.add(new MutableObject<ILogicalExpression>(f));
-        LogicalVariable v = context.newVar();
-        additionalFilteringVars.add(v);
-        varRefs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
-    }
-
     private DatasetDataSource validateDatasetInfo(AqlMetadataProvider metadataProvider, String dataverseName,
             String datasetName) throws AlgebricksException {
         Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
@@ -651,6 +716,7 @@ class LangExpressionToPlanTranslator
         throw new IllegalStateException("Function declarations should be inlined at AST rewriting phase.");
     }
 
+    @SuppressWarnings("unchecked")
     @Override
     public Pair<ILogicalOperator, LogicalVariable> visit(GroupbyClause gc, Mutable<ILogicalOperator> tupSource)
             throws AsterixException {
@@ -844,7 +910,7 @@ class LangExpressionToPlanTranslator
         }
 
         // Add hints as annotations.
-        if (op.hasHints() && currExpr instanceof AbstractFunctionCallExpression) {
+        if (op.hasHints() && (currExpr instanceof AbstractFunctionCallExpression)) {
             AbstractFunctionCallExpression currFuncExpr = (AbstractFunctionCallExpression) currExpr;
             for (IExpressionAnnotation hint : op.getHints()) {
                 currFuncExpr.getAnnotations().put(hint, hint);
@@ -1237,10 +1303,10 @@ class LangExpressionToPlanTranslator
 
     protected boolean expressionNeedsNoNesting(Expression expr) {
         Kind k = expr.getKind();
-        return k == Kind.LITERAL_EXPRESSION || k == Kind.LIST_CONSTRUCTOR_EXPRESSION
-                || k == Kind.RECORD_CONSTRUCTOR_EXPRESSION || k == Kind.VARIABLE_EXPRESSION || k == Kind.CALL_EXPRESSION
-                || k == Kind.OP_EXPRESSION || k == Kind.FIELD_ACCESSOR_EXPRESSION || k == Kind.INDEX_ACCESSOR_EXPRESSION
-                || k == Kind.UNARY_EXPRESSION;
+        return (k == Kind.LITERAL_EXPRESSION) || (k == Kind.LIST_CONSTRUCTOR_EXPRESSION)
+                || (k == Kind.RECORD_CONSTRUCTOR_EXPRESSION) || (k == Kind.VARIABLE_EXPRESSION)
+                || (k == Kind.CALL_EXPRESSION) || (k == Kind.OP_EXPRESSION) || (k == Kind.FIELD_ACCESSOR_EXPRESSION)
+                || (k == Kind.INDEX_ACCESSOR_EXPRESSION) || (k == Kind.UNARY_EXPRESSION);
     }
 
     protected <T> List<T> mkSingletonArrayList(T item) {

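The keySourceIndicator list decides, per primary key, where the key is extracted from: an indicator of 0 (or a missing indicator list) means a plain field access on the record variable, anything else means a meta-key access on the unnested feed tuple. Restated in isolation, with partitionKeys, keySourceIndicator, resVar, unnestVar, and context as hypothetical stand-ins for the translator's locals:

    // Hypothetical stand-ins for the translator's collection lists.
    List<LogicalVariable> vars = new ArrayList<>();
    List<Mutable<ILogicalExpression>> exprs = new ArrayList<>();
    List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<>();

    for (int i = 0; i < partitionKeys.size(); i++) {
        if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
            // Key lives in the record: field access on the record variable.
            PlanTranslationUtil.prepareVarAndExpression(partitionKeys.get(i), resVar, vars, exprs,
                    varRefsForLoading, context);
        } else {
            // Key lives in the meta part: meta-key access on the unnested feed tuple.
            PlanTranslationUtil.prepareMetaKeyAccessExpression(partitionKeys.get(i), unnestVar, exprs, vars,
                    varRefsForLoading, context);
        }
    }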
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java
new file mode 100644
index 0000000..63d1908
--- /dev/null
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.translator.util;
+
+import java.util.List;
+
+import org.apache.asterix.lang.common.util.FunctionUtil;
+import org.apache.asterix.om.base.AOrderedList;
+import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.base.IAObject;
+import org.apache.asterix.om.constants.AsterixConstantValue;
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import org.apache.hyracks.algebricks.core.algebra.base.IVariableContext;
+import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+
+public class PlanTranslationUtil {
+    private static final LogicalVariable DUMMY_VAR = new LogicalVariable(-1);
+
+    public static void prepareMetaKeyAccessExpression(List<String> field, LogicalVariable resVar,
+            List<Mutable<ILogicalExpression>> assignExpressions, List<LogicalVariable> vars,
+            List<Mutable<ILogicalExpression>> varRefs, IVariableContext context) {
+        IAObject value = (field.size() > 1) ? new AOrderedList(field) : new AString(field.get(0));
+        ScalarFunctionCallExpression metaKeyFunction = new ScalarFunctionCallExpression(
+                FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.META_KEY));
+        metaKeyFunction.getArguments()
+                .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
+        metaKeyFunction.getArguments()
+                .add(new MutableObject<>(new ConstantExpression(new AsterixConstantValue(value))));
+        assignExpressions.add(new MutableObject<ILogicalExpression>(metaKeyFunction));
+        LogicalVariable v = context.newVar();
+        vars.add(v);
+        if (varRefs != null) {
+            varRefs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
+        }
+    }
+
+    public static void prepareVarAndExpression(List<String> field, LogicalVariable resVar, List<LogicalVariable> vars,
+            List<Mutable<ILogicalExpression>> assignExpressions, List<Mutable<ILogicalExpression>> varRefs,
+            IVariableContext context) {
+        ScalarFunctionCallExpression f = createFieldAccessExpression(new VariableReferenceExpression(DUMMY_VAR), field);
+        f.substituteVar(DUMMY_VAR, resVar);
+        assignExpressions.add(new MutableObject<ILogicalExpression>(f));
+        LogicalVariable v = context.newVar();
+        vars.add(v);
+        if (varRefs != null) {
+            varRefs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(v)));
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    private static ScalarFunctionCallExpression createFieldAccessExpression(ILogicalExpression target,
+            List<String> field) {
+        FunctionIdentifier functionIdentifier;
+        IAObject value;
+        if (field.size() > 1) {
+            functionIdentifier = AsterixBuiltinFunctions.FIELD_ACCESS_NESTED;
+            value = new AOrderedList(field);
+        } else {
+            functionIdentifier = AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME;
+            value = new AString(field.get(0));
+        }
+        IFunctionInfo finfoAccess = FunctionUtil.getFunctionInfo(functionIdentifier);
+        return new ScalarFunctionCallExpression(finfoAccess, new MutableObject<>(target),
+                new MutableObject<>(new ConstantExpression(new AsterixConstantValue(value))));
+    }
+}

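A short usage sketch of the two helpers above: for a single-element field path, prepareVarAndExpression emits a field-access-by-name call on the record variable, while prepareMetaKeyAccessExpression emits a meta-key call; both bind a fresh variable and, when a varRefs list is supplied, append a reference to it. All names below are hypothetical; note that the two signatures swap the order of the vars and exprs parameters:

    // Hypothetical caller-side state.
    List<LogicalVariable> vars = new ArrayList<>();
    List<Mutable<ILogicalExpression>> exprs = new ArrayList<>();
    List<Mutable<ILogicalExpression>> varRefs = new ArrayList<>();

    // field-access-by-name($record, "id") -> fresh PK variable plus a reference to it.
    PlanTranslationUtil.prepareVarAndExpression(Collections.singletonList("id"), recordVar, vars, exprs,
            varRefs, context);

    // meta-key($record, "key") -> fresh PK variable; exprs precedes vars in this signature.
    PlanTranslationUtil.prepareMetaKeyAccessExpression(Collections.singletonList("key"), recordVar, exprs,
            vars, varRefs, context);

    // vars/exprs typically feed an AssignOperator; varRefs feed the insert/delete/upsert operator.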

[04/19] incubator-asterixdb git commit: Support Change Feeds and Ingestion of Records with MetaData

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3338f66/asterix-external-data/src/test/resources/results/beer.txt
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/results/beer.txt b/asterix-external-data/src/test/resources/results/beer.txt
new file mode 100644
index 0000000..693b5a1
--- /dev/null
+++ b/asterix-external-data/src/test/resources/results/beer.txt
@@ -0,0 +1,10995 @@
+{ "name": "Alameda Brewhouse", "city": "Portland", "state": "Oregon", "code": "97213", "country": "United States", "phone": "1-503-460-9025", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "4765 NE Fremont" ], "geo": { "accuracy": "ROOFTOP", "lat": 45.5484d, "lon": -122.619d } }
+{ "id": "alameda_brewhouse", "flags": 0i32, "expiration": 0, "cas": 244368670720, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"alameda_brewhouse"
+{ "name": "Abbey Wright Brewing/Valley Inn", "city": "Williamsport", "state": "Pennsylvania", "code": "17702", "country": "United States", "phone": "570.326.3383", "website": "http://www.valleyinnonline.com/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "204 Valley Street" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 41.2225d, "lon": -77.0369d } }
+{ "id": "abbey_wright_brewing_valley_inn", "flags": 0i32, "expiration": 0, "cas": 244371881984, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"abbey_wright_brewing_valley_inn"
+{ "name": "Allguer Brauhaus AG Kempten", "city": "Kempten", "state": "Bayern", "code": "", "country": "Germany", "phone": "49-(0)831-/-2050-0", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Beethovenstrae 7" ], "geo": { "accuracy": "ROOFTOP", "lat": 47.7487d, "lon": 10.5694d } }
+{ "id": "allguer_brauhaus_ag_kempten", "flags": 0i32, "expiration": 0, "cas": 244377518080, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"allguer_brauhaus_ag_kempten"
+{ "name": "Ali`i Brewing", "city": "Honolulu", "state": "Hawaii", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 21.3069d, "lon": -157.858d } }
+{ "id": "ali_i_brewing", "flags": 0i32, "expiration": 0, "cas": 244368670721, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"ali_i_brewing"
+{ "name": "Asheville Pizza and Brewing Co.", "city": "Asheville", "state": "North Carolina", "code": "28804", "country": "United States", "phone": "(828) 254-1281", "website": "http://www.ashevillepizza.com/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "Asheville Brewpub, Brewery, Movie Theater and Pizzeria all rolled into 2 great places to meet and eat. Award winning beer and award winning pizza.", "address": [ "675 Merrimon Avenue" ], "geo": { "accuracy": "ROOFTOP", "lat": 35.6221d, "lon": -82.5536d } }
+{ "id": "asheville_pizza_and_brewing_co", "flags": 0i32, "expiration": 0, "cas": 244371947520, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"asheville_pizza_and_brewing_co"
+{ "name": "Amstel Light", "abv": 3.5d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "amstel_brouwerij", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Light Lager", "category": "North American Lager" }
+{ "id": "amstel_brouwerij-amstel_light", "flags": 0i32, "expiration": 0, "cas": 244377583616, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"amstel_brouwerij-amstel_light"
+{ "name": "Allagash Fluxus 09", "abv": 8.3d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "allagash_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "French & Belgian-Style Saison", "category": "Belgian and French Ale" }
+{ "id": "allagash_brewing-allagash_fluxus_09", "flags": 0i32, "expiration": 0, "cas": 244368736256, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"allagash_brewing-allagash_fluxus_09"
+{ "name": "Dirty Blond", "abv": 4.5d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "atwater_block_brewing", "updated": "2010-07-22 20:00:20", "description": "Made with unmalted wheat, coriander and orange peel to help you live smart and enjoy everyday!", "style": "Light American Wheat Ale or Lager", "category": "Other Style" }
+{ "id": "atwater_block_brewing-dirty_blond", "flags": 0i32, "expiration": 0, "cas": 244372013056, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"atwater_block_brewing-dirty_blond"
+{ "name": "Andechser Klosterbrauerei", "city": "Andechs", "state": "Bayern", "code": "", "country": "Germany", "phone": "49-(0)8152-/-376-0", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Bergstrae 2" ], "geo": { "accuracy": "ROOFTOP", "lat": 47.9775d, "lon": 11.185d } }
+{ "id": "andechser_klosterbrauerei", "flags": 0i32, "expiration": 0, "cas": 244377649152, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"andechser_klosterbrauerei"
+{ "name": "Allentown Brew Works", "city": "Allentown", "state": "Pennsylvania", "code": "18101", "country": "United States", "phone": "610.433.7777", "website": "http://www.thebrewworks.com/allentown-brewworks/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "The Allentown Brew Works is housed inside the historic Harold's Furniture Building at 812 W Hamilton Street. Years in the making, the new Allentown restaurant and brewery are part of the continuing vision of the Fegley family that established the original Bethlehem Brew Works in 1998. Proud to be part of the resurging downtown, the 400 seat restaurant is a testament to the vibrant heartbeat that thrives in this Queen City. Offering two and a half levels of dining, plus state of the art banquet facilities in The Hamilton Room, and multiple bars offering different atmospheres, as well as an outdoor casual Biergarten and a new seductive lower level lounge � Silk.", "address": [ "814 W Hamilton St" ], "geo":
  { "accuracy": "RANGE_INTERPOLATED", "lat": 40.6016d, "lon": -75.474d } }
+{ "id": "allentown_brew_works", "flags": 0i32, "expiration": 0, "cas": 244368736257, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"allentown_brew_works"
+{ "name": "Dunkel", "abv": 5.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "atwater_block_brewing", "updated": "2010-07-22 20:00:20", "description": "Our malty, sweet dark lager is a hometown favorite. Our Dunkel is packed with subtle roasted malt flavors without the excessive bitterness and heaviness of many dark beers and has a balanced hop finish.\r\n\r\nGABF Gold Winner", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "atwater_block_brewing-dunkel", "flags": 0i32, "expiration": 0, "cas": 244372013057, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"atwater_block_brewing-dunkel"
+{ "name": "Budweiser", "abv": 5.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "anheuser_busch", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Light Lager", "category": "North American Lager" }
+{ "id": "anheuser_busch-budweiser", "flags": 0i32, "expiration": 0, "cas": 244377649153, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"anheuser_busch-budweiser"
+{ "name": "Bayrisch Hell", "abv": 4.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "allguer_brauhaus_ag_kempten", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "allguer_brauhaus_ag_kempten-bayrisch_hell", "flags": 0i32, "expiration": 0, "cas": 244368801792, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"allguer_brauhaus_ag_kempten-bayrisch_hell"
+{ "name": "Pilsner", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "atwater_block_brewing", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "atwater_block_brewing-pilsner", "flags": 0i32, "expiration": 0, "cas": 244372078592, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"atwater_block_brewing-pilsner"
+{ "name": "India Pale Ale", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "arcadia_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "arcadia_brewing-india_pale_ale", "flags": 0i32, "expiration": 0, "cas": 244377649154, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"arcadia_brewing-india_pale_ale"
+{ "name": "American River Brewing", "city": "Auburn", "state": "California", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 38.8966d, "lon": -121.077d } }
+{ "id": "american_river_brewing", "flags": 0i32, "expiration": 0, "cas": 244368801793, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"american_river_brewing"
+{ "name": "X-Line", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "atwater_block_brewing", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "atwater_block_brewing-x_line", "flags": 0i32, "expiration": 0, "cas": 244372078593, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"atwater_block_brewing-x_line"
+{ "name": "Caramel Bock", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "august_schell_brewing", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "august_schell_brewing-caramel_bock", "flags": 0i32, "expiration": 0, "cas": 244377714688, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"august_schell_brewing-caramel_bock"
+{ "name": "Summer Solstice Cerveza Crema", "abv": 5.6d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "anderson_valley_brewing", "updated": "2010-07-22 20:00:20", "description": "\"This copper colored ale is smooth, malty, and lightly sweet, with a delicate hint of spice for that oh-so-drinkable, extra velvety flavor.  The character is lighter in body than its cousin our wildly popular Winter Solstice Seasonal Ale.  This is a silky, creamy dream, perfect as a warm weather beer.\"", "style": "American-Style Cream Ale or Lager", "category": "Other Style" }
+{ "id": "anderson_valley_brewing-summer_solstice_cerveza_crema", "flags": 0i32, "expiration": 0, "cas": 244368867328, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"anderson_valley_brewing-summer_solstice_cerveza_crema"
+{ "name": "Czar Imperial Stout", "abv": 11.9d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "avery_brewing_company", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "avery_brewing_company-czar_imperial_stout", "flags": 0i32, "expiration": 0, "cas": 244372078594, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"avery_brewing_company-czar_imperial_stout"
+{ "name": "Dark Star", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bbc_brewing_co_llc", "updated": "2010-07-22 20:00:20", "description": "", "style": "Porter", "category": "Irish Ale" }
+{ "id": "bbc_brewing_co_llc-dark_star", "flags": 0i32, "expiration": 0, "cas": 244377714689, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"bbc_brewing_co_llc-dark_star"
+{ "name": "Tannhauser", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "augusta_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "augusta_brewing-tannhauser", "flags": 0i32, "expiration": 0, "cas": 244368867329, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"augusta_brewing-tannhauser"
+{ "name": "Apex", "abv": 7.5d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bear_republic_brewery", "updated": "2010-07-22 20:00:20", "description": "Crafted with a blend of American and English malts and aggressively hopped with Pacific Northwest hops, this beer reflects what our brewers believe to be the Apex of IPA.", "style": "Imperial or Double India Pale Ale", "category": "North American Ale" }
+{ "id": "bear_republic_brewery-apex", "flags": 0i32, "expiration": 0, "cas": 244372078595, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"bear_republic_brewery-apex"
+{ "name": "Trombipulator", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "big_time_brewing", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "big_time_brewing-trombipulator", "flags": 0i32, "expiration": 0, "cas": 244377714690, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"big_time_brewing-trombipulator"
+{ "name": "Weißbier", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "augustiner_brau_munchen", "updated": "2010-07-22 20:00:20", "description": "", "style": "Light American Wheat Ale or Lager", "category": "Other Style" }
+{ "id": "augustiner_brau_munchen-weissbier", "flags": 0i32, "expiration": 0, "cas": 244368932864, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"augustiner_brau_munchen-weissbier"
+{ "name": "Robertus", "abv": 6.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bierbrouwerij_st_christoffel", "updated": "2010-07-22 20:00:20", "description": "Christoffel Robertus is a low-fermenting ruby-red beer, brewed in the Münchener-style. It is a malty, fresh beer with a light sweetness. The typical hop bitterness found in Blond, is very lightly present in Robertus. The use of an extensive amount of selected barley gives Robertus the special malty taste and aroma.", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "bierbrouwerij_st_christoffel-robertus", "flags": 0i32, "expiration": 0, "cas": 244372078596, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"bierbrouwerij_st_christoffel-robertus"
+{ "name": "Samuel Adams Irish Red", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "boston_beer_company", "updated": "2010-07-22 20:00:20", "description": "Malty and slightly sweet, balanced by earthy notes from the hops.  The gentle rain and fertile soil of Ireland helped inspire this style of ale, known for being remarkably balanced. Pale and Caramel malts give the beer its rich, deep red color and distinctive caramel flavor. The sweetness of the malt is pleasantly balanced by a pronounced hop bitterness and an earthy note from the East Kent Goldings hops. Samuel Adams® Irish Red finishes smooth and leaves you wanting to take another sip.", "style": "Irish-Style Red Ale", "category": "Irish Ale" }
+{ "id": "boston_beer_company-samuel_adams_irish_red", "flags": 0i32, "expiration": 0, "cas": 244377780224, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"boston_beer_company-samuel_adams_irish_red"
+{ "name": "Baron Pilsner", "abv": 4.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "baron_brewing_company", "updated": "2010-07-22 20:00:20", "description": "Our pilsner is a traditional Northern German Style Pilsner. It has a fantastic malty aroma with a slight spice from the hops. The head is brilliant white and floats on the clean pale lager. The sparkling mouthfeel gives way to a soft malt sweetness that is followed by a long, dry, crisp finish. The balanced clean finish taunts the mouth to take another drink. Lagered for a minimum of 8-12 weeks to ensure smoothness and drinkability.\r\n\r\nAll ingredients for the beer are imported from Germany. Brewed in accordance to the German Beer Purity Law (Reinheitsgebot) of 1516.", "style": "German-Style Pilsener", "category": "German Lager" }
+{ "id": "baron_brewing_company-baron_pilsner", "flags": 0i32, "expiration": 0, "cas": 244368932865, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"baron_brewing_company-baron_pilsner"
+{ "name": "Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "big_buck_brewery_and_steakhouse_2", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "big_buck_brewery_and_steakhouse_2-stout", "flags": 0i32, "expiration": 0, "cas": 244372144128, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"big_buck_brewery_and_steakhouse_2-stout"
+{ "name": "3 Monts", "abv": 8.5d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_de_saint_sylvestre", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brasserie_de_saint_sylvestre-3_monts", "flags": 0i32, "expiration": 0, "cas": 244377780225, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brasserie_de_saint_sylvestre-3_monts"
+{ "name": "Club Colombia", "abv": 4.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bavaria", "updated": "2010-07-22 20:00:20", "description": "La mejor cerveza de Colombia." }
+{ "id": "bavaria-club_colombia", "flags": 0i32, "expiration": 0, "cas": 244368932866, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"bavaria-club_colombia"
+{ "name": "34th Street Porter", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bootleggers_steakhouse_and_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "Porter", "category": "Irish Ale" }
+{ "id": "bootleggers_steakhouse_and_brewery-34th_street_porter", "flags": 0i32, "expiration": 0, "cas": 244372144129, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"bootleggers_steakhouse_and_brewery-34th_street_porter"
+{ "name": "Brasserie des Gants", "city": "Irchonwelz", "state": "Hainaut", "code": "", "country": "Belgium", "phone": "32-068-28-79-36", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "rue du Castel, 19" ], "geo": { "accuracy": "GEOMETRIC_CENTER", "lat": 50.6204d, "lon": 3.7592d } }
+{ "id": "brasserie_des_gants", "flags": 0i32, "expiration": 0, "cas": 244377780226, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brasserie_des_gants"
+{ "name": "Big Bear Black Stout", "abv": 8.1d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bear_republic_brewery", "updated": "2010-07-22 20:00:20", "description": "Big Bear, as the name implies, is a hefty, black, Russian Imperial-style stout. This bold stout boasts a rich, caramel sweetness lavished by a robust, deep-roasted heartiness you can sink your teeth into. ...Big Bear's bold flavors are produced using a blend of Belgian and English roasted barley and crystal malts. Some unique flavors come forth in the malt character. ...Louisiana sweet molasses and dark brown sugar. This dark brew is well hopped with Chinook and Cascade hops, which are somewhat, masked by the malt. This is a balanced bold brew boasting an A.V.B. of 8.1% that can creep up on you, \"so don't get mauled\". It has a dry roasted quality that masks its' high alchohol content, so drink responsibly. 2004 California State Fair, Silver Medal Winner; 2002 World Beer Cup, Gold Medal Winner; \r
 \n2002 Annual Bistro Beer Festival, Hayward, Gold Medal Winner; 2001 North American Brewers' Award, Honorable Mention - og 1.076, ABV 8.1%, IBU 68.", "style": "American-Style Imperial Stout", "category": "North American Ale" }
+{ "id": "bear_republic_brewery-big_bear_black_stout", "flags": 0i32, "expiration": 0, "cas": 244368998400, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"bear_republic_brewery-big_bear_black_stout"
+{ "name": "Bootleggers Steakhouse and Brewery", "city": "Bakersfield", "state": "California", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 35.3733d, "lon": -119.019d } }
+{ "id": "bootleggers_steakhouse_and_brewery", "flags": 0i32, "expiration": 0, "cas": 244372144130, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"bootleggers_steakhouse_and_brewery"
+{ "name": "Ngoma Awooyo Special", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_du_benin", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brasserie_du_benin-ngoma_awooyo_special", "flags": 0i32, "expiration": 0, "cas": 244377780227, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brasserie_du_benin-ngoma_awooyo_special"
+{ "name": "Berliner Kindl Brauerei AG", "city": "Berlin", "state": "Berlin", "code": "", "country": "Germany", "phone": "49-(0)30-/-68992-0", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Werbellinstrasse 50" ], "geo": { "accuracy": "ROOFTOP", "lat": 52.4793d, "lon": 13.4293d } }
+{ "id": "berliner_kindl_brauerei_ag", "flags": 0i32, "expiration": 0, "cas": 244368998401, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"berliner_kindl_brauerei_ag"
+{ "name": "Samuel Adams Scotch Ale", "abv": 5.4d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "boston_beer_company", "updated": "2010-07-22 20:00:20", "description": "This is a brew for adventurous beer drinkers. It is brewed with four malts: two row pale Harrington, Munich malt, chocolate malt, and a rare peat smoked malt commonly used by distillers of Scotch malt whiskey. This unique malt gives Samuel Adams® Scotch Ale its distinct, subtle smoky character and deep amber hue. Samuel Adams® Scotch Ale is brewed using traditional English hops, Goldings and Fuggles. This is a big brew dominated by malt flavors and aromas, rich and full bodied, slightly sweet. Its layered malt complexity lingers to a smooth and silky finish.", "style": "Scotch Ale", "category": "British Ale" }
+{ "id": "boston_beer_company-samuel_adams_scotch_ale", "flags": 0i32, "expiration": 0, "cas": 244372144131, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"boston_beer_company-samuel_adams_scotch_ale"
+{ "name": "Hefe-Weizen", "abv": 5.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brauerei_schwelm", "updated": "2010-07-22 20:00:20", "description": "", "style": "South German-Style Hefeweizen", "category": "German Ale" }
+{ "id": "brauerei_schwelm-hefe_weizen", "flags": 0i32, "expiration": 0, "cas": 244495941632, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brauerei_schwelm-hefe_weizen"
+{ "name": "Amendment Pale Ale", "abv": 5.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "21st_amendment_brewery_cafe", "updated": "2010-07-22 20:00:20", "description": "Rich golden hue color. Floral hop with sweet malt aroma. Medium mouth feel with malt sweetness, hop quenching flavor and well-balanced bitterness.", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "21st_amendment_brewery_cafe-amendment_pale_ale", "flags": 0i32, "expiration": 0, "cas": 244375420928, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"21st_amendment_brewery_cafe-amendment_pale_ale"
+{ "name": "Thundermuck Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bill_s_tavern_brewhouse", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "bill_s_tavern_brewhouse-thundermuck_stout", "flags": 0i32, "expiration": 0, "cas": 244369063936, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"bill_s_tavern_brewhouse-thundermuck_stout"
+{ "name": "GABF 25th Year Beer", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "boulder_beer_company", "updated": "2010-07-22 20:00:20", "description": "", "style": "Extra Special Bitter", "category": "British Ale" }
+{ "id": "boulder_beer_company-gabf_25th_year_beer", "flags": 0i32, "expiration": 0, "cas": 244372209664, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"boulder_beer_company-gabf_25th_year_beer"
+{ "name": "Proletary", "abv": 5.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brewer_s_art", "updated": "2010-07-22 20:00:20", "description": "Black, smooth and easy to drink, this is a beer for the people!" }
+{ "id": "brewer_s_art-proletary", "flags": 0i32, "expiration": 0, "cas": 244495941633, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brewer_s_art-proletary"
+{ "name": "Old Numbskull 2003", "abv": 10.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "alesmith_brewing", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "alesmith_brewing-old_numbskull_2003", "flags": 0i32, "expiration": 0, "cas": 244375420929, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"alesmith_brewing-old_numbskull_2003"
+{ "name": "Nora", "abv": 7.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "birrificia_le_baladin", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "birrificia_le_baladin-nora", "flags": 0i32, "expiration": 0, "cas": 244369063937, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"birrificia_le_baladin-nora"
+{ "name": "Yeti", "abv": 8.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_des_cimes", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brasserie_des_cimes-yeti", "flags": 0i32, "expiration": 0, "cas": 244372209665, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brasserie_des_cimes-yeti"
+{ "name": "Brewmasters Restaurant and Brewery South", "city": "Kenosha", "state": "Wisconsin", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 42.5847d, "lon": -87.8212d } }
+{ "id": "brewmasters_restaurant_and_brewery_south", "flags": 0i32, "expiration": 0, "cas": 244495941634, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brewmasters_restaurant_and_brewery_south"
+{ "name": "Boltwood Bock", "abv": 5.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "amherst_brewing_company", "updated": "2010-07-22 20:00:20", "description": "Light in color, full bodied and very malty, this lager has a toasted malt flavor. Brewed in February and usually on tap by May with a keg of the previous year's batch.", "style": "Traditional German-Style Bock", "category": "German Lager" }
+{ "id": "amherst_brewing_company-boltwood_bock", "flags": 0i32, "expiration": 0, "cas": 244375420930, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"amherst_brewing_company-boltwood_bock"
+{ "name": "P.M. Porter", "abv": 6.4d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bj_s_restaurant_and_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "Porter", "category": "Irish Ale" }
+{ "id": "bj_s_restaurant_and_brewery-p_m_porter", "flags": 0i32, "expiration": 0, "cas": 244369063938, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"bj_s_restaurant_and_brewery-p_m_porter"
+{ "name": "Triple Moine", "abv": 7.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_du_bocq", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brasserie_du_bocq-triple_moine", "flags": 0i32, "expiration": 0, "cas": 244372275200, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brasserie_du_bocq-triple_moine"
+{ "name": "Trappist Westvleteren 12", "abv": 11.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brouwerij_abdij_saint_sixtus", "updated": "2010-07-22 20:00:20", "description": "This Belgian beer has an everlasting tast.  It has been choosen as the best beer in the world for several years!", "style": "Belgian-Style Quadrupel", "category": "Belgian and French Ale" }
+{ "id": "brouwerij_abdij_saint_sixtus-trappist_westvleteren_12", "flags": 0i32, "expiration": 0, "cas": 244496007168, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brouwerij_abdij_saint_sixtus-trappist_westvleteren_12"
+{ "name": "Nitro Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "anderson_valley_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "anderson_valley_brewing-nitro_stout", "flags": 0i32, "expiration": 0, "cas": 244375420931, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"anderson_valley_brewing-nitro_stout"
+{ "name": "Piranha Pale Ale", "abv": 5.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bj_s_restaurant_and_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "bj_s_restaurant_and_brewery-piranha_pale_ale", "flags": 0i32, "expiration": 0, "cas": 244369129472, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"bj_s_restaurant_and_brewery-piranha_pale_ale"
+{ "name": "Brasserie Dupont", "city": "Tourpes", "state": "Hainaut", "code": "", "country": "Belgium", "phone": "32-069-67-10-66", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Rue Basse 5" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 50.5718d, "lon": 3.6508d } }
+{ "id": "brasserie_dupont", "flags": 0i32, "expiration": 0, "cas": 244488732672, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brasserie_dupont"
+{ "name": "Petrus Aged Pale", "abv": 7.3d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brouwerij_bavik_de_brabandere", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brouwerij_bavik_de_brabandere-petrus_aged_pale", "flags": 0i32, "expiration": 0, "cas": 244496007169, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brouwerij_bavik_de_brabandere-petrus_aged_pale"
+{ "name": "Batch 8000", "abv": 9.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bell_s_brewery_inc", "updated": "2010-07-22 20:00:20", "description": "Batch 8,000 is part of our commemorative series celebrating our progress with special brews. Our 8,000th batch is a special recipe to be brewed only once. It is wheat ale spiced with Coriander, Orange Peel, and Paradise Seed. Best consumed fresh.", "style": "Belgian-Style White", "category": "Belgian and French Ale" }
+{ "id": "bell_s_brewery_inc-batch_8000", "flags": 0i32, "expiration": 0, "cas": 244375486464, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"bell_s_brewery_inc-batch_8000"
+{ "name": "Single-Wide I.P.A.", "abv": 5.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "boulevard_brewing_company", "updated": "2010-07-22 20:00:20", "description": "The latest addition to the Boulevard family of year-around beers, Single-Wide I.P.A. is our take on a style that originated in 18th century Great Britain. This American version -- inspired by our Smokestack Series Double-Wide I.P.A. -- boasts a heady combination of six varieties of hops, some of which were employed for dry-hopping.", "style": "American-Style India Pale Ale", "category": "North American Ale" }
+{ "id": "boulevard_brewing_company-single_wide_i_p_a", "flags": 0i32, "expiration": 0, "cas": 244369129473, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"boulevard_brewing_company-single_wide_i_p_a"
+{ "name": "Brasserie Fantme", "city": "Soy", "state": "Luxembourg", "code": "", "country": "Belgium", "phone": "32-(0)86-47-70-44", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Rue Pral 8" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 50.286d, "lon": 5.5127d } }
+{ "id": "brasserie_fantme", "flags": 0i32, "expiration": 0, "cas": 244488798208, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brasserie_fantme"
+{ "name": "Blanche de Bruges", "abv": 5.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brouwerij_de_gouden_boom", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brouwerij_de_gouden_boom-blanche_de_bruges", "flags": 0i32, "expiration": 0, "cas": 244496007170, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brouwerij_de_gouden_boom-blanche_de_bruges"
+{ "name": "Samuel Adams Honey Porter", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "boston_beer_company", "updated": "2010-07-22 20:00:20", "description": "A dark, full flavored English porter with Scottish heather honey.  Samuel Adams® Honey Porter is a full-flavored, full-bodied English porter with a substantial roasted malt character, offering a smooth, rounded finish. This beer is brewed with traditional English Ale hops and is dry-hopped with East Kent Goldings, known for their spicy aroma and distinctive, earthy flavor. We brew Honey Porter with Scottish heather honey which balances the spiciness of the hops.\r\n\r\nThis brew is the perfect complement to glazed ham, spicy chili, and roasted vegetables like beets and carrots, which bring out the herbal notes found in the hops and the sweetness of the honey. Samuel Adams® Honey Porter also pairs well with rich desserts such as baklava and molasses cookies.", "style": "Porter", "category": "Ir
 ish Ale" }
+{ "id": "boston_beer_company-samuel_adams_honey_porter", "flags": 0i32, "expiration": 0, "cas": 244375486465, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"boston_beer_company-samuel_adams_honey_porter"
+{ "name": "Bière Darbyste", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_de_blaugies", "updated": "2010-07-22 20:00:20", "description": "", "style": "Light American Wheat Ale or Lager", "category": "Other Style" }
+{ "id": "brasserie_de_blaugies-biere_darbyste", "flags": 0i32, "expiration": 0, "cas": 244369129474, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"brasserie_de_blaugies-biere_darbyste"
+{ "name": "Brasserie La Caracole", "city": "Falmignoul", "state": "Namur", "code": "", "country": "Belgium", "phone": "32-082-74-40-80", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Cte Marie-Thrse 86" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 50.2024d, "lon": 4.8914d } }
+{ "id": "brasserie_la_caracole", "flags": 0i32, "expiration": 0, "cas": 244488798209, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brasserie_la_caracole"
+{ "name": "Brouwerij Nacional Balashi", "city": "", "state": "", "code": "", "country": "Aruba", "phone": "297 854805", "website": "http://www.balashi.com/balashi/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ] }
+{ "id": "brouwerij_nacional_balashi", "flags": 0i32, "expiration": 0, "cas": 244496072704, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brouwerij_nacional_balashi"
+{ "name": "Bully! Porter", "abv": 5.4d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "boulevard_brewing_company", "updated": "2010-07-22 20:00:20", "description": "The intense flavors of dark-roasted malt in Boulevard’s rendition of the classic English porter are perfectly balanced by a generous and complex hop character. Bully! Porter’s robust nature makes it the ideal companion to a variety of foods, from seafood to chocolate.", "style": "Porter", "category": "Irish Ale" }
+{ "id": "boulevard_brewing_company-bully_porter", "flags": 0i32, "expiration": 0, "cas": 244375486466, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"boulevard_brewing_company-bully_porter"
+{ "name": "Brasserie de Brunehaut Bio Bière Ambrée (Organic)", "abv": 6.5d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_de_brunehaut", "updated": "2010-07-22 20:00:20", "description": "Amber copper color with a beige head.\r\nCaramel malt aromas reminiscent of vanilla, along with toffee, butterscotch and ripe fruits. Top-fermented and bottle-conditioned, this is a clean, refreshing regional 'artisan' beer.\r\nHazy amber to brown coloured beer, with a fluffy off-white head. Nice aroma of spices, yeast and oak.  The alcohol subtle. Flavour is moderately spicy and slightly fruity, with balanced hops. \r\nThis beer is certified organic.", "style": "American-Style Amber/Red Ale", "category": "North American Ale" }
+{ "id": "brasserie_de_brunehaut-brasserie_de_brunehaut_bio_biere_ambree_organic", "flags": 0i32, "expiration": 0, "cas": 244369195008, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"brasserie_de_brunehaut-brasserie_de_brunehaut_bio_biere_ambree_organic"
+{ "name": "St.Pauli Girl Beer", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brauerei_beck", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "brauerei_beck-st_pauli_girl_beer", "flags": 0i32, "expiration": 0, "cas": 244488863744, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brauerei_beck-st_pauli_girl_beer"
+{ "name": "Brouwerij Sint-Jozef", "city": "Opitter", "state": "Limburg", "code": "", "country": "Belgium", "phone": "32-089-86-47-11", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Itterplein 19" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 51.1168d, "lon": 5.6464d } }
+{ "id": "brouwerij_sint_jozef", "flags": 0i32, "expiration": 0, "cas": 244496072705, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"brouwerij_sint_jozef"
+{ "name": "Hinano", "abv": 5.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_de_tahiti", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "brasserie_de_tahiti-hinano", "flags": 0i32, "expiration": 0, "cas": 244375552000, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"brasserie_de_tahiti-hinano"
+{ "name": "La Bière des Collines van de Saisis", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brasserie_ellezelloise", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brasserie_ellezelloise-la_biere_des_collines_van_de_saisis", "flags": 0i32, "expiration": 0, "cas": 244486373376, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"brasserie_ellezelloise-la_biere_des_collines_van_de_saisis"
+{ "name": "Brauerei Beck", "city": "Bremen", "state": "Bremen", "code": "", "country": "Germany", "phone": "49-(0)421-/-50940", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Am Deich 18-19" ], "geo": { "accuracy": "ROOFTOP", "lat": 53.0787d, "lon": 8.7901d } }
+{ "id": "brauerei_beck", "flags": 0i32, "expiration": 0, "cas": 244488863745, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brauerei_beck"
+{ "name": "Hell", "abv": 5.1d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "burgerbrau_wolnzach", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "burgerbrau_wolnzach-hell", "flags": 0i32, "expiration": 0, "cas": 244496072706, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"burgerbrau_wolnzach-hell"
+{ "name": "Brasserie Pietra", "city": "Furiani", "state": "", "code": "", "country": "France", "phone": "33-04.95.30.14.70", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Route de la Marana" ], "geo": { "accuracy": "GEOMETRIC_CENTER", "lat": 42.6483d, "lon": 9.4529d } }
+{ "id": "brasserie_pietra", "flags": 0i32, "expiration": 0, "cas": 244492533760, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"brasserie_pietra"
+{ "name": "Brasserie Grain D'Orge", "city": "Ronchin", "state": "", "code": "", "country": "France", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "Once named Brasserie Jeanne D'Arc, this Brewery had changed their name due to change in management.", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 50.6054d, "lon": 3.0775d } }
+{ "id": "brasserie_grain_d_orge", "flags": 0i32, "expiration": 0, "cas": 244486504448, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"brasserie_grain_d_orge"
+{ "name": "Brauerei Gbr. Maisel KG", "city": "Bayreuth", "state": "", "code": "95445", "country": "Germany", "phone": "+49 (0) 9 21/4 01-0", "website": "http://www.maisel.com/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Hindenburgstrasse 9" ], "geo": { "accuracy": "ROOFTOP", "lat": 49.9477d, "lon": 11.5659d } }
+{ "id": "brauerei_gbr_maisel_kg", "flags": 0i32, "expiration": 0, "cas": 244488863746, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brauerei_gbr_maisel_kg"
+{ "name": "FA", "abv": 4.8d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "cains", "updated": "2011-05-17 03:15:54", "description": "FA is no small beer: despite its deceptively pale golden colour, it boasts a big, smooth flavour and strong punch. Brewed with the finest English malts, and conditioned in cask with dry hops to produce fresh hop aromas and a fuller flavour, delighting the mouth and stimulating the tongue.", "style": "Special Bitter or Best Bitter", "category": "British Ale" }
+{ "id": "cains-fa", "flags": 0i32, "expiration": 0, "cas": 244496138240, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"cains-fa"
+{ "name": "Weizen", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brauhaus_johann_albrecht_konstanz", "updated": "2010-07-22 20:00:20", "description": "", "style": "South German-Style Hefeweizen", "category": "German Ale" }
+{ "id": "brauhaus_johann_albrecht_konstanz-weizen", "flags": 0i32, "expiration": 0, "cas": 244492599296, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"brauhaus_johann_albrecht_konstanz-weizen"
+{ "name": "471 Extra ESB", "abv": 7.8d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "breckenridge_brewery", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "breckenridge_brewery-471_extra_esb", "flags": 0i32, "expiration": 0, "cas": 244486504449, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"breckenridge_brewery-471_extra_esb"
+{ "name": "Raspberry Porter", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "breckenridge_bbq_of_omaha", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "breckenridge_bbq_of_omaha-raspberry_porter", "flags": 0i32, "expiration": 0, "cas": 244488863747, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"breckenridge_bbq_of_omaha-raspberry_porter"
+{ "name": "Rye On", "abv": 4.8d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "chama_river_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "chama_river_brewing-rye_on", "flags": 0i32, "expiration": 0, "cas": 244496138241, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"chama_river_brewing-rye_on"
+{ "name": "Brausttte der Steirerbrau Aktiengesellschaft", "city": "Graz", "state": "", "code": "", "country": "Austria", "phone": "43-0316/502-3545", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Reiningshausstrae 1-7" ], "geo": { "accuracy": "APPROXIMATE", "lat": 47.0679d, "lon": 15.4417d } }
+{ "id": "brausttte_der_steirerbrau_aktiengesellschaft", "flags": 0i32, "expiration": 0, "cas": 244492664832, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"brausttte_der_steirerbrau_aktiengesellschaft"
+{ "name": "Brewery Budweiser Budvar", "city": "", "state": "Ceske Budejovice", "code": "", "country": "Czech Republic", "phone": "", "website": "http://www.budvar.cz/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 48.9739d, "lon": 14.475d } }
+{ "id": "brewery_budweiser_budvar", "flags": 0i32, "expiration": 0, "cas": 244486569984, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"brewery_budweiser_budvar"
+{ "name": "Autumn Ale", "abv": 6.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "breckenridge_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Brown Ale", "category": "North American Ale" }
+{ "id": "breckenridge_brewery-autumn_ale", "flags": 0i32, "expiration": 0, "cas": 244488929280, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"breckenridge_brewery-autumn_ale"
+{ "name": "Cherryland Brewing", "city": "Sturgeon Bay", "state": "Wisconsin", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 44.8342d, "lon": -87.377d } }
+{ "id": "cherryland_brewing", "flags": 0i32, "expiration": 0, "cas": 244496138242, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"cherryland_brewing"
+{ "name": "Paradox Speyside", "abv": 10.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brewdog_ltd", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Imperial Stout", "category": "North American Ale" }
+{ "id": "brewdog_ltd-paradox_speyside", "flags": 0i32, "expiration": 0, "cas": 244492664833, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"brewdog_ltd-paradox_speyside"
+{ "name": "Old Knucklehead 1992", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bridgeport_brewing", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "bridgeport_brewing-old_knucklehead_1992", "flags": 0i32, "expiration": 0, "cas": 244486569985, "rev": 1i32, "vbid": 30i32, "dtype": 1i32 }
+"bridgeport_brewing-old_knucklehead_1992"
+{ "name": "Chapeau Exotic Lambic", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brewery_de_troch", "updated": "2010-07-22 20:00:20", "description": "", "style": "Belgian-Style Fruit Lambic", "category": "Belgian and French Ale" }
+{ "id": "brewery_de_troch-chapeau_exotic_lambic", "flags": 0i32, "expiration": 0, "cas": 244488994816, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"brewery_de_troch-chapeau_exotic_lambic"
+{ "name": "Pale Ale", "abv": 5.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "clipper_city_brewing_co", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "clipper_city_brewing_co-pale_ale", "flags": 0i32, "expiration": 0, "cas": 244496203776, "rev": 1i32, "vbid": 51i32, "dtype": 1i32 }
+"clipper_city_brewing_co-pale_ale"
+{ "name": "Irish Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brewery_creek_brewing", "updated": "2010-07-22 20:00:20", "description": "\"Guinness\" is the prototype of all modern stouts. Many people, however, don't realize that there are different varieties of \"Guinness\" brewed around the world. \"Draught Guinness* and \"Foreign Extra Stout\" are the two primary types brewed in Ireland. Foreign Extra is the one I have emulated. It is closer in style to the London Porters of old than to modern stout. Very dark and rich, not as dry as Draught, about 6% abv and around 60 IBUs (that's hop bitterness). I used \"First Gold\" hops because that's what I could get. Guinness use Nitrogen mixed with carbon dioxide to dispense their stout which adds to the creamy mouth-feel. BTW: The \"Imported\" Guinness you buy here in the US comes from Canada. It could just as well be brewed in the US but the common wisdom in the brewing world is that Americans p
 refer \"imported\" beers and will pay more for them.", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "brewery_creek_brewing-irish_stout", "flags": 0i32, "expiration": 0, "cas": 244492664834, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"brewery_creek_brewing-irish_stout"
+{ "name": "Olde Suffolk", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "greene_king", "updated": "2010-07-22 20:00:20", "description": "", "style": "Old Ale", "category": "British Ale" }
+{ "id": "greene_king-olde_suffolk", "flags": 0i32, "expiration": 0, "cas": 244758806528, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"greene_king-olde_suffolk"
+{ "name": "47 Bryg", "abv": 7.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "carlsberg_bryggerierne", "updated": "2010-07-22 20:00:20", "description": "", "style": "Traditional German-Style Bock", "category": "German Lager" }
+{ "id": "carlsberg_bryggerierne-47_bryg", "flags": 0i32, "expiration": 0, "cas": 244500660226, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"carlsberg_bryggerierne-47_bryg"
+{ "name": "Sweeney Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "america_s_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "america_s_brewing-sweeney_stout", "flags": 0i32, "expiration": 0, "cas": 244369981440, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"america_s_brewing-sweeney_stout"
+{ "name": "Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "keg_microbrewery_restaurant", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "keg_microbrewery_restaurant-stout", "flags": 0i32, "expiration": 0, "cas": 244758609921, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"keg_microbrewery_restaurant-stout"
+{ "name": "Bohemian Blonde", "abv": 4.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "manayunk_brewery_and_restaurant", "updated": "2010-07-22 20:00:20", "description": "Our lightest beer – pale blonde in color with a crisp, softly sweet malt flavor, smooth finish and very subtle bitterness. Went to the final judging table at the GABF alongwith the Mega Breweries last year!", "style": "German-Style Pilsener", "category": "German Lager" }
+{ "id": "manayunk_brewery_and_restaurant-bohemian_blonde", "flags": 0i32, "expiration": 0, "cas": 244881620992, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"manayunk_brewery_and_restaurant-bohemian_blonde"
+{ "name": "Stroh Brewery Company", "city": "Tampa", "state": "Florida", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 27.9494d, "lon": -82.4651d } }
+{ "id": "stroh_brewery_company", "flags": 0i32, "expiration": 0, "cas": 245117419521, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"stroh_brewery_company"
+{ "name": "Kölsch", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brauerei_reissdorf", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brauerei_reissdorf-kolsch", "flags": 0i32, "expiration": 0, "cas": 244496728064, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"brauerei_reissdorf-kolsch"
+{ "name": "Rogue Smoke", "abv": 6.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "rogue_ales", "updated": "2010-07-22 20:00:20", "description": "Rogue Smoke(previously known as Welkommen on draft) is a German style Rauchbier (Smoke Beer), which was inspired by the Fall of the Berlin Wall. Most rauch brews are bottom fermented, however Rogue Smoke, is top fermented. It is orange-amber in hue with a delicate smoke aroma and flavor with an intense hop finish.\r\n\r\nIn All About Beer, August, 1995 issue, Christopher Brooks writes \"Alder wood, indigenous to the Northwest, is the smoking agent, though a small amount of Bamberg malt is used in the mash, too. Beech is drier than alder, reports brewmaster John Maier, so we use a little of that for added complexity. Welkommen, a smoky, nutty ale, is also very dry, which given the 15 pounds of hops (perle and Saaz) added to each 15-barrel batch, is no surprise.\" The seven medals in nine years which Rogue Smoke won at 
 theGreat American Beer Festival in Denver are also a tribute to this unusual brew.\r\n\r\nRogue Smoke is brewed with Great Western Harrington, Klages, Munich, Hugh Baird Crystal, Carastan (30-37 and 13-17), Chucks Alderwood Smoked Munich and Bamberg Beechwood Smoked malts; plus Perle and Saaz hops. Rogue Smoke is available in the classic 22-ounce seriograph bottle (replacing the older 7 ounce bottle) and on draft." }
+{ "id": "rogue_ales-rogue_smoke", "flags": 0i32, "expiration": 0, "cas": 244992638977, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"rogue_ales-rogue_smoke"
+{ "name": "No Doubt Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "umpqua_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "umpqua_brewing-no_doubt_stout", "flags": 0i32, "expiration": 0, "cas": 245113880577, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"umpqua_brewing-no_doubt_stout"
+{ "name": "Pilsner", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "greenshields_brewery_and_pub", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "greenshields_brewery_and_pub-pilsner", "flags": 0i32, "expiration": 0, "cas": 244758806529, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"greenshields_brewery_and_pub-pilsner"
+{ "name": "Chimay Dorée", "abv": 4.8d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "chimay_abbaye_notre_dame_de_scourmont", "updated": "2010-07-22 20:00:20", "description": "Brewed from very similar ingredients as the Red, but paler and spiced differently. It is intended only to be drunk at the abbey or at the nearby inn Auberge de Poteaupré which is associated with the abbey. The monks themselves drink this variety rather than the stronger three. The Dorée is not sold commercially and the rare bottles which make their way out are through unofficial sources. Even the brewery's own web site makes no mention of this variety.", "style": "Other Belgian-Style Ales", "category": "Belgian and French Ale" }
+{ "id": "chimay_abbaye_notre_dame_de_scourmont-chimay_doree", "flags": 0i32, "expiration": 0, "cas": 244500725760, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"chimay_abbaye_notre_dame_de_scourmont-chimay_doree"
+{ "name": "Boont Amber Ale", "abv": 5.8d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "anderson_valley_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Amber/Red Ale", "category": "North American Ale" }
+{ "id": "anderson_valley_brewing-boont_amber_ale", "flags": 0i32, "expiration": 0, "cas": 244369981441, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"anderson_valley_brewing-boont_amber_ale"
+{ "name": "Oktoberfest", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "kostritzer_schwarzbierbrauerei", "updated": "2010-07-22 20:00:20", "description": "", "style": "German-Style Oktoberfest", "category": "German Lager" }
+{ "id": "kostritzer_schwarzbierbrauerei-oktoberfest", "flags": 0i32, "expiration": 0, "cas": 244758675456, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"kostritzer_schwarzbierbrauerei-oktoberfest"
+{ "name": "White Ale", "abv": 4.8d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "mendocino_brewing_saratoga_springs", "updated": "2010-07-22 20:00:20", "description": "This Limited Edition unfiltered Belgian Style Ale, brewed with premium unmatted wheat has a crisp & refreshing flavor. This thirst quenching ale has a blend of sweet orange peel, a subtle hint of coriander and a delicate twist of lemon.", "style": "Belgian-Style White", "category": "Belgian and French Ale" }
+{ "id": "mendocino_brewing_saratoga_springs-white_ale", "flags": 0i32, "expiration": 0, "cas": 244881620993, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"mendocino_brewing_saratoga_springs-white_ale"
+{ "name": "Sweet Water Tavern and Brewery", "city": "Sterling", "state": "Virginia", "code": "20121", "country": "United States", "phone": "(703) 449-1108", "website": "http://www.greatamericanrestaurants.com/sweetMainSter/index.htm", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "45980 Waterview Plaza" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 39.0324d, "lon": -77.4097d } }
+{ "id": "sweet_water_tavern_and_brewery", "flags": 0i32, "expiration": 0, "cas": 245117419522, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"sweet_water_tavern_and_brewery"
+{ "name": "Rauchbier Lager", "abv": 4.6d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brauerei_spezial", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brauerei_spezial-rauchbier_lager", "flags": 0i32, "expiration": 0, "cas": 244496793600, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"brauerei_spezial-rauchbier_lager"
+{ "name": "Schlafly Tripel", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "saint_louis_brewery_schlafy_tap_room", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "saint_louis_brewery_schlafy_tap_room-schlafly_tripel", "flags": 0i32, "expiration": 0, "cas": 244992704512, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"saint_louis_brewery_schlafy_tap_room-schlafly_tripel"
+{ "name": "Union Barrel Works Lager", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "union_barrel_works", "updated": "2010-07-22 20:00:20", "description": "Dortmunder style lager with full body and deep golden color. Brewed using four specialty malts with moderate hopping for a smooth clean flavor and aroma.", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "union_barrel_works-union_barrel_works_lager", "flags": 0i32, "expiration": 0, "cas": 245113946112, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"union_barrel_works-union_barrel_works_lager"
+{ "name": "Grolsch Amber Ale", "abv": 5.3d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "grolsche_bierbrouwerij", "updated": "2010-07-22 20:00:20", "description": "", "style": "German-Style Brown Ale/Altbier", "category": "German Ale" }
+{ "id": "grolsche_bierbrouwerij-grolsch_amber_ale", "flags": 0i32, "expiration": 0, "cas": 244758872064, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"grolsche_bierbrouwerij-grolsch_amber_ale"
+{ "name": "Nine Men Ale", "abv": 4.3d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "cooperstown_brewing_company", "updated": "2010-07-22 20:00:20", "description": "\"Nine Man\" is a golden ale, brewed from English pale and crystal malts, and with torrified wheat.  It is bittered with Cascade and Cluster hops and finished with Cascade hops.  \"Nine Man Ale\" was first brewed as a summer seasonal beer in 1996.   It was kegged the first season but not bottled until the opening of the baseball season in April 1997.", "style": "Golden or Blonde Ale", "category": "North American Ale" }
+{ "id": "cooperstown_brewing_company-nine_men_ale", "flags": 0i32, "expiration": 0, "cas": 244500725761, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"cooperstown_brewing_company-nine_men_ale"
+{ "name": "Jolly Scot Scottish Ale", "abv": 5.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "appalachian_brewing_company", "updated": "2010-07-22 20:00:20", "description": "This flavorful sweet ale has a smooth malt finish balanced against a light hop flavor. This beer is very quaffable and has become a brewpub favorite throughout the United States. \r\n\"Jolly Scot\" was a famed local beer produced by R.H. Graupners Brewery that was located at 10th and Market – one block from our brewery.", "style": "Scotch Ale", "category": "British Ale" }
+{ "id": "appalachian_brewing_company-jolly_scot_scottish_ale", "flags": 0i32, "expiration": 0, "cas": 244369981442, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"appalachian_brewing_company-jolly_scot_scottish_ale"
+{ "name": "Packs A Punch Porter", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "krogh_s_restaurant_and_brewpub", "updated": "2010-07-22 20:00:20", "description": "", "style": "Porter", "category": "Irish Ale" }
+{ "id": "krogh_s_restaurant_and_brewpub-packs_a_punch_porter", "flags": 0i32, "expiration": 0, "cas": 244758675457, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"krogh_s_restaurant_and_brewpub-packs_a_punch_porter"
+{ "name": "Mendocino Brewing - Saratoga Springs", "city": "Saratoga Springs", "state": "New York", "code": "", "country": "United States", "phone": "", "website": "http://www.mendobrew.com/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 43.0831d, "lon": -73.7846d } }
+{ "id": "mendocino_brewing_saratoga_springs", "flags": 0i32, "expiration": 0, "cas": 244881686528, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"mendocino_brewing_saratoga_springs"
+{ "name": "Surefire Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "sweetwater_brewing_casper", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "sweetwater_brewing_casper-surefire_stout", "flags": 0i32, "expiration": 0, "cas": 245117485056, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"sweetwater_brewing_casper-surefire_stout"
+{ "name": "Kupfer", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brauhaus_johann_albrecht_konstanz", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "brauhaus_johann_albrecht_konstanz-kupfer", "flags": 0i32, "expiration": 0, "cas": 244496793601, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"brauhaus_johann_albrecht_konstanz-kupfer"
+{ "name": "Saison Athene", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "saint_somewhere_brewing_company", "updated": "2010-07-22 20:00:20", "description": "A bottle conditioned Saison with a spicy and moderately hoppy profile true to the traditions of the farmhouse ales of Wallonia.\r\n\r\n A spiced saison with chamomile, rosemary and black pepper.", "style": "French & Belgian-Style Saison", "category": "Belgian and French Ale" }
+{ "id": "saint_somewhere_brewing_company-saison_athene", "flags": 0i32, "expiration": 0, "cas": 244992704513, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"saint_somewhere_brewing_company-saison_athene"
+{ "name": "Old Market Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "upstream_brewing_old_market", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "upstream_brewing_old_market-old_market_stout", "flags": 0i32, "expiration": 0, "cas": 245113946113, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"upstream_brewing_old_market-old_market_stout"
+{ "name": "Negra Modelo", "abv": 6.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "grupo_modelo", "updated": "2010-07-22 20:00:20", "description": "Negra Modelo has long been the dark beer alternative for Mexican beer drinkers. It has been identified as one of the few surviving examples of Vienna style lager - a style that was largely replaced in European breweries with Oktoberfest, a slightly lighter lager, in the early twentieth century. \r\nNegra Modelo pours with an off-white, medium head. The body is clear with a rich amber/copper color. The aroma is sweet with hints of apple. The impression at the first sip is sweet. This gives way only a bit to some hops bitterness which gives some balance but leaves the beer firmly in the sweet category. It has no real lager snap at the end, just lingering hops. This actually makes the second sip more balanced than the first.\r\n\r\nThere is some complexity and depth here but the flavors are very delicate. They are obl
 iterated by the aggressive flavors of the Mexican food that Modelo is often served with making it a sweet balance to the savory and sometimes hot cuisine.", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "grupo_modelo-negra_modelo", "flags": 0i32, "expiration": 0, "cas": 244758872065, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"grupo_modelo-negra_modelo"
+{ "name": "Celebration Wheat", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "copper_kettle_brewery", "updated": "2010-07-22 20:00:20", "description": "A nice traditional cloudy wheat beer. It is light in color with a banana and yeasty fragrance. This would go nice on a hot summer day or anytime you want a refreshing drink.", "style": "Light American Wheat Ale or Lager", "category": "Other Style" }
+{ "id": "copper_kettle_brewery-celebration_wheat", "flags": 0i32, "expiration": 0, "cas": 244500725762, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"copper_kettle_brewery-celebration_wheat"
+{ "name": "Adler Bräu Winter Ale", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "appleton_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "Old Ale", "category": "British Ale" }
+{ "id": "appleton_brewing-adler_brau_winter_ale", "flags": 0i32, "expiration": 0, "cas": 244369981443, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"appleton_brewing-adler_brau_winter_ale"
+{ "name": "Mönchshof Original Pils", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "kulmbacher_brauerei_ag", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "kulmbacher_brauerei_ag-monchshof_original_pils", "flags": 0i32, "expiration": 0, "cas": 244758740992, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"kulmbacher_brauerei_ag-monchshof_original_pils"
+{ "name": "Oktoberfest", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "moosejaw_pizza_dells_brewing_company", "updated": "2010-07-22 20:00:20", "description": "", "style": "German-Style Oktoberfest", "category": "German Lager" }
+{ "id": "moosejaw_pizza_dells_brewing_company-oktoberfest", "flags": 0i32, "expiration": 0, "cas": 244881686529, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"moosejaw_pizza_dells_brewing_company-oktoberfest"
+{ "name": "Nut Brown", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "taylor_s_restaurant_and_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Brown Ale", "category": "North American Ale" }
+{ "id": "taylor_s_restaurant_and_brewery-nut_brown", "flags": 0i32, "expiration": 0, "cas": 245117485057, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"taylor_s_restaurant_and_brewery-nut_brown"
+{ "name": "Messing", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brauhaus_johann_albrecht_konstanz", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brauhaus_johann_albrecht_konstanz-messing", "flags": 0i32, "expiration": 0, "cas": 244496859136, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"brauhaus_johann_albrecht_konstanz-messing"
+{ "name": "Schooner Brewery", "city": "Dallas", "state": "Texas", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 32.803d, "lon": -96.7699d } }
+{ "id": "schooner_brewery", "flags": 0i32, "expiration": 0, "cas": 244992704514, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"schooner_brewery"
+{ "name": "Wachusetts IPA", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "wachusetts_brewing_company", "updated": "2010-07-22 20:00:20", "description": "Nice Hoppy IPA - available in MA & NY only", "style": "American-Style India Pale Ale", "category": "North American Ale" }
+{ "id": "wachusetts_brewing_company-wachusetts_ipa", "flags": 0i32, "expiration": 0, "cas": 245748858880, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"wachusetts_brewing_company-wachusetts_ipa"
+{ "name": "Harbor City Brewing", "city": "Port Washington", "state": "Wisconsin", "code": "53074", "country": "United States", "phone": "1-262-284-3118", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "535 West Grand Avenue" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 43.3871d, "lon": -87.8795d } }
+{ "id": "harbor_city_brewing", "flags": 0i32, "expiration": 0, "cas": 244758937600, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"harbor_city_brewing"
+{ "name": "Daas", "city": "Tournai", "state": "", "code": "", "country": "Belgium", "phone": "442032865958", "website": "http://www.daasbeer.com/", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "Daas beers are organically brewed Belgium beer produced in Hainaut the famous Belgian province known for its fine hand crafted beers.", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 50.6059d, "lon": 3.3884d } }
+{ "id": "daas", "flags": 0i32, "expiration": 0, "cas": 244500791296, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"daas"
+{ "name": "Kickstart Oatmeal Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "barley_brothers_brewery_and_grill", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "barley_brothers_brewery_and_grill-kickstart_oatmeal_stout", "flags": 0i32, "expiration": 0, "cas": 244370046976, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"barley_brothers_brewery_and_grill-kickstart_oatmeal_stout"
+{ "name": "Bavarian Bock", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "aksarben_brewing_bop", "updated": "2010-07-22 20:00:20", "description": "", "style": "Traditional German-Style Bock", "category": "German Lager" }
+{ "id": "aksarben_brewing_bop-bavarian_bock", "flags": 0i32, "expiration": 0, "cas": 244366180352, "rev": 1i32, "vbid": 24i32, "dtype": 1i32 }
+"aksarben_brewing_bop-bavarian_bock"
+{ "name": "Celtic Rose", "abv": 5.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "lancaster_brewing_co", "updated": "2010-07-22 20:00:20", "description": "Our version of the traditional Irish Amber Ale. This beer combines the richness of German and Austrian malts with the delicate and spicy British hops for a taste worthy of the Red Rose City.", "style": "Irish-Style Red Ale", "category": "Irish Ale" }
+{ "id": "lancaster_brewing_co-celtic_rose", "flags": 0i32, "expiration": 0, "cas": 244758740993, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"lancaster_brewing_co-celtic_rose"
+{ "name": "White Christmas", "abv": 6.5d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "moylan_s_brewery_restaurant", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "moylan_s_brewery_restaurant-white_christmas", "flags": 0i32, "expiration": 0, "cas": 244881752064, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"moylan_s_brewery_restaurant-white_christmas"
+{ "name": "New World Wheat", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "tied_house_cafe_brewery_san_jose", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "tied_house_cafe_brewery_san_jose-new_world_wheat", "flags": 0i32, "expiration": 0, "cas": 245117485058, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"tied_house_cafe_brewery_san_jose-new_world_wheat"
+{ "name": "Independence Ale", "abv": 6.7d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brewpub_on_the_green", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "brewpub_on_the_green-independence_ale", "flags": 0i32, "expiration": 0, "cas": 244496859137, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"brewpub_on_the_green-independence_ale"
+{ "name": "Pugsley's Signature Series XXXX IPA", "abv": 9.25d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "shipyard_brewing_portland", "updated": "2010-07-22 20:00:20", "description": "XXXX IPA is a non-traditional American IPA with a brilliant copper color and the classic citrus nose of Cascade hops. This beer demonstrates a unique balance of malt-inspired, delicate red grapefruit sweetness and lingering hop dryness. The OG and final ABV provide the structure and body to balance the harmony of distinct flavours. Cascade, Warrior, Summit and Glacier Hops are used for bittering and Cascade Hops are added for dry hopping after fermentation. This hop blend is well balanced with Malted Wheat, Pale Ale, Crystal, and Caramalt Malts. To fully enjoy all the flavours, this ale is best drunk at 55 degrees Fahrenheit. This beer pairs well with Cajun dishes, blackened fish, and BBQ. XXXX draws its name from the British brewing convention of using X’s to denote style. 70
  BU’s, 1.092 OG, 9.25% ABV.", "style": "Imperial or Double India Pale Ale", "category": "North American Ale" }
+{ "id": "shipyard_brewing_portland-pugsley_s_signature_series_xxxx_ipa", "flags": 0i32, "expiration": 0, "cas": 244992770048, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"shipyard_brewing_portland-pugsley_s_signature_series_xxxx_ipa"
+{ "name": "House Ale", "abv": 4.5d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "weyerbacher_brewing_company", "updated": "2010-07-22 20:00:20", "description": "House Ale, our 4.5% session ale, was first brewed in 2006.  Our goal was to brew a beer that was a little bit lower in alcohol, but did not lack for body and flavor. We're sure you'll agree, that's just what we achieved with this tasty brew.  Brewed with Pale, Caramunich, and Carapils malt for flavor and body, then hopped exclusively with expensive Tettnang hops, a very delicate, delicious hops that perfectly fits this beer with just the right snap of flavor.   Available only in Pennsylvania, in our Variety Pack Case.", "style": "American-Style Amber/Red Ale", "category": "North American Ale" }
+{ "id": "weyerbacher_brewing_company-house_ale", "flags": 0i32, "expiration": 0, "cas": 245748924416, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"weyerbacher_brewing_company-house_ale"
+{ "name": "Hook & Ladder Brewing Company", "city": "Silver Spring", "state": "Maryland", "code": "20910", "country": "United States", "phone": "301.565.4522", "website": "http://www.hookandladderbeer.com", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "At Hook & Ladder Brewing we believe in great beer in the company of good friends, so we bring you three great beers for your drinking pleasure (please drink responsibly). Each of our beers is carefully crafted with the finest quality ingredients for a distinctive taste we know you will enjoy. Try one tonight, you just might get hooked. Through our own experiences in the fire and rescue service we have chosen the Hook & Ladder as a symbol of pride and honor to pay tribute to the brave men and women who serve and protect our communities.", "address": [ "8113 Fenton St." ], "geo": { "accuracy": "ROOFTOP", "lat": 38.9911d, "lon": -77.0237d } }
+{ "id": "hook_ladder_brewing_company", "flags": 0i32, "expiration": 0, "cas": 244758937601, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"hook_ladder_brewing_company"
+{ "name": "Valhalla Ale", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "denmark_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "denmark_brewing-valhalla_ale", "flags": 0i32, "expiration": 0, "cas": 244633305088, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"denmark_brewing-valhalla_ale"
+{ "name": "Big Buck Brewery and Steakhouse #2", "city": "Grand Rapids", "state": "Michigan", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 42.9634d, "lon": -85.6681d } }
+{ "id": "big_buck_brewery_and_steakhouse_2", "flags": 0i32, "expiration": 0, "cas": 244370046977, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"big_buck_brewery_and_steakhouse_2"
+{ "name": "Witbier", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "aksarben_brewing_bop", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "aksarben_brewing_bop-witbier", "flags": 0i32, "expiration": 0, "cas": 244366180353, "rev": 1i32, "vbid": 24i32, "dtype": 1i32 }
+"aksarben_brewing_bop-witbier"
+{ "name": "Light", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "legends_brewhouse_eatery_of_green_bay", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "legends_brewhouse_eatery_of_green_bay-light", "flags": 0i32, "expiration": 0, "cas": 244758740994, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"legends_brewhouse_eatery_of_green_bay-light"
+{ "name": "3C Extreme", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "nodding_head_brewpub", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "nodding_head_brewpub-3c_extreme", "flags": 0i32, "expiration": 0, "cas": 244881752065, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"nodding_head_brewpub-3c_extreme"
+{ "name": "Rock River Lager Beer", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "tunner_s_guild_brewing_systems", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "tunner_s_guild_brewing_systems-rock_river_lager_beer", "flags": 0i32, "expiration": 0, "cas": 245117485059, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"tunner_s_guild_brewing_systems-rock_river_lager_beer"
+{ "name": "Trappist Blond", "abv": 8.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "brouwerij_de_achelse_kluis", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "brouwerij_de_achelse_kluis-trappist_blond", "flags": 0i32, "expiration": 0, "cas": 244496859138, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"brouwerij_de_achelse_kluis-trappist_blond"
+{ "name": "Dark Wheat", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "sierra_nevada_brewing_co", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "sierra_nevada_brewing_co-dark_wheat", "flags": 0i32, "expiration": 0, "cas": 244992770049, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"sierra_nevada_brewing_co-dark_wheat"
+{ "name": "ESB", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "wild_river_brewing_and_pizza_cave_junction", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "wild_river_brewing_and_pizza_cave_junction-esb", "flags": 0i32, "expiration": 0, "cas": 245748989952, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"wild_river_brewing_and_pizza_cave_junction-esb"
+{ "name": "Smashing Berry Ale", "abv": 4.3d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "hoppin_frog_brewery", "updated": "2010-07-22 20:00:20", "description": "An abundance of fresh, natural fruit flavor makes this beer something special. You would think we picked the fruit moments before brewing.", "style": "Fruit Beer", "category": "Other Style" }
+{ "id": "hoppin_frog_brewery-smashing_berry_ale", "flags": 0i32, "expiration": 0, "cas": 244759003136, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"hoppin_frog_brewery-smashing_berry_ale"
+{ "name": "Jubel 2000", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "deschutes_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "Old Ale", "category": "British Ale" }
+{ "id": "deschutes_brewery-jubel_2000", "flags": 0i32, "expiration": 0, "cas": 244633370624, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"deschutes_brewery-jubel_2000"
+{ "name": "Black Lab Stout", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "big_dog_s_brewing_company", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "big_dog_s_brewing_company-black_lab_stout", "flags": 0i32, "expiration": 0, "cas": 244370112512, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"big_dog_s_brewing_company-black_lab_stout"
+{ "name": "Puffers Smoked Porter", "abv": 4.6d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "amherst_brewing_company", "updated": "2010-07-22 20:00:20", "description": "Dark, full bodied ale with a prominent smoked malt flavor", "style": "Porter", "category": "Irish Ale" }
+{ "id": "amherst_brewing_company-puffers_smoked_porter", "flags": 0i32, "expiration": 0, "cas": 244366245888, "rev": 1i32, "vbid": 24i32, "dtype": 1i32 }
+"amherst_brewing_company-puffers_smoked_porter"
+{ "name": "Original", "abv": 5.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "lowenbrau_brauerei", "updated": "2010-07-22 20:00:20", "description": "" }
+{ "id": "lowenbrau_brauerei-original", "flags": 0i32, "expiration": 0, "cas": 244881883136, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"lowenbrau_brauerei-original"
+{ "name": "Haymarket Pilsner", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "o_grady_s_brewery_and_pub_1", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "o_grady_s_brewery_and_pub_1-haymarket_pilsner", "flags": 0i32, "expiration": 0, "cas": 244881817600, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"o_grady_s_brewery_and_pub_1-haymarket_pilsner"
+{ "name": "Tunner's Guild Brewing Systems", "city": "Saint Paul", "state": "Minnesota", "code": "", "country": "United States", "phone": "", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [  ], "geo": { "accuracy": "APPROXIMATE", "lat": 44.9442d, "lon": -93.0861d } }
+{ "id": "tunner_s_guild_brewing_systems", "flags": 0i32, "expiration": 0, "cas": 245117550592, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"tunner_s_guild_brewing_systems"
+{ "name": "Billtown Blonde", "abv": 6.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "bullfrog_brewery", "updated": "2010-07-22 20:00:20", "description": "This lightly colored ale has a delicate floral aroma and flavor reminiscent of Williamsports first breweries, clean and crisp with just a touch of lingering sweetness leading to a dry, balanced finish." }
+{ "id": "bullfrog_brewery-billtown_blonde", "flags": 0i32, "expiration": 0, "cas": 244496924672, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"bullfrog_brewery-billtown_blonde"
+{ "name": "Münchner Hell / Premium Lager", "abv": 5.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "spaten_franziskaner_brau", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Lager", "category": "North American Lager" }
+{ "id": "spaten_franziskaner_brau-munchner_hell_premium_lager", "flags": 0i32, "expiration": 0, "cas": 245110538240, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"spaten_franziskaner_brau-munchner_hell_premium_lager"
+{ "name": "Sagebrush Stout", "abv": 6.1d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "wynkoop_brewing", "updated": "2010-07-22 20:00:20", "description": "A dark, deeply roasted and full-bodied ale. Rich with kisses of chocolate, coffee and oats, it's a glorious version of an American-style stout. A longtime house favorite.", "style": "American-Style Stout", "category": "North American Ale" }
+{ "id": "wynkoop_brewing-sagebrush_stout", "flags": 0i32, "expiration": 0, "cas": 245748989953, "rev": 1i32, "vbid": 36i32, "dtype": 1i32 }
+"wynkoop_brewing-sagebrush_stout"
+{ "name": "Hop Garden Pale Ale", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "jj_bitting_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "jj_bitting_brewing-hop_garden_pale_ale", "flags": 0i32, "expiration": 0, "cas": 244759003137, "rev": 1i32, "vbid": 62i32, "dtype": 1i32 }
+"jj_bitting_brewing-hop_garden_pale_ale"
+{ "name": "5 Malt Ale", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "devil_mountain_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Amber/Red Ale", "category": "North American Ale" }
+{ "id": "devil_mountain_brewing-5_malt_ale", "flags": 0i32, "expiration": 0, "cas": 244633370625, "rev": 1i32, "vbid": 63i32, "dtype": 1i32 }
+"devil_mountain_brewing-5_malt_ale"
+{ "name": "Birra Moretti", "city": "Udine", "state": "", "code": "", "country": "Italy", "phone": "39-800-1859.00", "website": "", "type": "brewery", "updated": "2010-07-22 20:00:20", "description": "", "address": [ "Viale Venezia 9" ], "geo": { "accuracy": "RANGE_INTERPOLATED", "lat": 46.0597d, "lon": 13.2269d } }
+{ "id": "birra_moretti", "flags": 0i32, "expiration": 0, "cas": 244370112513, "rev": 1i32, "vbid": 32i32, "dtype": 1i32 }
+"birra_moretti"
+{ "name": "Hop Ottin IPA", "abv": 7.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "anderson_valley_brewing", "updated": "2010-07-22 20:00:20", "description": "Hop Ottin' IPA is as hoppy as they come. The name means \"hard working hops,\" in Boontling, and that tells it all. Generous additions of high-alpha Pacific Northwest hops added during a vigorous boil, plus traditional dry hopping, with whole hop cones, give this ale a delicious citrus aroma, and an intense hoppy bite. This IPA is a hop lover's dream.", "style": "American-Style India Pale Ale", "category": "North American Ale" }
+{ "id": "anderson_valley_brewing-hop_ottin_ipa", "flags": 0i32, "expiration": 0, "cas": 244366311424, "rev": 1i32, "vbid": 24i32, "dtype": 1i32 }
+"anderson_valley_brewing-hop_ottin_ipa"
+{ "name": "Feast of Fools", "abv": 5.3d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "magic_hat", "updated": "2010-07-22 20:00:20", "description": "Our Holiday Offering\r\nIn pre-christian times, the Celebration of darkness and light was marked with great halls filled with smoke & mirrors. Guilded goblets brimming with seasonal brews were lifted to lips, speaking a language no longer known. \r\nCenturies pass.\r\n\r\nThe winter wind finds its way through heavy wood doors. There is a solemn business of monks to be done. But also brewing, a season of celebration is about to begin....\r\nMore years pass.\r\nThe modern age. \r\nThe present connects the past through the brewer's art and a new beer is born.\r\nFeast of Fools... \r\nA perfect dessert beer brewed exclusively for the holiday season.  Hand bottled, champagne corked. \r\nOur inky, rich, black stout, with the addition of raspberries.", "style": "Fruit Beer", "category": "Other Style" }
+{ "id": "magic_hat-feast_of_fools", "flags": 0i32, "expiration": 0, "cas": 244881948672, "rev": 1i32, "vbid": 61i32, "dtype": 1i32 }
+"magic_hat-feast_of_fools"
+{ "name": "Village Nut Brown Ale", "abv": 6.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "oak_creek_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Brown Ale", "category": "North American Ale" }
+{ "id": "oak_creek_brewery-village_nut_brown_ale", "flags": 0i32, "expiration": 0, "cas": 244881817601, "rev": 1i32, "vbid": 60i32, "dtype": 1i32 }
+"oak_creek_brewery-village_nut_brown_ale"
+{ "name": "Hefeweizen", "abv": 4.2d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "upper_mississippi_brewing", "updated": "2010-07-22 20:00:20", "description": "", "style": "South German-Style Hefeweizen", "category": "German Ale" }
+{ "id": "upper_mississippi_brewing-hefeweizen", "flags": 0i32, "expiration": 0, "cas": 245117550593, "rev": 1i32, "vbid": 45i32, "dtype": 1i32 }
+"upper_mississippi_brewing-hefeweizen"
+{ "name": "Mesa Pale Ale", "abv": 0.0d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "callahan_s_pub_and_brewery", "updated": "2010-07-22 20:00:20", "description": "", "style": "American-Style Pale Ale", "category": "North American Ale" }
+{ "id": "callahan_s_pub_and_brewery-mesa_pale_ale", "flags": 0i32, "expiration": 0, "cas": 244496924673, "rev": 1i32, "vbid": 53i32, "dtype": 1i32 }
+"callahan_s_pub_and_brewery-mesa_pale_ale"
+{ "name": "Winter Brew", "abv": 5.75d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "sprecher_brewing", "updated": "2010-07-22 20:00:20", "description": "A flavorful blend of dark roasted and sweet caramel malts defines this smooth and robust lager. The rich, nourishing flavors of a full-bodied Munich bock make this Bavarian-style brew perfect for those long winter nights.", "style": "Traditional German-Style Bock", "category": "German Lager" }
+{ "id": "sprecher_brewing-winter_brew", "flags": 0i32, "expiration": 0, "cas": 245110669312, "rev": 1i32, "vbid": 29i32, "dtype": 1i32 }
+"sprecher_brewing-winter_brew"
+{ "name": "Yards Brawler", "abv": 4.3d, "ibu": 0.0d, "srm": 0.0d, "upc": 0, "type": "beer", "brewery_id": "yards_brewing", "updated": "2010-07-22 20:00:20", "description": "Boasting superior taste and champion flavor, the Brawler is crafted in the style of English session ales. This malt-forward, ruby colored ale is great for when you want to go a few rounds.", "style": "Classic English-Style Pale Ale", "category": "British Ale" }
+{ "id": "yards_b

<TRUNCATED>