Posted to commits@asterixdb.apache.org by im...@apache.org on 2015/08/20 04:27:49 UTC

[4/9] incubator-asterixdb git commit: Changed metadata storage format for nullable field types. Moved field name generation to the client out of metadata node code. Changed naming scheme for autogenerated types. Moved GroupName, CompactionPolicy & Compac
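
Before reading the diff: the recurring pattern in this change is that hand-built unions of ANULL plus a payload type are replaced by the AUnionType.createNullableType(...) factory, and the stored Field record gains an IsNullable boolean (see createFieldRecordType below). A minimal before/after sketch of the union change follows, using the types and calls visible in the diff; the wrapper class name and import paths are illustrative assumptions, not part of the commit.

    import java.util.ArrayList;
    import java.util.List;

    import edu.uci.ics.asterix.om.types.AUnionType;
    import edu.uci.ics.asterix.om.types.BuiltinType;
    import edu.uci.ics.asterix.om.types.IAType;

    // Illustrative helper class, not part of the commit.
    public class NullableTypeSketch {

        // Old pattern (removed throughout this commit): build the union list
        // by hand and put ANULL first to mark the field as nullable.
        static AUnionType oldStyleNullableString() {
            List<IAType> unionList = new ArrayList<IAType>();
            unionList.add(BuiltinType.ANULL);
            unionList.add(BuiltinType.ASTRING);
            return new AUnionType(unionList, null);
        }

        // New pattern (introduced by this commit): a single factory call.
        static AUnionType newStyleNullableString() {
            return AUnionType.createNullableType(BuiltinType.ASTRING);
        }
    }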

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
index c11e756..4539bca 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataPrimaryIndexes.java
@@ -76,64 +76,62 @@ public class MetadataPrimaryIndexes {
         }
 
         DATAVERSE_DATASET = new MetadataIndex("Dataverse", null, 2, new IAType[] { BuiltinType.ASTRING },
-                (Arrays.asList(Arrays.asList("DataverseName"))), 0, MetadataRecordTypes.DATAVERSE_RECORDTYPE, DATAVERSE_DATASET_ID,
-                true, new int[] { 0 });
+                (Arrays.asList(Arrays.asList("DataverseName"))), 0, MetadataRecordTypes.DATAVERSE_RECORDTYPE,
+                DATAVERSE_DATASET_ID, true, new int[] { 0 });
 
         DATASET_DATASET = new MetadataIndex("Dataset", null, 3,
-                new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList(
-                        "DatasetName"))), 0, MetadataRecordTypes.DATASET_RECORDTYPE, DATASET_DATASET_ID, true,
-                new int[] { 0, 1 });
+                new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(
+                        Arrays.asList("DataverseName"), Arrays.asList("DatasetName"))), 0,
+                MetadataRecordTypes.DATASET_RECORDTYPE, DATASET_DATASET_ID, true, new int[] { 0, 1 });
 
         DATATYPE_DATASET = new MetadataIndex("Datatype", null, 3, new IAType[] { BuiltinType.ASTRING,
-                BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("DatatypeName" ))), 0,
-                MetadataRecordTypes.DATATYPE_RECORDTYPE, DATATYPE_DATASET_ID, true, new int[] { 0, 1 });
+                BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"), Arrays.asList("DatatypeName"))),
+                0, MetadataRecordTypes.DATATYPE_RECORDTYPE, DATATYPE_DATASET_ID, true, new int[] { 0, 1 });
 
         INDEX_DATASET = new MetadataIndex("Index", null, 4, new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING,
-                BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("DatasetName"),Arrays.asList("IndexName"))), 0,
-                MetadataRecordTypes.INDEX_RECORDTYPE, INDEX_DATASET_ID, true, new int[] { 0, 1, 2 });
+                BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"), Arrays.asList("DatasetName"),
+                Arrays.asList("IndexName"))), 0, MetadataRecordTypes.INDEX_RECORDTYPE, INDEX_DATASET_ID, true,
+                new int[] { 0, 1, 2 });
 
-        NODE_DATASET = new MetadataIndex("Node", null, 2, new IAType[] { BuiltinType.ASTRING },
-                (Arrays.asList(Arrays.asList("NodeName"))), 0, MetadataRecordTypes.NODE_RECORDTYPE, NODE_DATASET_ID, true,
-                new int[] { 0 });
+        NODE_DATASET = new MetadataIndex("Node", null, 2, new IAType[] { BuiltinType.ASTRING }, (Arrays.asList(Arrays
+                .asList("NodeName"))), 0, MetadataRecordTypes.NODE_RECORDTYPE, NODE_DATASET_ID, true, new int[] { 0 });
 
         NODEGROUP_DATASET = new MetadataIndex("Nodegroup", null, 2, new IAType[] { BuiltinType.ASTRING },
-                (Arrays.asList(Arrays.asList("GroupName"))), 0, MetadataRecordTypes.NODEGROUP_RECORDTYPE, NODEGROUP_DATASET_ID, true,
-                new int[] { 0 });
+                (Arrays.asList(Arrays.asList("GroupName"))), 0, MetadataRecordTypes.NODEGROUP_RECORDTYPE,
+                NODEGROUP_DATASET_ID, true, new int[] { 0 });
 
         FUNCTION_DATASET = new MetadataIndex("Function", null, 4, new IAType[] { BuiltinType.ASTRING,
-                BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("Name"),Arrays.asList("Arity"))), 0,
-                MetadataRecordTypes.FUNCTION_RECORDTYPE, FUNCTION_DATASET_ID, true, new int[] { 0, 1, 2 });
+                BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),
+                Arrays.asList("Name"), Arrays.asList("Arity"))), 0, MetadataRecordTypes.FUNCTION_RECORDTYPE,
+                FUNCTION_DATASET_ID, true, new int[] { 0, 1, 2 });
 
         DATASOURCE_ADAPTER_DATASET = new MetadataIndex("DatasourceAdapter", null, 3, new IAType[] {
-                BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("Name"))), 0,
-                MetadataRecordTypes.DATASOURCE_ADAPTER_RECORDTYPE, DATASOURCE_ADAPTER_DATASET_ID, true, new int[] { 0,
-                        1 });
+                BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),
+                Arrays.asList("Name"))), 0, MetadataRecordTypes.DATASOURCE_ADAPTER_RECORDTYPE,
+                DATASOURCE_ADAPTER_DATASET_ID, true, new int[] { 0, 1 });
 
         FEED_DATASET = new MetadataIndex("Feed", null, 3, new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING },
-                (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("FeedName"))), 0, MetadataRecordTypes.FEED_RECORDTYPE, FEED_DATASET_ID,
-                true, new int[] { 0, 1 });
-
-        FEED_ACTIVITY_DATASET = new MetadataIndex("FeedActivity", null, 5, new IAType[] { BuiltinType.ASTRING,
-                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32 }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList(
-                "FeedName"),Arrays.asList("DatasetName"),Arrays.asList("ActivityId"))), 0, MetadataRecordTypes.FEED_ACTIVITY_RECORDTYPE,
-                FEED_ACTIVITY_DATASET_ID, true, new int[] { 0, 1, 2, 3 });
+                (Arrays.asList(Arrays.asList("DataverseName"), Arrays.asList("FeedName"))), 0,
+                MetadataRecordTypes.FEED_RECORDTYPE, FEED_DATASET_ID, true, new int[] { 0, 1 });
 
         LIBRARY_DATASET = new MetadataIndex("Library", null, 3,
-                new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("Name"))), 0,
+                new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(
+                        Arrays.asList("DataverseName"), Arrays.asList("Name"))), 0,
                 MetadataRecordTypes.LIBRARY_RECORDTYPE, LIBRARY_DATASET_ID, true, new int[] { 0, 1 });
 
         FEED_POLICY_DATASET = new MetadataIndex("FeedPolicy", null, 3, new IAType[] { BuiltinType.ASTRING,
-                BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("PolicyName"))), 0,
+                BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"), Arrays.asList("PolicyName"))), 0,
                 MetadataRecordTypes.FEED_POLICY_RECORDTYPE, FEED_POLICY_DATASET_ID, true, new int[] { 0, 1 });
 
         COMPACTION_POLICY_DATASET = new MetadataIndex("CompactionPolicy", null, 3, new IAType[] { BuiltinType.ASTRING,
-                BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("CompactionPolicy"))), 0,
+                BuiltinType.ASTRING },
+                (Arrays.asList(Arrays.asList("DataverseName"), Arrays.asList("CompactionPolicy"))), 0,
                 MetadataRecordTypes.COMPACTION_POLICY_RECORDTYPE, COMPACTION_POLICY_DATASET_ID, true,
                 new int[] { 0, 1 });
-        
+
         EXTERNAL_FILE_DATASET = new MetadataIndex("ExternalFile", null, 4, new IAType[] { BuiltinType.ASTRING,
-                BuiltinType.ASTRING, BuiltinType.AINT32 },
-                (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList("DatasetName"),Arrays.asList("FileNumber"))), 0,
+                BuiltinType.ASTRING, BuiltinType.AINT32 }, (Arrays.asList(Arrays.asList("DataverseName"),
+                Arrays.asList("DatasetName"), Arrays.asList("FileNumber"))), 0,
                 MetadataRecordTypes.EXTERNAL_FILE_RECORDTYPE, EXTERNAL_FILE_DATASET_ID, true, new int[] { 0, 1, 2 });
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
index b726ed6..573d340 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -15,9 +15,6 @@
 
 package edu.uci.ics.asterix.metadata.bootstrap;
 
-import java.util.ArrayList;
-import java.util.List;
-
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.om.types.AOrderedListType;
@@ -53,7 +50,6 @@ public final class MetadataRecordTypes {
     public static ARecordType PRIMARY_FEED_DETAILS_RECORDTYPE;
     public static ARecordType SECONDARY_FEED_DETAILS_RECORDTYPE;
     public static ARecordType FEED_ADAPTER_CONFIGURATION_RECORDTYPE;
-    public static ARecordType FEED_ACTIVITY_RECORDTYPE;
     public static ARecordType FEED_POLICY_RECORDTYPE;
     public static ARecordType POLICY_PARAMS_RECORDTYPE;
     public static ARecordType LIBRARY_RECORDTYPE;
@@ -62,7 +58,8 @@ public final class MetadataRecordTypes {
 
     /**
      * Create all metadata record types.
-     * @throws HyracksDataException 
+     * 
+     * @throws HyracksDataException
      */
     public static void init() throws MetadataException, HyracksDataException {
         // Attention: The order of these calls is important because some types
@@ -103,6 +100,22 @@ public final class MetadataRecordTypes {
 
             EXTERNAL_FILE_RECORDTYPE = createExternalFileRecordType();
 
+            //generate nested type names
+            DATASET_RECORDTYPE.generateNestedDerivedTypeNames();
+            DATATYPE_RECORDTYPE.generateNestedDerivedTypeNames();
+            DATAVERSE_RECORDTYPE.generateNestedDerivedTypeNames();
+            INDEX_RECORDTYPE.generateNestedDerivedTypeNames();
+            NODE_RECORDTYPE.generateNestedDerivedTypeNames();
+            NODEGROUP_RECORDTYPE.generateNestedDerivedTypeNames();
+            FUNCTION_RECORDTYPE.generateNestedDerivedTypeNames();
+            DATASOURCE_ADAPTER_RECORDTYPE.generateNestedDerivedTypeNames();
+            FEED_RECORDTYPE.generateNestedDerivedTypeNames();
+            PRIMARY_FEED_DETAILS_RECORDTYPE.generateNestedDerivedTypeNames();
+            SECONDARY_FEED_DETAILS_RECORDTYPE.generateNestedDerivedTypeNames();
+            FEED_POLICY_RECORDTYPE.generateNestedDerivedTypeNames();
+            LIBRARY_RECORDTYPE.generateNestedDerivedTypeNames();
+            COMPACTION_POLICY_RECORDTYPE.generateNestedDerivedTypeNames();
+            EXTERNAL_FILE_RECORDTYPE.generateNestedDerivedTypeNames();
         } catch (AsterixException e) {
             throw new MetadataException(e);
         }
@@ -163,20 +176,14 @@ public final class MetadataRecordTypes {
     public static final int INTERNAL_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX = 1;
     public static final int INTERNAL_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX = 2;
     public static final int INTERNAL_DETAILS_ARECORD_PRIMARYKEY_FIELD_INDEX = 3;
-    public static final int INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX = 4;
-    public static final int INTERNAL_DETAILS_ARECORD_AUTOGENERATED_FIELD_INDEX = 5;
-    public static final int INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX = 6;
-    public static final int INTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX = 7;
+    public static final int INTERNAL_DETAILS_ARECORD_AUTOGENERATED_FIELD_INDEX = 4;
 
     private static final ARecordType createInternalDetailsRecordType() throws AsterixException {
         AOrderedListType olType = new AOrderedListType(BuiltinType.ASTRING, null);
         AOrderedListType ololType = new AOrderedListType(olType, null);
-        AOrderedListType compactionPolicyPropertyListType = new AOrderedListType(
-                COMPACTION_POLICY_PROPERTIES_RECORDTYPE, null);
-        String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey", "GroupName",
-                "Autogenerated", "CompactionPolicy", "CompactionPolicyProperties" };
-        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, ololType, ololType, BuiltinType.ASTRING,
-                BuiltinType.ABOOLEAN, BuiltinType.ASTRING, compactionPolicyPropertyListType };
+        String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey",
+                "Autogenerated" };
+        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, ololType, ololType, BuiltinType.ABOOLEAN };
         try {
             return new ARecordType(null, fieldNames, fieldTypes, true);
         } catch (HyracksDataException e) {
@@ -188,21 +195,16 @@ public final class MetadataRecordTypes {
     // external details.
     public static final int EXTERNAL_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX = 0;
     public static final int EXTERNAL_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX = 1;
-    public static final int EXTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX = 2;
-    public static final int EXTERNAL_DETAILS_ARECORD_LAST_REFRESH_TIME_FIELD_INDEX = 3;
-    public static final int EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX = 4;
-    public static final int EXTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_FIELD_INDEX = 5;
-    public static final int EXTERNAL_DETAILS_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX = 6;
+    public static final int EXTERNAL_DETAILS_ARECORD_LAST_REFRESH_TIME_FIELD_INDEX = 2;
+    public static final int EXTERNAL_DETAILS_ARECORD_TRANSACTION_STATE_FIELD_INDEX = 3;
 
     private static final ARecordType createExternalDetailsRecordType() throws AsterixException {
 
         AOrderedListType orderedPropertyListType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
         AOrderedListType compactionPolicyPropertyListType = new AOrderedListType(
                 COMPACTION_POLICY_PROPERTIES_RECORDTYPE, null);
-        String[] fieldNames = { "DatasourceAdapter", "Properties", "GroupName", "LastRefreshTime", "TransactionState",
-                "CompactionPolicy", "CompactionPolicyProperties" };
-        IAType[] fieldTypes = { BuiltinType.ASTRING, orderedPropertyListType, BuiltinType.ASTRING,
-                BuiltinType.ADATETIME, BuiltinType.AINT32, BuiltinType.ASTRING, compactionPolicyPropertyListType };
+        String[] fieldNames = { "DatasourceAdapter", "Properties", "LastRefreshTime", "TransactionState", };
+        IAType[] fieldTypes = { BuiltinType.ASTRING, orderedPropertyListType, BuiltinType.ADATETIME, BuiltinType.AINT32 };
         try {
             return new ARecordType(null, fieldNames, fieldTypes, true);
         } catch (HyracksDataException e) {
@@ -246,10 +248,7 @@ public final class MetadataRecordTypes {
                 "DatasourceAdapter", "Properties", "Function", "Status", "CompactionPolicy",
                 "CompactionPolicyProperties" };
 
-        List<IAType> feedFunctionUnionList = new ArrayList<IAType>();
-        feedFunctionUnionList.add(BuiltinType.ANULL);
-        feedFunctionUnionList.add(BuiltinType.ASTRING);
-        AUnionType feedFunctionUnion = new AUnionType(feedFunctionUnionList, null);
+        AUnionType feedFunctionUnion = AUnionType.createNullableType(BuiltinType.ASTRING);
 
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListType, orderedListType,
                 BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListOfPropertiesType, feedFunctionUnion,
@@ -266,32 +265,32 @@ public final class MetadataRecordTypes {
     public static final int DATASET_ARECORD_DATASETNAME_FIELD_INDEX = 1;
     public static final int DATASET_ARECORD_DATATYPENAME_FIELD_INDEX = 2;
     public static final int DATASET_ARECORD_DATASETTYPE_FIELD_INDEX = 3;
-    public static final int DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX = 4;
-    public static final int DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX = 5;
-    public static final int DATASET_ARECORD_HINTS_FIELD_INDEX = 6;
-    public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 7;
-    public static final int DATASET_ARECORD_DATASETID_FIELD_INDEX = 8;
-    public static final int DATASET_ARECORD_PENDINGOP_FIELD_INDEX = 9;
+    public static final int DATASET_ARECORD_GROUPNAME_FIELD_INDEX = 4;
+    public static final int DATASET_ARECORD_COMPACTION_POLICY_FIELD_INDEX = 5;
+    public static final int DATASET_ARECORD_COMPACTION_POLICY_PROPERTIES_FIELD_INDEX = 6;
+    public static final int DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX = 7;
+    public static final int DATASET_ARECORD_EXTERNALDETAILS_FIELD_INDEX = 8;
+    public static final int DATASET_ARECORD_HINTS_FIELD_INDEX = 9;
+    public static final int DATASET_ARECORD_TIMESTAMP_FIELD_INDEX = 10;
+    public static final int DATASET_ARECORD_DATASETID_FIELD_INDEX = 11;
+    public static final int DATASET_ARECORD_PENDINGOP_FIELD_INDEX = 12;
 
     private static final ARecordType createDatasetRecordType() throws AsterixException {
-        String[] fieldNames = { "DataverseName", "DatasetName", "DataTypeName", "DatasetType", "InternalDetails",
-                "ExternalDetails", "Hints", "Timestamp", "DatasetId", "PendingOp" };
+        String[] fieldNames = { "DataverseName", "DatasetName", "DatatypeName", "DatasetType", "GroupName",
+                "CompactionPolicy", "CompactionPolicyProperties", "InternalDetails", "ExternalDetails", "Hints",
+                "Timestamp", "DatasetId", "PendingOp" };
 
-        List<IAType> internalRecordUnionList = new ArrayList<IAType>();
-        internalRecordUnionList.add(BuiltinType.ANULL);
-        internalRecordUnionList.add(INTERNAL_DETAILS_RECORDTYPE);
-        AUnionType internalRecordUnion = new AUnionType(internalRecordUnionList, null);
-
-        List<IAType> externalRecordUnionList = new ArrayList<IAType>();
-        externalRecordUnionList.add(BuiltinType.ANULL);
-        externalRecordUnionList.add(EXTERNAL_DETAILS_RECORDTYPE);
-        AUnionType externalRecordUnion = new AUnionType(externalRecordUnionList, null);
+        AUnionType internalRecordUnion = AUnionType.createNullableType(INTERNAL_DETAILS_RECORDTYPE);
+        AUnionType externalRecordUnion = AUnionType.createNullableType(EXTERNAL_DETAILS_RECORDTYPE);
+        AOrderedListType compactionPolicyPropertyListType = new AOrderedListType(
+                COMPACTION_POLICY_PROPERTIES_RECORDTYPE, null);
 
         AUnorderedListType unorderedListOfHintsType = new AUnorderedListType(DATASET_HINTS_RECORDTYPE, null);
 
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                internalRecordUnion, externalRecordUnion, unorderedListOfHintsType, BuiltinType.ASTRING,
-                BuiltinType.AINT32, BuiltinType.AINT32 };
+                BuiltinType.ASTRING, BuiltinType.ASTRING, compactionPolicyPropertyListType, internalRecordUnion,
+                externalRecordUnion, unorderedListOfHintsType, BuiltinType.ASTRING, BuiltinType.AINT32,
+                BuiltinType.AINT32 };
         try {
             return new ARecordType("DatasetRecordType", fieldNames, fieldTypes, true);
         } catch (HyracksDataException e) {
@@ -303,10 +302,11 @@ public final class MetadataRecordTypes {
     // field type.
     public static final int FIELD_ARECORD_FIELDNAME_FIELD_INDEX = 0;
     public static final int FIELD_ARECORD_FIELDTYPE_FIELD_INDEX = 1;
+    public static final int FIELD_ARECORD_ISNULLABLE_FIELD_INDEX = 2;
 
     private static final ARecordType createFieldRecordType() throws AsterixException {
-        String[] fieldNames = { "FieldName", "FieldType" };
-        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING };
+        String[] fieldNames = { "FieldName", "FieldType", "IsNullable" };
+        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ABOOLEAN };
         try {
             return new ARecordType(null, fieldNames, fieldTypes, true);
         } catch (HyracksDataException e) {
@@ -334,31 +334,17 @@ public final class MetadataRecordTypes {
     // derived type.
     public static final int DERIVEDTYPE_ARECORD_TAG_FIELD_INDEX = 0;
     public static final int DERIVEDTYPE_ARECORD_ISANONYMOUS_FIELD_INDEX = 1;
-    public static final int DERIVEDTYPE_ARECORD_ENUMVALUES_FIELD_INDEX = 2;
-    public static final int DERIVEDTYPE_ARECORD_RECORD_FIELD_INDEX = 3;
-    public static final int DERIVEDTYPE_ARECORD_UNION_FIELD_INDEX = 4;
-    public static final int DERIVEDTYPE_ARECORD_UNORDEREDLIST_FIELD_INDEX = 5;
-    public static final int DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX = 6;
+    public static final int DERIVEDTYPE_ARECORD_RECORD_FIELD_INDEX = 2;
+    public static final int DERIVEDTYPE_ARECORD_UNORDEREDLIST_FIELD_INDEX = 3;
+    public static final int DERIVEDTYPE_ARECORD_ORDEREDLIST_FIELD_INDEX = 4;
 
     private static final ARecordType createDerivedTypeRecordType() throws AsterixException {
-        String[] fieldNames = { "Tag", "IsAnonymous", "EnumValues", "Record", "Union", "UnorderedList", "OrderedList" };
-        List<IAType> recordUnionList = new ArrayList<IAType>();
-        recordUnionList.add(BuiltinType.ANULL);
-        recordUnionList.add(RECORD_RECORDTYPE);
-        AUnionType recordUnion = new AUnionType(recordUnionList, null);
-
-        List<IAType> unionUnionList = new ArrayList<IAType>();
-        unionUnionList.add(BuiltinType.ANULL);
-        unionUnionList.add(new AOrderedListType(BuiltinType.ASTRING, null));
-        AUnionType unionUnion = new AUnionType(unionUnionList, null);
-
-        List<IAType> collectionUnionList = new ArrayList<IAType>();
-        collectionUnionList.add(BuiltinType.ANULL);
-        collectionUnionList.add(BuiltinType.ASTRING);
-        AUnionType collectionUnion = new AUnionType(collectionUnionList, null);
-
-        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ABOOLEAN, unionUnion, recordUnion, unionUnion,
-                collectionUnion, collectionUnion };
+        String[] fieldNames = { "Tag", "IsAnonymous", "Record", "UnorderedList", "OrderedList" };
+        AUnionType recordUnion = AUnionType.createNullableType(RECORD_RECORDTYPE);
+        AUnionType collectionUnion = AUnionType.createNullableType(BuiltinType.ASTRING);
+
+        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ABOOLEAN, recordUnion, collectionUnion,
+                collectionUnion };
         try {
             return new ARecordType(null, fieldNames, fieldTypes, true);
         } catch (HyracksDataException e) {
@@ -375,10 +361,7 @@ public final class MetadataRecordTypes {
 
     private static final ARecordType createDatatypeRecordType() throws AsterixException {
         String[] fieldNames = { "DataverseName", "DatatypeName", "Derived", "Timestamp" };
-        List<IAType> recordUnionList = new ArrayList<IAType>();
-        recordUnionList.add(BuiltinType.ANULL);
-        recordUnionList.add(DERIVEDTYPE_RECORDTYPE);
-        AUnionType recordUnion = new AUnionType(recordUnionList, null);
+        AUnionType recordUnion = AUnionType.createNullableType(DERIVEDTYPE_RECORDTYPE);
         IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, recordUnion, BuiltinType.ASTRING };
         try {
             return new ARecordType("DatatypeRecordType", fieldNames, fieldTypes, true);
@@ -500,7 +483,6 @@ public final class MetadataRecordTypes {
     public static final int FEED_ACTIVITY_ARECORD_ACTIVITY_TYPE_FIELD_INDEX = 4;
     public static final int FEED_ACTIVITY_ARECORD_DETAILS_FIELD_INDEX = 5;
     public static final int FEED_ACTIVITY_ARECORD_LAST_UPDATE_TIMESTAMP_FIELD_INDEX = 6;
-   
 
     public static final int FEED_ARECORD_DATAVERSE_NAME_FIELD_INDEX = 0;
     public static final int FEED_ARECORD_FEED_NAME_FIELD_INDEX = 1;
@@ -510,7 +492,6 @@ public final class MetadataRecordTypes {
     public static final int FEED_ARECORD_SECONDARY_TYPE_DETAILS_FIELD_INDEX = 5;
     public static final int FEED_ARECORD_TIMESTAMP_FIELD_INDEX = 6;
 
-    
     public static final int FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_NAME_FIELD_INDEX = 0;
     public static final int FEED_ARECORD_PRIMARY_FIELD_DETAILS_ADAPTOR_CONFIGURATION_FIELD_INDEX = 1;
 
@@ -518,20 +499,9 @@ public final class MetadataRecordTypes {
 
     private static ARecordType createFeedRecordType() throws AsterixException, HyracksDataException {
 
-        List<IAType> feedFunctionUnionList = new ArrayList<IAType>();
-        feedFunctionUnionList.add(BuiltinType.ANULL);
-        feedFunctionUnionList.add(BuiltinType.ASTRING);
-        AUnionType feedFunctionUnion = new AUnionType(feedFunctionUnionList, null);
-
-        List<IAType> primaryFeedTypeDetailsRecordUnionList = new ArrayList<IAType>();
-        primaryFeedTypeDetailsRecordUnionList.add(BuiltinType.ANULL);
-        primaryFeedTypeDetailsRecordUnionList.add(PRIMARY_FEED_DETAILS_RECORDTYPE);
-        AUnionType primaryRecordUnion = new AUnionType(primaryFeedTypeDetailsRecordUnionList, null);
-
-        List<IAType> secondaryFeedTypeDetailsRecordUnionList = new ArrayList<IAType>();
-        secondaryFeedTypeDetailsRecordUnionList.add(BuiltinType.ANULL);
-        secondaryFeedTypeDetailsRecordUnionList.add(SECONDARY_FEED_DETAILS_RECORDTYPE);
-        AUnionType secondaryRecordUnion = new AUnionType(secondaryFeedTypeDetailsRecordUnionList, null);
+        AUnionType feedFunctionUnion = AUnionType.createNullableType(BuiltinType.ASTRING);
+        AUnionType primaryRecordUnion = AUnionType.createNullableType(PRIMARY_FEED_DETAILS_RECORDTYPE);
+        AUnionType secondaryRecordUnion = AUnionType.createNullableType(SECONDARY_FEED_DETAILS_RECORDTYPE);
 
         String[] fieldNames = { "DataverseName", "FeedName", "Function", "FeedType", "PrimaryTypeDetails",
                 "SecondaryTypeDetails", "Timestamp" };
@@ -543,7 +513,7 @@ public final class MetadataRecordTypes {
 
     public static final int FEED_TYPE_PRIMARY_ARECORD_ADAPTER_NAME_FIELD_INDEX = 0;
     public static final int FEED_TYPE_PRIMARY_ARECORD_ADAPTER_CONFIGURATION_FIELD_INDEX = 1;
-    
+
     private static final ARecordType createPrimaryFeedDetailsRecordType() throws AsterixException, HyracksDataException {
         AUnorderedListType unorderedAdaptorPropertyListType = new AUnorderedListType(
                 DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
@@ -555,12 +525,13 @@ public final class MetadataRecordTypes {
 
     public static final int FEED_TYPE_SECONDARY_ARECORD_SOURCE_FEED_NAME_FIELD_INDEX = 0;
 
-    private static final ARecordType createSecondaryFeedDetailsRecordType() throws AsterixException, HyracksDataException {
+    private static final ARecordType createSecondaryFeedDetailsRecordType() throws AsterixException,
+            HyracksDataException {
         String[] fieldNames = { "SourceFeedName" };
         IAType[] fieldTypes = { BuiltinType.ASTRING };
         return new ARecordType(null, fieldNames, fieldTypes, true);
     }
-    
+
     public static final int LIBRARY_ARECORD_DATAVERSENAME_FIELD_INDEX = 0;
     public static final int LIBRARY_ARECORD_NAME_FIELD_INDEX = 1;
     public static final int LIBRARY_ARECORD_TIMESTAMP_FIELD_INDEX = 2;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java
index f977998..292f24c 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataSecondaryIndexes.java
@@ -45,18 +45,19 @@ public class MetadataSecondaryIndexes {
         }
 
         GROUPNAME_ON_DATASET_INDEX = new MetadataIndex("Dataset", "GroupName", 3, new IAType[] { BuiltinType.ASTRING,
-                BuiltinType.ASTRING, BuiltinType.ASTRING },
-                (Arrays.asList(Arrays.asList("GroupName"),Arrays.asList("DataverseName"),Arrays.asList("DatasetName"))), 1, null,
+                BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("GroupName"),
+                Arrays.asList("DataverseName"), Arrays.asList("DatasetName"))), 1, null,
                 MetadataPrimaryIndexes.DATASET_DATASET_ID, false, new int[] { 1, 2 });
 
         DATATYPENAME_ON_DATASET_INDEX = new MetadataIndex("Dataset", "DatatypeName", 3, new IAType[] {
-                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList(
-                "DatatypeName"),Arrays.asList("DatasetName"))), 2, null, MetadataPrimaryIndexes.DATASET_DATASET_ID, false, new int[] {
-                0, 2 });
+                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(
+                Arrays.asList("DataverseName"), Arrays.asList("DatatypeName"), Arrays.asList("DatasetName"))), 2, null,
+                MetadataPrimaryIndexes.DATASET_DATASET_ID, false, new int[] { 0, 2 });
 
         DATATYPENAME_ON_DATATYPE_INDEX = new MetadataIndex("Datatype", "DatatypeName", 3, new IAType[] {
-                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING }, (Arrays.asList(Arrays.asList("DataverseName"),Arrays.asList(
-                "NestedDatatypeName"),Arrays.asList("TopDatatypeName"))), 2, null, MetadataPrimaryIndexes.DATATYPE_DATASET_ID, false,
+                BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING },
+                (Arrays.asList(Arrays.asList("DataverseName"), Arrays.asList("NestedDatatypeName"),
+                        Arrays.asList("TopDatatypeName"))), 2, null, MetadataPrimaryIndexes.DATATYPE_DATASET_ID, false,
                 new int[] { 0, 2 });
 
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
index 8b7d8dd..1878bbc 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
@@ -34,7 +34,6 @@ import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.Datatype;
 import edu.uci.ics.asterix.metadata.entities.Dataverse;
 import edu.uci.ics.asterix.metadata.entities.Index;
-import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.entities.NodeGroup;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
@@ -225,10 +224,9 @@ public class AqlCompiledMetadataDeclarations {
         if (dataset.getDatasetType() != DatasetType.INTERNAL) {
             throw new AlgebricksException("Not an internal dataset");
         }
-        InternalDatasetDetails datasetDetails = (InternalDatasetDetails) dataset.getDatasetDetails();
-        List<String> nodeGroup = findNodeGroupNodeNames(datasetDetails.getNodeGroupName());
+        List<String> nodeGroup = findNodeGroupNodeNames(dataset.getNodeGroupName());
         if (nodeGroup == null) {
-            throw new AlgebricksException("Couldn't find node group " + datasetDetails.getNodeGroupName());
+            throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
         }
 
         List<FileSplit> splitArray = new ArrayList<FileSplit>();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index 0ec098d..7162a0e 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -56,7 +56,6 @@ import edu.uci.ics.asterix.formats.base.IDataFormat;
 import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 import edu.uci.ics.asterix.formats.nontagged.AqlLinearizeComparatorFactoryProvider;
 import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
-import edu.uci.ics.asterix.metadata.IDatasetDetails;
 import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.MetadataManager;
 import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
@@ -378,110 +377,111 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
         }
     }
 
-@SuppressWarnings("rawtypes")
-public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedCollectRuntime(JobSpecification jobSpec,
-        IDataSource<AqlSourceId> dataSource) throws AlgebricksException {
-
-    FeedDataSource feedDataSource = (FeedDataSource) dataSource;
-    FeedCollectOperatorDescriptor feedCollector = null;
+    @SuppressWarnings("rawtypes")
+    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedCollectRuntime(JobSpecification jobSpec,
+            IDataSource<AqlSourceId> dataSource) throws AlgebricksException {
 
-    try {
-        ARecordType feedOutputType = (ARecordType) feedDataSource.getItemType();
-        ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
-                .getSerializerDeserializer(feedOutputType);
-        RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
+        FeedDataSource feedDataSource = (FeedDataSource) dataSource;
+        FeedCollectOperatorDescriptor feedCollector = null;
 
-        FeedPolicy feedPolicy = (FeedPolicy) ((AqlDataSource) dataSource).getProperties().get(
-                BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
-        if (feedPolicy == null) {
-            throw new AlgebricksException("Feed not configured with a policy");
-        }
-        feedPolicy.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy.getPolicyName());
-        FeedConnectionId feedConnectionId = new FeedConnectionId(feedDataSource.getId().getDataverseName(),
-                feedDataSource.getId().getDatasourceName(), feedDataSource.getTargetDataset());
-        feedCollector = new FeedCollectOperatorDescriptor(jobSpec, feedConnectionId,
-                feedDataSource.getSourceFeedId(), (ARecordType) feedOutputType, feedDesc,
-                feedPolicy.getProperties(), feedDataSource.getLocation());
+        try {
+            ARecordType feedOutputType = (ARecordType) feedDataSource.getItemType();
+            ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
+                    .getSerializerDeserializer(feedOutputType);
+            RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
+
+            FeedPolicy feedPolicy = (FeedPolicy) ((AqlDataSource) dataSource).getProperties().get(
+                    BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+            if (feedPolicy == null) {
+                throw new AlgebricksException("Feed not configured with a policy");
+            }
+            feedPolicy.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy.getPolicyName());
+            FeedConnectionId feedConnectionId = new FeedConnectionId(feedDataSource.getId().getDataverseName(),
+                    feedDataSource.getId().getDatasourceName(), feedDataSource.getTargetDataset());
+            feedCollector = new FeedCollectOperatorDescriptor(jobSpec, feedConnectionId,
+                    feedDataSource.getSourceFeedId(), (ARecordType) feedOutputType, feedDesc,
+                    feedPolicy.getProperties(), feedDataSource.getLocation());
 
-        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedCollector,
-                determineLocationConstraint(feedDataSource));
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedCollector,
+                    determineLocationConstraint(feedDataSource));
 
-    } catch (Exception e) {
-        throw new AlgebricksException(e);
+        } catch (Exception e) {
+            throw new AlgebricksException(e);
+        }
     }
-}
 
-private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDataSource feedDataSource)
-        throws AsterixException {
-    String[] locationArray = null;
-    String locations = null;;
-    switch (feedDataSource.getSourceFeedType()) {
-        case PRIMARY:
-            switch (feedDataSource.getLocation()) {
-                case SOURCE_FEED_COMPUTE_STAGE:
-                    if (feedDataSource.getFeed().getFeedId().equals(feedDataSource.getSourceFeedId())) {
+    private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDataSource feedDataSource)
+            throws AsterixException {
+        String[] locationArray = null;
+        String locations = null;;
+        switch (feedDataSource.getSourceFeedType()) {
+            case PRIMARY:
+                switch (feedDataSource.getLocation()) {
+                    case SOURCE_FEED_COMPUTE_STAGE:
+                        if (feedDataSource.getFeed().getFeedId().equals(feedDataSource.getSourceFeedId())) {
+                            locationArray = feedDataSource.getLocations();
+                        } else {
+                            Collection<FeedActivity> activities = centralFeedManager.getFeedLoadManager()
+                                    .getFeedActivities();
+                            Iterator<FeedActivity> it = activities.iterator();
+                            FeedActivity activity = null;
+                            while (it.hasNext()) {
+                                activity = it.next();
+                                if (activity.getDataverseName().equals(feedDataSource.getSourceFeedId().getDataverse())
+                                        && activity.getFeedName()
+                                                .equals(feedDataSource.getSourceFeedId().getFeedName())) {
+                                    locations = activity.getFeedActivityDetails().get(
+                                            FeedActivityDetails.COMPUTE_LOCATIONS);
+                                    locationArray = locations.split(",");
+                                    break;
+                                }
+                            }
+                        }
+                        break;
+                    case SOURCE_FEED_INTAKE_STAGE:
                         locationArray = feedDataSource.getLocations();
-                    } else {
-                        Collection<FeedActivity> activities = centralFeedManager.getFeedLoadManager()
-                                .getFeedActivities();
-                        Iterator<FeedActivity> it = activities.iterator();
-                        FeedActivity activity = null;
-                        while (it.hasNext()) {
-                            activity = it.next();
-                            if (activity.getDataverseName().equals(feedDataSource.getSourceFeedId().getDataverse())
-                                    && activity.getFeedName()
-                                            .equals(feedDataSource.getSourceFeedId().getFeedName())) {
-                                locations = activity.getFeedActivityDetails().get(
-                                        FeedActivityDetails.COMPUTE_LOCATIONS);
-                                locationArray = locations.split(",");
+                        break;
+                }
+                break;
+            case SECONDARY:
+                Collection<FeedActivity> activities = centralFeedManager.getFeedLoadManager().getFeedActivities();
+                Iterator<FeedActivity> it = activities.iterator();
+                FeedActivity activity = null;
+                while (it.hasNext()) {
+                    activity = it.next();
+                    if (activity.getDataverseName().equals(feedDataSource.getSourceFeedId().getDataverse())
+                            && activity.getFeedName().equals(feedDataSource.getSourceFeedId().getFeedName())) {
+                        switch (feedDataSource.getLocation()) {
+                            case SOURCE_FEED_INTAKE_STAGE:
+                                locations = activity.getFeedActivityDetails()
+                                        .get(FeedActivityDetails.COLLECT_LOCATIONS);
+                                break;
+                            case SOURCE_FEED_COMPUTE_STAGE:
+                                locations = activity.getFeedActivityDetails()
+                                        .get(FeedActivityDetails.COMPUTE_LOCATIONS);
                                 break;
-                            }
                         }
+                        break;
                     }
-                    break;
-                case SOURCE_FEED_INTAKE_STAGE:
-                    locationArray = feedDataSource.getLocations();
-                    break;
-            }
-            break;
-        case SECONDARY:
-            Collection<FeedActivity> activities = centralFeedManager.getFeedLoadManager().getFeedActivities();
-            Iterator<FeedActivity> it = activities.iterator();
-            FeedActivity activity = null;
-            while (it.hasNext()) {
-                activity = it.next();
-                if (activity.getDataverseName().equals(feedDataSource.getSourceFeedId().getDataverse())
-                        && activity.getFeedName().equals(feedDataSource.getSourceFeedId().getFeedName())) {
-                    switch (feedDataSource.getLocation()) {
-                        case SOURCE_FEED_INTAKE_STAGE:
-                            locations = activity.getFeedActivityDetails()
-                                    .get(FeedActivityDetails.COLLECT_LOCATIONS);
-                            break;
-                        case SOURCE_FEED_COMPUTE_STAGE:
-                            locations = activity.getFeedActivityDetails()
-                                    .get(FeedActivityDetails.COMPUTE_LOCATIONS);
-                            break;
-                    }
-                    break;
                 }
-            }
 
-            if (locations != null) {
-                locationArray = locations.split(",");
-            } else {
-                String message = "Unable to discover location(s) for source feed data hand-off "
-                        + feedDataSource.getSourceFeedId();
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe(message);
+                if (locations != null) {
+                    locationArray = locations.split(",");
+                } else {
+                    String message = "Unable to discover location(s) for source feed data hand-off "
+                            + feedDataSource.getSourceFeedId();
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe(message);
+                    }
+                    throw new AsterixException(message);
                 }
-                throw new AsterixException(message);
-            }
-            break;
+                break;
+        }
+        AlgebricksAbsolutePartitionConstraint locationConstraint = new AlgebricksAbsolutePartitionConstraint(
+                locationArray);
+        return locationConstraint;
     }
-    AlgebricksAbsolutePartitionConstraint locationConstraint = new AlgebricksAbsolutePartitionConstraint(
-            locationArray);
-    return locationConstraint;
-}
+
     private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildLoadableDatasetScan(JobSpecification jobSpec,
             LoadableDataSource alds, IAdapterFactory adapterFactory, RecordDescriptor rDesc, boolean isPKAutoGenerated,
             List<List<String>> primaryKeys, ARecordType recType, int pkIndex) throws AlgebricksException {
@@ -545,8 +545,8 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
     }
 
     private IAdapterFactory getConfiguredAdapterFactory(Dataset dataset, String adapterName,
-            Map<String, String> configuration, IAType itemType, boolean isPKAutoGenerated, List<List<String>> primaryKeys)
-            throws AlgebricksException {
+            Map<String, String> configuration, IAType itemType, boolean isPKAutoGenerated,
+            List<List<String>> primaryKeys) throws AlgebricksException {
         IAdapterFactory adapterFactory;
         DatasourceAdapter adapterEntity;
         String adapterFactoryClassname;
@@ -583,8 +583,8 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
                 // TODO Check this call, result of merge from master! 
                 //  ((IGenericAdapterFactory) adapterFactory).setFiles(files);
             }
-            
-           return adapterFactory; 
+
+            return adapterFactory;
         } catch (Exception e) {
             throw new AlgebricksException("Unable to create adapter " + e);
         }
@@ -592,7 +592,7 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
 
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildExternalDatasetDataScannerRuntime(
             JobSpecification jobSpec, IAType itemType, IAdapterFactory adapterFactory, IDataFormat format)
-                    throws AlgebricksException {
+            throws AlgebricksException {
         if (itemType.getTypeTag() != ATypeTag.RECORD) {
             throw new AlgebricksException("Can only scan datasets of records.");
         }
@@ -663,7 +663,6 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
         return new Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IFeedAdapterFactory>(feedIngestor,
                 partitionConstraint, adapterFactory);
     }
-   
 
     public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildBtreeRuntime(JobSpecification jobSpec,
             List<LogicalVariable> outputVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
@@ -775,12 +774,12 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
                                 new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
                                 compactionInfo.second, isSecondary ? new SecondaryIndexOperationTrackerProvider(
                                         dataset.getDatasetId()) : new PrimaryIndexOperationTrackerProvider(
-                                                dataset.getDatasetId()), rtcProvider,
-                                                LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                                                storageProperties.getBloomFilterFalsePositiveRate(), !isSecondary, filterTypeTraits,
-                                                filterCmpFactories, btreeFields, filterFields, !temp), retainInput, retainNull,
-                                                context.getNullWriterFactory(), searchCallbackFactory, minFilterFieldIndexes,
-                                                maxFilterFieldIndexes);
+                                        dataset.getDatasetId()), rtcProvider,
+                                LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                                storageProperties.getBloomFilterFalsePositiveRate(), !isSecondary, filterTypeTraits,
+                                filterCmpFactories, btreeFields, filterFields, !temp), retainInput, retainNull,
+                        context.getNullWriterFactory(), searchCallbackFactory, minFilterFieldIndexes,
+                        maxFilterFieldIndexes);
             } else {
                 // External dataset <- use the btree with buddy btree->
                 // Be Careful of Key Start Index ?
@@ -788,9 +787,9 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
                 ExternalBTreeWithBuddyDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(
                         compactionInfo.first, compactionInfo.second, new SecondaryIndexOperationTrackerProvider(
                                 dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE, getStorageProperties()
+                        LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE, getStorageProperties()
                                 .getBloomFilterFalsePositiveRate(), buddyBreeFields,
-                                ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, this), !temp);
+                        ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, this), !temp);
                 btreeSearchOp = new ExternalBTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, rtcProvider,
                         rtcProvider, spPc.first, typeTraits, comparatorFactories, bloomFilterKeyFields, lowKeyFields,
                         highKeyFields, lowKeyInclusive, highKeyInclusive, indexDataflowHelperFactory, retainInput,
@@ -892,12 +891,12 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
                                 new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
                                 compactionInfo.second, new SecondaryIndexOperationTrackerProvider(
                                         dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                        LSMRTreeIOOperationCallbackFactory.INSTANCE, proposeLinearizer(
-                                                nestedKeyType.getTypeTag(), comparatorFactories.length),
-                                                storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields,
-                                                filterTypeTraits, filterCmpFactories, filterFields, !temp), retainInput, retainNull,
-                                                context.getNullWriterFactory(), searchCallbackFactory, minFilterFieldIndexes,
-                                                maxFilterFieldIndexes);
+                                LSMRTreeIOOperationCallbackFactory.INSTANCE, proposeLinearizer(
+                                        nestedKeyType.getTypeTag(), comparatorFactories.length),
+                                storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields,
+                                filterTypeTraits, filterCmpFactories, filterFields, !temp), retainInput, retainNull,
+                        context.getNullWriterFactory(), searchCallbackFactory, minFilterFieldIndexes,
+                        maxFilterFieldIndexes);
 
             } else {
                 // External Dataset
@@ -995,7 +994,8 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
         IAType itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, aqlId.getDataverseName(), tName).getDatatype();
         AqlDataSourceType datasourceType = dataset.getDatasetType().equals(DatasetType.EXTERNAL) ? AqlDataSourceType.EXTERNAL_DATASET
                 : AqlDataSourceType.INTERNAL_DATASET;
-        return new DatasetDataSource(aqlId, aqlId.getDataverseName(), aqlId.getDatasourceName(), itemType, datasourceType);
+        return new DatasetDataSource(aqlId, aqlId.getDataverseName(), aqlId.getDatasourceName(), itemType,
+                datasourceType);
     }
 
     @Override
@@ -1092,9 +1092,9 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
                     new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
                             compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(
                                     dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                    LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                                    storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
-                                    filterCmpFactories, btreeFields, filterFields, !temp));
+                            LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                            storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
+                            filterCmpFactories, btreeFields, filterFields, !temp));
             return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeBulkLoad,
                     splitsAndConstraint.second);
         } catch (MetadataException me) {
@@ -1106,7 +1106,7 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
             List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
             RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, boolean bulkload)
-                    throws AlgebricksException {
+            throws AlgebricksException {
 
         String datasetName = dataSource.getId().getDatasourceName();
         Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName);
@@ -1169,30 +1169,30 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
             IModificationOperationCallbackFactory modificationCallbackFactory = temp ? new TempDatasetPrimaryIndexModificationOperationCallbackFactory(
                     jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE)
-            : new PrimaryIndexModificationOperationCallbackFactory(jobId, datasetId, primaryKeyFields,
-                    txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
+                    : new PrimaryIndexModificationOperationCallbackFactory(jobId, datasetId, primaryKeyFields,
+                            txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
 
-                    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
-                            dataset, mdTxnCtx);
-                    IIndexDataflowHelperFactory idfh = new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                            datasetId), compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(
-                                    dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                    LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
-                                    true, filterTypeTraits, filterCmpFactories, btreeFields, filterFields, !temp);
-                    IOperatorDescriptor op;
-                    if (bulkload) {
-                        long numElementsHint = getCardinalityPerPartitionHint(dataset);
-                        op = new TreeIndexBulkLoadOperatorDescriptor(spec, recordDesc, appContext.getStorageManagerInterface(),
-                                appContext.getIndexLifecycleManagerProvider(), splitsAndConstraint.first, typeTraits,
-                                comparatorFactories, bloomFilterKeyFields, fieldPermutation,
-                                GlobalConfig.DEFAULT_TREE_FILL_FACTOR, true, numElementsHint, true, idfh);
-                    } else {
-                        op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
-                                appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
-                                splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
-                                fieldPermutation, indexOp, idfh, null, modificationCallbackFactory, true, indexName);
-                    }
-                    return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
+                    dataset, mdTxnCtx);
+            IIndexDataflowHelperFactory idfh = new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                    datasetId), compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(
+                    dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                    LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties.getBloomFilterFalsePositiveRate(),
+                    true, filterTypeTraits, filterCmpFactories, btreeFields, filterFields, !temp);
+            IOperatorDescriptor op;
+            if (bulkload) {
+                long numElementsHint = getCardinalityPerPartitionHint(dataset);
+                op = new TreeIndexBulkLoadOperatorDescriptor(spec, recordDesc, appContext.getStorageManagerInterface(),
+                        appContext.getIndexLifecycleManagerProvider(), splitsAndConstraint.first, typeTraits,
+                        comparatorFactories, bloomFilterKeyFields, fieldPermutation,
+                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR, true, numElementsHint, true, idfh);
+            } else {
+                op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
+                        appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
+                        splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
+                        fieldPermutation, indexOp, idfh, null, modificationCallbackFactory, true, indexName);
+            }
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
 
         } catch (MetadataException me) {
             throw new AlgebricksException(me);
@@ -1204,7 +1204,7 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
             List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
             RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, boolean bulkload)
-                    throws AlgebricksException {
+            throws AlgebricksException {
         return getInsertOrDeleteRuntime(IndexOperation.INSERT, dataSource, propagatedSchema, typeEnv, keys, payload,
                 additionalNonKeyFields, recordDesc, context, spec, bulkload);
     }
@@ -1332,7 +1332,7 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
             AsterixTupleFilterFactory filterFactory, RecordDescriptor recordDesc, JobGenContext context,
             JobSpecification spec, IndexOperation indexOp, IndexType indexType, boolean bulkload)
-                    throws AlgebricksException {
+            throws AlgebricksException {
 
         // Sanity checks.
         if (primaryKeys.size() > 1) {
@@ -1522,7 +1522,7 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
             List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields,
             ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec)
-                    throws AlgebricksException {
+            throws AlgebricksException {
         return getIndexInsertOrDeleteRuntime(IndexOperation.DELETE, dataSourceIndex, propagatedSchema, inputSchemas,
                 typeEnv, primaryKeys, secondaryKeys, additionalNonKeyFields, filterExpr, recordDesc, context, spec,
                 false);
@@ -1530,7 +1530,7 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
 
     private AsterixTupleFilterFactory createTupleFilterFactory(IOperatorSchema[] inputSchemas,
             IVariableTypeEnvironment typeEnv, ILogicalExpression filterExpr, JobGenContext context)
-                    throws AlgebricksException {
+            throws AlgebricksException {
         // No filtering condition.
         if (filterExpr == null) {
             return null;
@@ -1644,37 +1644,37 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             IModificationOperationCallbackFactory modificationCallbackFactory = temp ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(
                     jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
                     ResourceType.LSM_BTREE) : new SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
-                            modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
-
-                    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
-                            dataset, mdTxnCtx);
-                    IIndexDataflowHelperFactory idfh = new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                            datasetId), compactionInfo.first, compactionInfo.second,
-                            new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                            storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
-                            btreeFields, filterFields, !temp);
-                    IOperatorDescriptor op;
-                    if (bulkload) {
-                        long numElementsHint = getCardinalityPerPartitionHint(dataset);
-                        op = new TreeIndexBulkLoadOperatorDescriptor(spec, recordDesc, appContext.getStorageManagerInterface(),
-                                appContext.getIndexLifecycleManagerProvider(), splitsAndConstraint.first, typeTraits,
-                                comparatorFactories, bloomFilterKeyFields, fieldPermutation,
-                                GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, numElementsHint, false, idfh);
-                    } else {
-                        op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
-                                appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
-                                splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
-                                fieldPermutation, indexOp, new LSMBTreeDataflowHelperFactory(
-                                        new AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first,
-                                        compactionInfo.second, new SecondaryIndexOperationTrackerProvider(
-                                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                                LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                                                storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
-                                                filterCmpFactories, btreeFields, filterFields, !temp), filterFactory,
-                                                modificationCallbackFactory, false, indexName);
-                    }
-                    return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+                    modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
+
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
+                    dataset, mdTxnCtx);
+            IIndexDataflowHelperFactory idfh = new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                    datasetId), compactionInfo.first, compactionInfo.second,
+                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                    storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits, filterCmpFactories,
+                    btreeFields, filterFields, !temp);
+            IOperatorDescriptor op;
+            if (bulkload) {
+                long numElementsHint = getCardinalityPerPartitionHint(dataset);
+                op = new TreeIndexBulkLoadOperatorDescriptor(spec, recordDesc, appContext.getStorageManagerInterface(),
+                        appContext.getIndexLifecycleManagerProvider(), splitsAndConstraint.first, typeTraits,
+                        comparatorFactories, bloomFilterKeyFields, fieldPermutation,
+                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, numElementsHint, false, idfh);
+            } else {
+                op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
+                        appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
+                        splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
+                        fieldPermutation, indexOp, new LSMBTreeDataflowHelperFactory(
+                                new AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first,
+                                compactionInfo.second, new SecondaryIndexOperationTrackerProvider(
+                                        dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                                LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                                storageProperties.getBloomFilterFalsePositiveRate(), false, filterTypeTraits,
+                                filterCmpFactories, btreeFields, filterFields, !temp), filterFactory,
+                        modificationCallbackFactory, false, indexName);
+            }
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
         } catch (MetadataException e) {
             throw new AlgebricksException(e);
         } catch (IOException e) {
@@ -1845,46 +1845,46 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             IModificationOperationCallbackFactory modificationCallbackFactory = temp ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(
                     jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
                     ResourceType.LSM_INVERTED_INDEX) : new SecondaryIndexModificationOperationCallbackFactory(jobId,
-                            datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
-                            ResourceType.LSM_INVERTED_INDEX);
-
-                    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
-                            dataset, mdTxnCtx);
-                    IIndexDataflowHelperFactory indexDataFlowFactory;
-                    if (!isPartitioned) {
-                        indexDataFlowFactory = new LSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                                datasetId), compactionInfo.first, compactionInfo.second,
-                                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
-                                storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
-                                filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                                invertedIndexFieldsForNonBulkLoadOps, !temp);
-                    } else {
-                        indexDataFlowFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
-                                new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
-                                compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
-                                storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
-                                filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
-                                invertedIndexFieldsForNonBulkLoadOps, !temp);
-                    }
-                    IOperatorDescriptor op;
-                    if (bulkload) {
-                        long numElementsHint = getCardinalityPerPartitionHint(dataset);
-                        op = new LSMInvertedIndexBulkLoadOperatorDescriptor(spec, recordDesc, fieldPermutation, false,
-                                numElementsHint, false, appContext.getStorageManagerInterface(), splitsAndConstraint.first,
-                                appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
-                                invListsTypeTraits, invListComparatorFactories, tokenizerFactory, indexDataFlowFactory);
-                    } else {
-                        op = new AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor(spec, recordDesc,
-                                appContext.getStorageManagerInterface(), splitsAndConstraint.first,
-                                appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
-                                invListsTypeTraits, invListComparatorFactories, tokenizerFactory, fieldPermutation, indexOp,
-                                indexDataFlowFactory, filterFactory, modificationCallbackFactory, indexName);
-                    }
-                    return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+                    datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
+                    ResourceType.LSM_INVERTED_INDEX);
+
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
+                    dataset, mdTxnCtx);
+            IIndexDataflowHelperFactory indexDataFlowFactory;
+            if (!isPartitioned) {
+                indexDataFlowFactory = new LSMInvertedIndexDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
+                        datasetId), compactionInfo.first, compactionInfo.second,
+                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
+                        invertedIndexFieldsForNonBulkLoadOps, !temp);
+            } else {
+                indexDataFlowFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
+                        new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
+                        compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                        LSMInvertedIndexIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), invertedIndexFields, filterTypeTraits,
+                        filterCmpFactories, filterFields, filterFieldsForNonBulkLoadOps,
+                        invertedIndexFieldsForNonBulkLoadOps, !temp);
+            }
+            IOperatorDescriptor op;
+            if (bulkload) {
+                long numElementsHint = getCardinalityPerPartitionHint(dataset);
+                op = new LSMInvertedIndexBulkLoadOperatorDescriptor(spec, recordDesc, fieldPermutation, false,
+                        numElementsHint, false, appContext.getStorageManagerInterface(), splitsAndConstraint.first,
+                        appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
+                        invListsTypeTraits, invListComparatorFactories, tokenizerFactory, indexDataFlowFactory);
+            } else {
+                op = new AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor(spec, recordDesc,
+                        appContext.getStorageManagerInterface(), splitsAndConstraint.first,
+                        appContext.getIndexLifecycleManagerProvider(), tokenTypeTraits, tokenComparatorFactories,
+                        invListsTypeTraits, invListComparatorFactories, tokenizerFactory, fieldPermutation, indexOp,
+                        indexDataFlowFactory, filterFactory, modificationCallbackFactory, indexName);
+            }
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
         } catch (MetadataException e) {
             throw new AlgebricksException(e);
         } catch (IOException e) {
@@ -1993,41 +1993,41 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             IModificationOperationCallbackFactory modificationCallbackFactory = temp ? new TempDatasetSecondaryIndexModificationOperationCallbackFactory(
                     jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp,
                     ResourceType.LSM_RTREE) : new SecondaryIndexModificationOperationCallbackFactory(jobId, datasetId,
-                            modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE);
-
-                    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
-                            dataset, mdTxnCtx);
-                    IIndexDataflowHelperFactory idfh = new LSMRTreeDataflowHelperFactory(valueProviderFactories,
-                            RTreePolicyType.RTREE, primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(
-                                    dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
-                                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
-                                    proposeLinearizer(nestedKeyType.getTypeTag(), comparatorFactories.length),
-                                    storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields, filterTypeTraits,
-                                    filterCmpFactories, filterFields, !temp);
-                    IOperatorDescriptor op;
-                    if (bulkload) {
-                        long numElementsHint = getCardinalityPerPartitionHint(dataset);
-                        op = new TreeIndexBulkLoadOperatorDescriptor(spec, recordDesc, appContext.getStorageManagerInterface(),
-                                appContext.getIndexLifecycleManagerProvider(), splitsAndConstraint.first, typeTraits,
-                                primaryComparatorFactories, btreeFields, fieldPermutation,
-                                GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, numElementsHint, false, idfh);
-                    } else {
-                        op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
-                                appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
-                                splitsAndConstraint.first, typeTraits, comparatorFactories, null, fieldPermutation, indexOp,
-                                new LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
-                                        primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(dataset
-                                                .getDatasetId()), compactionInfo.first, compactionInfo.second,
-                                                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                                                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                                                LSMRTreeIOOperationCallbackFactory.INSTANCE, proposeLinearizer(
-                                                        nestedKeyType.getTypeTag(), comparatorFactories.length), storageProperties
-                                                        .getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields, filterTypeTraits,
-                                                        filterCmpFactories, filterFields, !temp), filterFactory,
-                                                        modificationCallbackFactory, false, indexName);
-                    }
-                    return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
+                    modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE);
+
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
+                    dataset, mdTxnCtx);
+            IIndexDataflowHelperFactory idfh = new LSMRTreeDataflowHelperFactory(valueProviderFactories,
+                    RTreePolicyType.RTREE, primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(
+                            dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                    new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
+                    proposeLinearizer(nestedKeyType.getTypeTag(), comparatorFactories.length),
+                    storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields, filterTypeTraits,
+                    filterCmpFactories, filterFields, !temp);
+            IOperatorDescriptor op;
+            if (bulkload) {
+                long numElementsHint = getCardinalityPerPartitionHint(dataset);
+                op = new TreeIndexBulkLoadOperatorDescriptor(spec, recordDesc, appContext.getStorageManagerInterface(),
+                        appContext.getIndexLifecycleManagerProvider(), splitsAndConstraint.first, typeTraits,
+                        primaryComparatorFactories, btreeFields, fieldPermutation,
+                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, numElementsHint, false, idfh);
+            } else {
+                op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
+                        appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
+                        splitsAndConstraint.first, typeTraits, comparatorFactories, null, fieldPermutation, indexOp,
+                        new LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
+                                primaryComparatorFactories, new AsterixVirtualBufferCacheProvider(dataset
+                                        .getDatasetId()), compactionInfo.first, compactionInfo.second,
+                                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                                LSMRTreeIOOperationCallbackFactory.INSTANCE, proposeLinearizer(
+                                        nestedKeyType.getTypeTag(), comparatorFactories.length), storageProperties
+                                        .getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields, filterTypeTraits,
+                                filterCmpFactories, filterFields, !temp), filterFactory,
+                        modificationCallbackFactory, false, indexName);
+            }
+            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);
         } catch (MetadataException | IOException e) {
             throw new AlgebricksException(e);
         }
@@ -2067,8 +2067,8 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
             numElementsHint = Long.parseLong(numElementsHintString);
         }
         int numPartitions = 0;
-        List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx,
-                dataset.getDatasetDetails().getNodeGroupName()).getNodeNames();
+        List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName())
+                .getNodeNames();
         for (String nd : nodeGroup) {
             numPartitions += AsterixClusterProperties.INSTANCE.getNumberOfIODevices(nd);
         }
@@ -2131,11 +2131,10 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
         try {
             File relPathFile = new File(getRelativePath(dataverseName, datasetName + "_idx_" + targetIdxName));
             Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
-            IDatasetDetails datasetDetails = dataset.getDatasetDetails();
-            List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, datasetDetails.getNodeGroupName())
+            List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName())
                     .getNodeNames();
             if (nodeGroup == null) {
-                throw new AlgebricksException("Couldn't find node group " + datasetDetails.getNodeGroupName());
+                throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
             }
 
             List<FileSplit> splitArray = new ArrayList<FileSplit>();
@@ -2146,7 +2145,7 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
                     throw new AlgebricksException("Node " + nd + " has no stores.");
                 } else {
                     int numIODevices;
-                    if (datasetDetails.getNodeGroupName().compareTo(MetadataConstants.METADATA_NODEGROUP_NAME) == 0) {
+                    if (dataset.getNodeGroupName().compareTo(MetadataConstants.METADATA_NODEGROUP_NAME) == 0) {
                         numIODevices = 1;
                     } else {
                         numIODevices = AsterixClusterProperties.INSTANCE.getNumberOfIODevices(nd);
@@ -2309,11 +2308,10 @@ private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDa
         try {
             File relPathFile = new File(getRelativePath(dataverseName, datasetName + "_idx_" + targetIdxName));
             Dataset dataset = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
-            ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
-            List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, datasetDetails.getNodeGroupName())
+            List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName())
                     .getNodeNames();
             if (nodeGroup == null) {
-                throw new AlgebricksException("Couldn't find node group " + datasetDetails.getNodeGroupName());
+                throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
             }
 
             List<FileSplit> splitArray = new ArrayList<FileSplit>();

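Aside from the whitespace-only re-indentation, the substantive change running through the hunks above is that the node group name is now read from the Dataset object itself rather than through its IDatasetDetails (note the removed datasetDetails locals and the dropped ExternalDatasetDetails cast). A minimal sketch of what the relocated accessor and the resulting caller pattern might look like is below; only the getNodeGroupName() call and the MetadataManager.getNodegroup() usage are taken from the diff, while the field and constructor shown are illustrative assumptions, not the actual class layout.

    // Hypothetical sketch: only getNodeGroupName() itself appears in the diff;
    // the field and constructor here are assumed for illustration.
    public class Dataset {
        private final String nodeGroupName;

        public Dataset(String nodeGroupName /* , other metadata fields */) {
            this.nodeGroupName = nodeGroupName;
        }

        // Previously reached via dataset.getDatasetDetails().getNodeGroupName()
        public String getNodeGroupName() {
            return nodeGroupName;
        }
    }

    // Caller pattern after the change, as seen in the hunks above:
    List<String> nodeGroup = MetadataManager.INSTANCE
            .getNodegroup(mdTxnCtx, dataset.getNodeGroupName())
            .getNodeNames();

The same accessor is what DatasetDataSource and LoadableDataSource below now pass to DefaultNodeGroupDomain, so the node group is resolved the same way whether the dataset is internal or external.
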
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java
index 4bfa3dd..ea52419 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/DatasetDataSource.java
@@ -81,7 +81,7 @@ public class DatasetDataSource extends AqlDataSource {
             schemaTypes[i] = recordType.getSubFieldType(partitioningKeys.get(i));
         }
         schemaTypes[n] = itemType;
-        domain = new DefaultNodeGroupDomain(DatasetUtils.getNodegroupName(dataset));
+        domain = new DefaultNodeGroupDomain(dataset.getNodeGroupName());
     }
 
     private void initExternalDataset(IAType itemType) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/LoadableDataSource.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/LoadableDataSource.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/LoadableDataSource.java
index c91e3f6..a950b71 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/LoadableDataSource.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/LoadableDataSource.java
@@ -57,7 +57,7 @@ public class LoadableDataSource extends AqlDataSource {
         this.adapter = adapter;
         this.adapterProperties = properties;
         partitioningKeys = DatasetUtils.getPartitioningKeys(targetDataset);
-        domain = new DefaultNodeGroupDomain(DatasetUtils.getNodegroupName(targetDataset));
+        domain = new DefaultNodeGroupDomain(targetDataset.getNodeGroupName());
         ARecordType recType = (ARecordType) itemType;
         isPKAutoGenerated = ((InternalDatasetDetails) targetDataset.getDatasetDetails()).isAutogenerated();
         if (isPKAutoGenerated) {