Posted to commits@asterixdb.apache.org by im...@apache.org on 2015/08/20 04:27:50 UTC

[5/9] incubator-asterixdb git commit: Changed metadata storage format for nullable field types. Moved field name generation to the client out of metadata node code. Changed naming scheme for autogenerated types. Moved GroupName, CompactionPolicy & Compac

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm
index 3f69f4f..aaa5400 100644
--- a/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/cross-dataverse/cross-dv19/cross-dv19.1.adm
@@ -1,8 +1,8 @@
-[ { "DataverseName": "test1", "DatasetName": "TwitterData", "DataTypeName": "Tweet", "DatasetType": "EXTERNAL", "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ], "GroupName": "DEFAULT_NG_ALL_NODES", "LastRefreshTime": datetime("2014-06-08T20:30:43.724Z"), "TransactionState": 0, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 107, "PendingOp": 0 }
-, { "DataverseName": "test1", "DatasetName": "t1", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:42 PDT 2014", "DatasetId": 101, "PendingOp": 0 }
-, { "DataverseName": "test1", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 104, "PendingOp": 0 }
-, { "DataverseName": "test1", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 105, "PendingOp": 0 }
-, { "DataverseName": "test2", "DatasetName": "t2", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:42 PDT 2014", "DatasetId": 102, "PendingOp": 0 }
-, { "DataverseName": "test2", "DatasetName": "t3", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:42 PDT 2014", "DatasetId": 103, "PendingOp": 0 }
-, { "DataverseName": "test2", "DatasetName": "t4", "DataTypeName": "testtype", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "GroupName": "DEFAULT_NG_ALL_NODES", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 106, "PendingOp": 0 }
+[ { "DataverseName": "test1", "DatasetName": "TwitterData", "DatatypeName": "Tweet", "DatasetType": "EXTERNAL", "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": null, "ExternalDetails": { "DatasourceAdapter": "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter", "Properties": [ { "Name": "path", "Value": "nc1://data/twitter/extrasmalltweets.txt" }, { "Name": "format", "Value": "adm" } ], "LastRefreshTime": datetime("2014-06-08T20:30:43.724Z"), "TransactionState": 0 }, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 107i32, "PendingOp": 0i32 }
+, { "DataverseName": "test1", "DatasetName": "t1", "DatatypeName": "testtype", "DatasetType": "INTERNAL", "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:42 PDT 2014", "DatasetId": 101i32, "PendingOp": 0i32 }
+, { "DataverseName": "test1", "DatasetName": "t2", "DatatypeName": "testtype", "DatasetType": "INTERNAL", "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 104i32, "PendingOp": 0i32 }
+, { "DataverseName": "test1", "DatasetName": "t3", "DatatypeName": "testtype", "DatasetType": "INTERNAL", "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 105i32, "PendingOp": 0i32 }
+, { "DataverseName": "test2", "DatasetName": "t2", "DatatypeName": "testtype", "DatasetType": "INTERNAL", "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:42 PDT 2014", "DatasetId": 102i32, "PendingOp": 0i32 }
+, { "DataverseName": "test2", "DatasetName": "t3", "DatatypeName": "testtype", "DatasetType": "INTERNAL", "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:42 PDT 2014", "DatasetId": 103i32, "PendingOp": 0i32 }
+, { "DataverseName": "test2", "DatasetName": "t4", "DatatypeName": "testtype", "DatasetType": "INTERNAL", "GroupName": "DEFAULT_NG_ALL_NODES", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "id" ] ], "PrimaryKey": [ [ "id" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:30:43 PDT 2014", "DatasetId": 106i32, "PendingOp": 0i32 }
  ]
\ No newline at end of file

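The expected-result change above reflects the relocation described in the commit message: GroupName, CompactionPolicy and CompactionPolicyProperties now sit at the top level of each Dataset record rather than inside InternalDetails/ExternalDetails, and the integer metadata fields (DatasetId, PendingOp) now print with an explicit i32 suffix. For orientation, the test1.t1 record corresponds to a plain dataset DDL with no nodegroup or compaction clause, so the record picks up the defaults visible above (DEFAULT_NG_ALL_NODES and the prefix policy). A minimal sketch, assuming only the names and the "id" key taken from the record:

    use dataverse test1;
    create dataset t1(testtype) primary key id;
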
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype/create-drop-cltype.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype/create-drop-cltype.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype/create-drop-cltype.1.adm
index 5570d53..4ac62a7 100644
--- a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype/create-drop-cltype.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-cltype/create-drop-cltype.1.adm
@@ -1,2 +1,2 @@
-[ { "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "id", "FieldType": "int32" }, { "FieldName": "salary", "FieldType": "Field_salary_in_TestType" }, { "FieldName": "name", "FieldType": "string" }, { "FieldName": "durtn", "FieldType": "Field_durtn_in_TestType" }, { "FieldName": "inter", "FieldType": "interval" }, { "FieldName": "dt", "FieldType": "Field_dt_in_TestType" }, { "FieldName": "tm", "FieldType": "time" }, { "FieldName": "pt", "FieldType": "Field_pt_in_TestType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:10:43 PST 2013" }
+[ { "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "Record": { "IsOpen": false, "Fields": [ { "FieldName": "id", "FieldType": "int32", "IsNullable": false }, { "FieldName": "salary", "FieldType": "double", "IsNullable": true }, { "FieldName": "name", "FieldType": "string", "IsNullable": false }, { "FieldName": "durtn", "FieldType": "duration", "IsNullable": true }, { "FieldName": "inter", "FieldType": "interval", "IsNullable": false }, { "FieldName": "dt", "FieldType": "date", "IsNullable": true }, { "FieldName": "tm", "FieldType": "time", "IsNullable": false }, { "FieldName": "pt", "FieldType": "point", "IsNullable": true } ] }, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:12:10 PST 2013" }
  ]

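Under the old format, nullable fields of TestType were represented through autogenerated union types (Field_salary_in_TestType and similar); the new format stores the base type name together with an IsNullable flag per field. Reconstructed from the field list in the new expected result (fields marked IsNullable: true become optional), the closed type under test looks roughly like this:

    create type TestType as closed {
      id: int32,
      salary: double?,
      name: string,
      durtn: duration?,
      inter: interval,
      dt: date?,
      tm: time,
      pt: point?
    }

The open-type variant in the next hunk differs only in being declared "as open".
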
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype/create-drop-opntype.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype/create-drop-opntype.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype/create-drop-opntype.1.adm
index 8d1c67f..5cbcd6c 100644
--- a/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype/create-drop-opntype.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/dml/create-drop-opntype/create-drop-opntype.1.adm
@@ -1,2 +1,2 @@
-[ { "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "id", "FieldType": "int32" }, { "FieldName": "salary", "FieldType": "Field_salary_in_TestType" }, { "FieldName": "name", "FieldType": "string" }, { "FieldName": "durtn", "FieldType": "Field_durtn_in_TestType" }, { "FieldName": "inter", "FieldType": "interval" }, { "FieldName": "dt", "FieldType": "Field_dt_in_TestType" }, { "FieldName": "tm", "FieldType": "time" }, { "FieldName": "pt", "FieldType": "Field_pt_in_TestType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:12:10 PST 2013" }
+[ { "DataverseName": "test", "DatatypeName": "TestType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "id", "FieldType": "int32", "IsNullable": false }, { "FieldName": "salary", "FieldType": "double", "IsNullable": true }, { "FieldName": "name", "FieldType": "string", "IsNullable": false }, { "FieldName": "durtn", "FieldType": "duration", "IsNullable": true }, { "FieldName": "inter", "FieldType": "interval", "IsNullable": false }, { "FieldName": "dt", "FieldType": "date", "IsNullable": true }, { "FieldName": "tm", "FieldType": "time", "IsNullable": false }, { "FieldName": "pt", "FieldType": "point", "IsNullable": true } ] }, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Feb 11 18:12:10 PST 2013" }
  ]

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes/drop-empty-secondary-indexes.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes/drop-empty-secondary-indexes.1.adm b/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes/drop-empty-secondary-indexes.1.adm
index ae02b69..954ba16 100644
--- a/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes/drop-empty-secondary-indexes.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/dml/drop-empty-secondary-indexes/drop-empty-secondary-indexes.1.adm
@@ -1,4 +1,4 @@
-[ { "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ [ "DataverseName" ], [ "DatatypeName" ], [ "DatasetName" ] ], "IsPrimary": false, "Timestamp": "Tue Sep 23 14:44:50 PDT 2014", "PendingOp": 0, "SearchKeyType": [ "null", "null", "null" ] }
-, { "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ [ "GroupName" ], [ "DataverseName" ], [ "DatasetName" ] ], "IsPrimary": false, "Timestamp": "Tue Sep 23 14:44:50 PDT 2014", "PendingOp": 0, "SearchKeyType": [ "null", "null", "null" ] }
-, { "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ [ "DataverseName" ], [ "NestedDatatypeName" ], [ "TopDatatypeName" ] ], "IsPrimary": false, "Timestamp": "Tue Sep 23 14:44:50 PDT 2014", "PendingOp": 0, "SearchKeyType": [ "null", "null", "null" ] }
+[ { "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ [ "DataverseName" ], [ "DatatypeName" ], [ "DatasetName" ] ], "IsPrimary": false, "Timestamp": "Tue Sep 23 14:44:50 PDT 2014", "PendingOp": 0 }
+, { "DataverseName": "Metadata", "DatasetName": "Dataset", "IndexName": "GroupName", "IndexStructure": "BTREE", "SearchKey": [ [ "GroupName" ], [ "DataverseName" ], [ "DatasetName" ] ], "IsPrimary": false, "Timestamp": "Tue Sep 23 14:44:50 PDT 2014", "PendingOp": 0 }
+, { "DataverseName": "Metadata", "DatasetName": "Datatype", "IndexName": "DatatypeName", "IndexStructure": "BTREE", "SearchKey": [ [ "DataverseName" ], [ "NestedDatatypeName" ], [ "TopDatatypeName" ] ], "IsPrimary": false, "Timestamp": "Tue Sep 23 14:44:50 PDT 2014", "PendingOp": 0 }
  ]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm
index d0db5d2..c180ae8 100644
--- a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/udf23/udf23.1.adm
@@ -1,7 +1,7 @@
-[ { "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DataTypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "CompactionPolicy" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "CompactionPolicy" ] ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 13, "PendingOp": 0 }
-, { "DataverseName": "Metadata", "DatasetName": "Dataset", "DataTypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "DatasetName" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "DatasetName" ] ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 2, "PendingOp": 0 }
-, { "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DataTypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "Name" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "Name" ] ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 8, "PendingOp": 0 }
-, { "DataverseName": "Metadata", "DatasetName": "Datatype", "DataTypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "DatatypeName" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "DatatypeName" ] ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 3, "PendingOp": 0 }
-, { "DataverseName": "Metadata", "DatasetName": "Dataverse", "DataTypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ] ], "PrimaryKey": [ [ "DataverseName" ] ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 1, "PendingOp": 0 }
-, { "DataverseName": "Metadata", "DatasetName": "ExternalFile", "DataTypeName": "ExternalFileRecordType", "DatasetType": "INTERNAL", "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "DatasetName" ], [ "FileNumber" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "DatasetName" ], [ "FileNumber" ] ], "GroupName": "MetadataGroup", "Autogenerated": false, "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ] }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 14, "PendingOp": 0 }
+[ { "DataverseName": "Metadata", "DatasetName": "CompactionPolicy", "DatatypeName": "CompactionPolicyRecordType", "DatasetType": "INTERNAL", "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "CompactionPolicy" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "CompactionPolicy" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 13i32, "PendingOp": 0i32 }
+, { "DataverseName": "Metadata", "DatasetName": "Dataset", "DatatypeName": "DatasetRecordType", "DatasetType": "INTERNAL", "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "DatasetName" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "DatasetName" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 2i32, "PendingOp": 0i32 }
+, { "DataverseName": "Metadata", "DatasetName": "DatasourceAdapter", "DatatypeName": "DatasourceAdapterRecordType", "DatasetType": "INTERNAL", "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "Name" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "Name" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 8i32, "PendingOp": 0i32 }
+, { "DataverseName": "Metadata", "DatasetName": "Datatype", "DatatypeName": "DatatypeRecordType", "DatasetType": "INTERNAL", "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "DatatypeName" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "DatatypeName" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 3i32, "PendingOp": 0i32 }
+, { "DataverseName": "Metadata", "DatasetName": "Dataverse", "DatatypeName": "DataverseRecordType", "DatasetType": "INTERNAL", "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ] ], "PrimaryKey": [ [ "DataverseName" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 1i32, "PendingOp": 0i32 }
+, { "DataverseName": "Metadata", "DatasetName": "ExternalFile", "DatatypeName": "ExternalFileRecordType", "DatasetType": "INTERNAL", "GroupName": "MetadataGroup", "CompactionPolicy": "prefix", "CompactionPolicyProperties": [ { "Name": "max-mergable-component-size", "Value": "1073741824" }, { "Name": "max-tolerance-component-count", "Value": "5" } ], "InternalDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ [ "DataverseName" ], [ "DatasetName" ], [ "FileNumber" ] ], "PrimaryKey": [ [ "DataverseName" ], [ "DatasetName" ], [ "FileNumber" ] ], "Autogenerated": false }, "ExternalDetails": null, "Hints": {{  }}, "Timestamp": "Sun Jun 08 13:29:06 PDT 2014", "DatasetId": 14i32, "PendingOp": 0i32 }
  ]
\ No newline at end of file

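The udf23 expected result lists the Metadata datasets themselves, with the same top-level GroupName/CompactionPolicy fields and i32 suffixes as above. These are the records returned by a plain scan of the Dataset metadata dataset; a sketch only, as the actual udf23 query is not part of this patch:

    for $ds in dataset('Metadata.Dataset')
    return $ds
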
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
index 2674da6..9d52e01 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DatasetDecl.java
@@ -21,21 +21,30 @@ import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
 import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
 
 public class DatasetDecl implements Statement {
     protected final Identifier name;
     protected final Identifier dataverse;
     protected final Identifier itemTypeName;
+    protected final Identifier nodegroupName;
+    protected final String compactionPolicy;
+    protected final Map<String, String> compactionPolicyProperties;
     protected final DatasetType datasetType;
     protected final IDatasetDetailsDecl datasetDetailsDecl;
     protected final Map<String, String> hints;
     protected final boolean ifNotExists;
 
-    public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeName, Map<String, String> hints,
+    public DatasetDecl(Identifier dataverse, Identifier name, Identifier itemTypeName, Identifier nodeGroupName,
+            String compactionPolicy, Map<String, String> compactionPolicyProperties, Map<String, String> hints,
             DatasetType datasetType, IDatasetDetailsDecl idd, boolean ifNotExists) {
         this.dataverse = dataverse;
         this.name = name;
         this.itemTypeName = itemTypeName;
+        this.nodegroupName = nodeGroupName == null ? new Identifier(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)
+                : nodeGroupName;
+        this.compactionPolicy = compactionPolicy;
+        this.compactionPolicyProperties = compactionPolicyProperties;
         this.hints = hints;
         this.ifNotExists = ifNotExists;
         this.datasetType = datasetType;
@@ -58,6 +67,18 @@ public class DatasetDecl implements Statement {
         return itemTypeName;
     }
 
+    public Identifier getNodegroupName() {
+        return nodegroupName;
+    }
+
+    public String getCompactionPolicy() {
+        return compactionPolicy;
+    }
+
+    public Map<String, String> getCompactionPolicyProperties() {
+        return compactionPolicyProperties;
+    }
+
     public Map<String, String> getHints() {
         return hints;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java
index ff39fd4..69d5201 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/ExternalDetailsDecl.java
@@ -19,9 +19,6 @@ import java.util.Map;
 public class ExternalDetailsDecl implements IDatasetDetailsDecl {
     private Map<String, String> properties;
     private String adapter;
-    private Identifier nodegroupName;
-    private String compactionPolicy;
-    private Map<String, String> compactionPolicyProperties;
 
     public void setAdapter(String adapter) {
         this.adapter = adapter;
@@ -40,34 +37,8 @@ public class ExternalDetailsDecl implements IDatasetDetailsDecl {
     }
 
     @Override
-    public Identifier getNodegroupName() {
-        return nodegroupName;
-    }
-
-    public void setNodegroupName(Identifier nodegroupName) {
-        this.nodegroupName = nodegroupName;
-    }
-
-    @Override
-    public String getCompactionPolicy() {
-        return compactionPolicy;
-    }
-
-    public void setCompactionPolicy(String compactionPolicy) {
-        this.compactionPolicy = compactionPolicy;
-    }
-
-    @Override
-    public Map<String, String> getCompactionPolicyProperties() {
-        return compactionPolicyProperties;
-    }
-
-    @Override
     public boolean isTemp() {
         return false;
     }
 
-    public void setCompactionPolicyProperties(Map<String, String> compactionPolicyProperties) {
-        this.compactionPolicyProperties = compactionPolicyProperties;
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
index 68cceba..0c773ba 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/FeedDetailsDecl.java
@@ -25,9 +25,8 @@ public class FeedDetailsDecl extends InternalDetailsDecl {
     private final FunctionSignature functionSignature;
 
     public FeedDetailsDecl(String adapterFactoryClassname, Map<String, String> configuration,
-            FunctionSignature signature, Identifier nodeGroupName, List<List<String>> partitioningExpr,
-            String compactionPolicy, Map<String, String> compactionPolicyProperties, List<String> filterField) {
-        super(nodeGroupName, partitioningExpr, false, compactionPolicy, compactionPolicyProperties, filterField, false);
+            FunctionSignature signature, List<List<String>> partitioningExpr, List<String> filterField) {
+        super(partitioningExpr, false, filterField, false);
         this.adapterFactoryClassname = adapterFactoryClassname;
         this.configuration = configuration;
         this.functionSignature = signature;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java
index 4ceb969..b86a5f6 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/IDatasetDetailsDecl.java
@@ -14,16 +14,9 @@
  */
 package edu.uci.ics.asterix.aql.expression;
 
-import java.util.Map;
 
 public interface IDatasetDetailsDecl {
 
-    public Identifier getNodegroupName();
-
-    public String getCompactionPolicy();
-
-    public Map<String, String> getCompactionPolicyProperties();
-
     public boolean isTemp();
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java
index aa1b291..3d87a9a 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/InternalDetailsDecl.java
@@ -15,28 +15,17 @@
 package edu.uci.ics.asterix.aql.expression;
 
 import java.util.List;
-import java.util.Map;
-
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
 
 public class InternalDetailsDecl implements IDatasetDetailsDecl {
-    private final Identifier nodegroupName;
     private final List<List<String>> partitioningExprs;
     private final boolean autogenerated;
-    private final String compactionPolicy;
-    private final Map<String, String> compactionPolicyProperties;
     private final boolean temp;
     private final List<String> filterField;
 
-    public InternalDetailsDecl(Identifier nodeGroupName, List<List<String>> partitioningExpr, boolean autogenerated,
-            String compactionPolicy, Map<String, String> compactionPolicyProperties, List<String> filterField,
+    public InternalDetailsDecl(List<List<String>> partitioningExpr, boolean autogenerated, List<String> filterField,
             boolean temp) {
-        this.nodegroupName = nodeGroupName == null ? new Identifier(MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME)
-                : nodeGroupName;
         this.partitioningExprs = partitioningExpr;
         this.autogenerated = autogenerated;
-        this.compactionPolicy = compactionPolicy;
-        this.compactionPolicyProperties = compactionPolicyProperties;
         this.filterField = filterField;
         this.temp = temp;
     }
@@ -45,26 +34,11 @@ public class InternalDetailsDecl implements IDatasetDetailsDecl {
         return partitioningExprs;
     }
 
-    @Override
-    public Identifier getNodegroupName() {
-        return nodegroupName;
-    }
-
     public boolean isAutogenerated() {
         return autogenerated;
     }
 
     @Override
-    public String getCompactionPolicy() {
-        return compactionPolicy;
-    }
-
-    @Override
-    public Map<String, String> getCompactionPolicyProperties() {
-        return compactionPolicyProperties;
-    }
-
-    @Override
     public boolean isTemp() {
         return temp;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-aql/src/main/javacc/AQL.jj
----------------------------------------------------------------------
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index 7044962..52d2b83 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -427,12 +427,12 @@ DatasetDecl DatasetSpecification() throws ParseException:
         ExternalDetailsDecl edd = new ExternalDetailsDecl();
         edd.setAdapter(adapterName);
         edd.setProperties(properties);
-        edd.setNodegroupName(nodeGroupName != null? new Identifier(nodeGroupName): null);
-        edd.setCompactionPolicy(compactionPolicy);
-        edd.setCompactionPolicyProperties(compactionPolicyProperties);
         dsetDecl = new DatasetDecl(nameComponents.first,
                                    nameComponents.second,
                                    new Identifier(typeName),
+                                   nodeGroupName != null? new Identifier(nodeGroupName): null,
+                                   compactionPolicy,
+                                   compactionPolicyProperties,
                                    hints,
                                    DatasetType.EXTERNAL,
                                    edd,
@@ -453,18 +453,16 @@ DatasetDecl DatasetSpecification() throws ParseException:
     ( "using" "compaction" "policy" compactionPolicy = CompactionPolicy() (compactionPolicyProperties = Configuration())? )?
     ( "with filter on" filterField = NestedField() )?
       {
-        InternalDetailsDecl idd = new InternalDetailsDecl(nodeGroupName != null
-                                                            ? new Identifier(nodeGroupName)
-                                                            : null,
-                                                          primaryKeyFields,
+        InternalDetailsDecl idd = new InternalDetailsDecl(primaryKeyFields,
                                                           autogenerated,
-                                                          compactionPolicy,
-                                                          compactionPolicyProperties,
                                                           filterField,
                                                           temp);
         dsetDecl = new DatasetDecl(nameComponents.first,
                                    nameComponents.second,
                                    new Identifier(typeName),
+                                   nodeGroupName != null ? new Identifier(nodeGroupName) : null,
+                                   compactionPolicy,
+                                   compactionPolicyProperties,
                                    hints,
                                    DatasetType.INTERNAL,
                                    idd,

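With nodegroup and compaction-policy information removed from the details declarations, the grammar now hands them straight to the DatasetDecl constructor for both external and internal datasets. A hedged DDL sketch exercising the clauses visible in this hunk; the dataset, type and field names echo the FacebookMessages examples in the manual hunk below, while message-id and the property-list syntax are assumptions based on the prefix-policy defaults shown in the metadata records above:

    create dataset FacebookMessages(FacebookMessageType)
    primary key message-id
    using compaction policy prefix
    (("max-mergable-component-size"="1073741824"),
     ("max-tolerance-component-count"="5"))
    with filter on send-time;
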
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-doc/src/site/markdown/aql/manual.md
----------------------------------------------------------------------
diff --git a/asterix-doc/src/site/markdown/aql/manual.md b/asterix-doc/src/site/markdown/aql/manual.md
index cabbbd3..3280fca 100644
--- a/asterix-doc/src/site/markdown/aql/manual.md
+++ b/asterix-doc/src/site/markdown/aql/manual.md
@@ -695,7 +695,7 @@ This index can be useful for accelerating exact-match queries, range search quer
 
 ##### Example
 
-    create index fbAuthorIdx on FacebookMessages(author-id) type btree enforced;
+    create index fbAuthorIdx on FacebookMessages(author-id) type btree;
 
 The following example creates an open btree index called fbSendTimeIdx on the open send-time field of the
 FacebookMessages dataset having datetime type.
@@ -703,7 +703,7 @@ This index can be useful for accelerating exact-match queries, range search quer
 
 ##### Example
 
-    create index fbSendTimeIdx on FacebookMessages(send-time:datetime) type btree;
+    create index fbSendTimeIdx on FacebookMessages(send-time:datetime) type btree enforced;
 
 The following example creates a btree index called twUserScrNameIdx on the screen-name field, which is a nested field
 of the user field in the TweetMessages dataset.

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSIndexingAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSIndexingAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSIndexingAdapterFactory.java
index 37b8050..4e69279 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSIndexingAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSIndexingAdapterFactory.java
@@ -17,7 +17,6 @@ package edu.uci.ics.asterix.external.adapter.factory;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.mapred.InputSplit;
@@ -33,6 +32,7 @@ import edu.uci.ics.asterix.om.types.AUnionType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.util.AsterixAppContextInfo;
 import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
 import edu.uci.ics.asterix.runtime.operators.file.AsterixTupleParserFactory;
 import edu.uci.ics.asterix.runtime.operators.file.DelimitedDataParser;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
@@ -158,11 +158,10 @@ public class HDFSIndexingAdapterFactory extends HDFSAdapterFactory {
         for (int i = 0; i < n; i++) {
             ATypeTag tag = null;
             if (recordType.getFieldTypes()[i].getTypeTag() == ATypeTag.UNION) {
-                List<IAType> unionTypes = ((AUnionType) recordType.getFieldTypes()[i]).getUnionList();
-                if (unionTypes.size() != 2 && unionTypes.get(0).getTypeTag() != ATypeTag.NULL) {
+                if (!NonTaggedFormatUtil.isOptional(recordType.getFieldTypes()[i])) {
                     throw new NotImplementedException("Non-optional UNION type is not supported.");
                 }
-                tag = unionTypes.get(1).getTypeTag();
+                tag = ((AUnionType) recordType.getFieldTypes()[i]).getNullableType().getTypeTag();
             } else {
                 tag = recordType.getFieldTypes()[i].getTypeTag();
             }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java
index 7d45448..87ea01e 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/StreamBasedAdapterFactory.java
@@ -34,7 +34,6 @@ public abstract class StreamBasedAdapterFactory implements IAdapterFactory {
 
     protected Map<String, String> configuration;
     protected ITupleParserFactory parserFactory;
-   
 
     public abstract InputDataFormat getInputDataFormat();
 
@@ -43,5 +42,4 @@ public abstract class StreamBasedAdapterFactory implements IAdapterFactory {
 
     }
 
-  
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/indexing/dataflow/HiveObjectParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/indexing/dataflow/HiveObjectParser.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/indexing/dataflow/HiveObjectParser.java
index 3651bb9..94247e9 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/indexing/dataflow/HiveObjectParser.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/indexing/dataflow/HiveObjectParser.java
@@ -50,12 +50,13 @@ import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.asterix.om.types.AUnionType;
 import edu.uci.ics.asterix.om.types.AUnorderedListType;
 import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
 
 @SuppressWarnings("deprecation")
-public class HiveObjectParser implements IAsterixHDFSRecordParser{
+public class HiveObjectParser implements IAsterixHDFSRecordParser {
 
     private static final String KEY_HIVE_SERDE = "hive-serde";
     private ARecordType aRecord;
@@ -73,8 +74,9 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
 
     @SuppressWarnings({ "unchecked" })
     @Override
-    public void initialize(ARecordType record, Map<String, String> arguments, Configuration hadoopConfig) throws Exception {
-        if(!initialized){
+    public void initialize(ARecordType record, Map<String, String> arguments, Configuration hadoopConfig)
+            throws Exception {
+        if (!initialized) {
             this.aRecord = record;
             int n = record.getFieldNames().length;
             fieldTypes = record.getFieldTypes();
@@ -84,7 +86,7 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
             tbl.put(Constants.LIST_COLUMNS, getCommaDelimitedColNames(record));
             tbl.put(Constants.LIST_COLUMN_TYPES, getColTypes(record));
             String hiveSerdeClassName = (String) arguments.get(KEY_HIVE_SERDE);
-            if(hiveSerdeClassName == null){
+            if (hiveSerdeClassName == null) {
                 throw new IllegalArgumentException("no hive serde provided for hive deserialized records");
             }
             hiveSerde = (SerDe) Class.forName(hiveSerdeClassName).newInstance();
@@ -105,20 +107,19 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         }
     }
 
-    private Object getColTypes(ARecordType record) throws Exception{
+    private Object getColTypes(ARecordType record) throws Exception {
         int n = record.getFieldTypes().length;
-        if(n < 1){
+        if (n < 1) {
             throw new HyracksDataException("Failed to get columns of record");
         }
         ATypeTag tag = null;
 
         //First Column
         if (record.getFieldTypes()[0].getTypeTag() == ATypeTag.UNION) {
-            List<IAType> unionTypes = ((AUnionType) record.getFieldTypes()[0]).getUnionList();
-            if (unionTypes.size() != 2 && unionTypes.get(0).getTypeTag() != ATypeTag.NULL) {
+            if (NonTaggedFormatUtil.isOptional(record.getFieldTypes()[0])) {
                 throw new NotImplementedException("Non-optional UNION type is not supported.");
             }
-            tag = unionTypes.get(1).getTypeTag();
+            tag = ((AUnionType) record.getFieldTypes()[0]).getNullableType().getTypeTag();
         } else {
             tag = record.getFieldTypes()[0].getTypeTag();
         }
@@ -130,11 +131,10 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         for (int i = 1; i < n; i++) {
             tag = null;
             if (record.getFieldTypes()[i].getTypeTag() == ATypeTag.UNION) {
-                List<IAType> unionTypes = ((AUnionType) record.getFieldTypes()[i]).getUnionList();
-                if (unionTypes.size() != 2 && unionTypes.get(0).getTypeTag() != ATypeTag.NULL) {
+                if (NonTaggedFormatUtil.isOptional(record.getFieldTypes()[i])) {
                     throw new NotImplementedException("Non-optional UNION type is not supported.");
                 }
-                tag = unionTypes.get(1).getTypeTag();
+                tag = ((AUnionType) record.getFieldTypes()[i]).getNullableType().getTypeTag();
             } else {
                 tag = record.getFieldTypes()[i].getTypeTag();
             }
@@ -147,52 +147,53 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
     }
 
     private String getCommaDelimitedColNames(ARecordType record) throws Exception {
-        if(record.getFieldNames().length < 1){
+        if (record.getFieldNames().length < 1) {
             throw new HyracksDataException("Can't deserialize hive records with no closed columns");
         }
 
         String cols = record.getFieldNames()[0];
-        for(int i=1; i<record.getFieldNames().length; i++){
+        for (int i = 1; i < record.getFieldNames().length; i++) {
             cols = cols + "," + record.getFieldNames()[i];
         }
         return cols;
     }
 
-    private String getHiveTypeString(ATypeTag tag) throws Exception{
-        switch(tag){
-        case BOOLEAN:
-            return Constants.BOOLEAN_TYPE_NAME;
-        case DATE:
-            return Constants.DATE_TYPE_NAME;
-        case DATETIME:
-            return Constants.DATETIME_TYPE_NAME;
-        case DOUBLE:
-            return Constants.DOUBLE_TYPE_NAME;
-        case FLOAT:
-            return Constants.FLOAT_TYPE_NAME;
-        case INT16:
-            return Constants.SMALLINT_TYPE_NAME;
-        case INT32:
-            return Constants.INT_TYPE_NAME;
-        case INT64:
-            return Constants.BIGINT_TYPE_NAME;
-        case INT8:
-            return Constants.TINYINT_TYPE_NAME;
-        case ORDEREDLIST:
-            return Constants.LIST_TYPE_NAME;
-        case STRING:
-            return Constants.STRING_TYPE_NAME;
-        case TIME:
-            return Constants.DATETIME_TYPE_NAME;
-        case UNORDEREDLIST:
-            return Constants.LIST_TYPE_NAME;
-        default:
-            throw new HyracksDataException("Can't get hive type for field of type " + tag);
+    private String getHiveTypeString(ATypeTag tag) throws Exception {
+        switch (tag) {
+            case BOOLEAN:
+                return Constants.BOOLEAN_TYPE_NAME;
+            case DATE:
+                return Constants.DATE_TYPE_NAME;
+            case DATETIME:
+                return Constants.DATETIME_TYPE_NAME;
+            case DOUBLE:
+                return Constants.DOUBLE_TYPE_NAME;
+            case FLOAT:
+                return Constants.FLOAT_TYPE_NAME;
+            case INT16:
+                return Constants.SMALLINT_TYPE_NAME;
+            case INT32:
+                return Constants.INT_TYPE_NAME;
+            case INT64:
+                return Constants.BIGINT_TYPE_NAME;
+            case INT8:
+                return Constants.TINYINT_TYPE_NAME;
+            case ORDEREDLIST:
+                return Constants.LIST_TYPE_NAME;
+            case STRING:
+                return Constants.STRING_TYPE_NAME;
+            case TIME:
+                return Constants.DATETIME_TYPE_NAME;
+            case UNORDEREDLIST:
+                return Constants.LIST_TYPE_NAME;
+            default:
+                throw new HyracksDataException("Can't get hive type for field of type " + tag);
         }
     }
+
     @Override
     public void parse(Object object, DataOutput output) throws Exception {
-        if(object == null){
+        if (object == null) {
             throw new HyracksDataException("Hive parser can't parse null objects");
         }
         Object hiveObject = hiveSerde.deserialize((Writable) object);
@@ -200,53 +201,58 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         List<Object> attributesValues = oi.getStructFieldsDataAsList(hiveObject);
         recBuilder.reset(aRecord);
         recBuilder.init();
-        for(int i=0;i<n;i++){
+        for (int i = 0; i < n; i++) {
             fieldValueBuffer.reset();
             fieldValueBuffer.getDataOutput().writeByte(fieldTypeTags[i]);
             ObjectInspector foi = fieldRefs.get(i).getFieldObjectInspector();
             //get field type
-            switch(fieldTypes[i].getTypeTag()){
-            case BOOLEAN:
-                parseBoolean(attributesValues.get(i), (BooleanObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case TIME:
-                parseTime(attributesValues.get(i), (TimestampObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case DATE:
-                parseDate(attributesValues.get(i), (TimestampObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case DATETIME:
-                parseDateTime(attributesValues.get(i), (TimestampObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case DOUBLE:
-                parseDouble(attributesValues.get(i), (DoubleObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case FLOAT:
-                parseFloat(attributesValues.get(i), (FloatObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case INT8:
-                parseInt8(attributesValues.get(i), (ByteObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case INT16:
-                parseInt16(attributesValues.get(i), (ShortObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case INT32:
-                parseInt32(attributesValues.get(i), (IntObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case INT64:
-                parseInt64(attributesValues.get(i), (LongObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case STRING:
-                parseString(attributesValues.get(i), (StringObjectInspector)foi, fieldValueBuffer.getDataOutput());
-                break;
-            case ORDEREDLIST:
-                parseOrderedList((AOrderedListType)fieldTypes[i], attributesValues.get(i), (ListObjectInspector)foi);
-                break;
-            case UNORDEREDLIST:
-                parseUnorderedList((AUnorderedListType)fieldTypes[i], attributesValues.get(i), (ListObjectInspector)foi);
-                break;
-            default:
-                throw new HyracksDataException("Can't get hive type for field of type " + fieldTypes[i].getTypeTag());
+            switch (fieldTypes[i].getTypeTag()) {
+                case BOOLEAN:
+                    parseBoolean(attributesValues.get(i), (BooleanObjectInspector) foi,
+                            fieldValueBuffer.getDataOutput());
+                    break;
+                case TIME:
+                    parseTime(attributesValues.get(i), (TimestampObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case DATE:
+                    parseDate(attributesValues.get(i), (TimestampObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case DATETIME:
+                    parseDateTime(attributesValues.get(i), (TimestampObjectInspector) foi,
+                            fieldValueBuffer.getDataOutput());
+                    break;
+                case DOUBLE:
+                    parseDouble(attributesValues.get(i), (DoubleObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case FLOAT:
+                    parseFloat(attributesValues.get(i), (FloatObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case INT8:
+                    parseInt8(attributesValues.get(i), (ByteObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case INT16:
+                    parseInt16(attributesValues.get(i), (ShortObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case INT32:
+                    parseInt32(attributesValues.get(i), (IntObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case INT64:
+                    parseInt64(attributesValues.get(i), (LongObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case STRING:
+                    parseString(attributesValues.get(i), (StringObjectInspector) foi, fieldValueBuffer.getDataOutput());
+                    break;
+                case ORDEREDLIST:
+                    parseOrderedList((AOrderedListType) fieldTypes[i], attributesValues.get(i),
+                            (ListObjectInspector) foi);
+                    break;
+                case UNORDEREDLIST:
+                    parseUnorderedList((AUnorderedListType) fieldTypes[i], attributesValues.get(i),
+                            (ListObjectInspector) foi);
+                    break;
+                default:
+                    throw new HyracksDataException("Can't get hive type for field of type "
+                            + fieldTypes[i].getTypeTag());
             }
             recBuilder.addField(i, fieldValueBuffer);
         }
@@ -258,7 +264,7 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
     }
 
     private void parseInt32(Object obj, IntObjectInspector foi, DataOutput dataOutput) throws IOException {
-        if(obj == null){
+        if (obj == null) {
             throw new HyracksDataException("can't parse null field");
         }
         dataOutput.writeInt(foi.get(obj));
@@ -277,7 +283,7 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
     }
 
     private void parseDateTime(Object obj, TimestampObjectInspector foi, DataOutput dataOutput) throws IOException {
-        dataOutput.writeLong(foi.getPrimitiveJavaObject(obj).getTime()) ;
+        dataOutput.writeLong(foi.getPrimitiveJavaObject(obj).getTime());
     }
 
     private void parseDate(Object obj, TimestampObjectInspector foi, DataOutput dataOutput) throws IOException {
@@ -301,12 +307,12 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         dataOutput.writeUTF(foi.getPrimitiveJavaObject(obj));
     }
 
-    private void parseTime(Object obj,
-            TimestampObjectInspector foi, DataOutput dataOutput) throws IOException {
-        dataOutput.writeInt((int)(foi.getPrimitiveJavaObject(obj).getTime() % 86400000));   
+    private void parseTime(Object obj, TimestampObjectInspector foi, DataOutput dataOutput) throws IOException {
+        dataOutput.writeInt((int) (foi.getPrimitiveJavaObject(obj).getTime() % 86400000));
     }
 
-    private void parseOrderedList(AOrderedListType aOrderedListType, Object obj, ListObjectInspector foi) throws IOException {
+    private void parseOrderedList(AOrderedListType aOrderedListType, Object obj, ListObjectInspector foi)
+            throws IOException {
         OrderedListBuilder orderedListBuilder = getOrderedListBuilder();
         IAType itemType = null;
         if (aOrderedListType != null)
@@ -314,10 +320,10 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         orderedListBuilder.reset(aOrderedListType);
 
         int n = foi.getListLength(obj);
-        for(int i=0; i<n;i++){
+        for (int i = 0; i < n; i++) {
             Object element = foi.getListElement(obj, i);
             ObjectInspector eoi = foi.getListElementObjectInspector();
-            if(element == null){
+            if (element == null) {
                 throw new HyracksDataException("can't parse hive list with null values");
             }
 
@@ -327,7 +333,8 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         orderedListBuilder.write(fieldValueBuffer.getDataOutput(), true);
     }
 
-    private void parseUnorderedList(AUnorderedListType uoltype, Object obj, ListObjectInspector oi) throws IOException, AsterixException {
+    private void parseUnorderedList(AUnorderedListType uoltype, Object obj, ListObjectInspector oi) throws IOException,
+            AsterixException {
         UnorderedListBuilder unorderedListBuilder = getUnorderedListBuilder();
         IAType itemType = null;
         if (uoltype != null)
@@ -336,10 +343,10 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         unorderedListBuilder.reset(uoltype);
 
         int n = oi.getListLength(obj);
-        for(int i=0; i<n;i++){
+        for (int i = 0; i < n; i++) {
             Object element = oi.getListElement(obj, i);
             ObjectInspector eoi = oi.getListElementObjectInspector();
-            if(element == null){
+            if (element == null) {
                 throw new HyracksDataException("can't parse hive list with null values");
             }
             listItemBuffer.reset();
@@ -350,52 +357,52 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
         unorderedListBuilder.write(fieldValueBuffer.getDataOutput(), true);
     }
 
-    private void parseHiveListItem(Object obj, ObjectInspector eoi,
-            ArrayBackedValueStorage fieldValueBuffer, IAType itemType) throws IOException {
+    private void parseHiveListItem(Object obj, ObjectInspector eoi, ArrayBackedValueStorage fieldValueBuffer,
+            IAType itemType) throws IOException {
         //get field type
-        switch(itemType.getTypeTag()){
-        case BOOLEAN:
-            parseBoolean(obj, (BooleanObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case TIME:
-            parseTime(obj, (TimestampObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case DATE:
-            parseDate(obj, (TimestampObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case DATETIME:
-            parseDateTime(obj, (TimestampObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case DOUBLE:
-            parseDouble(obj, (DoubleObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case FLOAT:
-            parseFloat(obj, (FloatObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case INT8:
-            parseInt8(obj, (ByteObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case INT16:
-            parseInt16(obj, (ShortObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case INT32:
-            parseInt32(obj, (IntObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case INT64:
-            parseInt64(obj, (LongObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        case STRING:
-            parseString(obj, (StringObjectInspector)eoi, fieldValueBuffer.getDataOutput());
-            break;
-        default:
-            throw new HyracksDataException("doesn't support hive data with list of non-primitive types");
+        switch (itemType.getTypeTag()) {
+            case BOOLEAN:
+                parseBoolean(obj, (BooleanObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case TIME:
+                parseTime(obj, (TimestampObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case DATE:
+                parseDate(obj, (TimestampObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case DATETIME:
+                parseDateTime(obj, (TimestampObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case DOUBLE:
+                parseDouble(obj, (DoubleObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case FLOAT:
+                parseFloat(obj, (FloatObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case INT8:
+                parseInt8(obj, (ByteObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case INT16:
+                parseInt16(obj, (ShortObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case INT32:
+                parseInt32(obj, (IntObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case INT64:
+                parseInt64(obj, (LongObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            case STRING:
+                parseString(obj, (StringObjectInspector) eoi, fieldValueBuffer.getDataOutput());
+                break;
+            default:
+                throw new HyracksDataException("doesn't support hive data with list of non-primitive types");
         }
     }
 
     private OrderedListBuilder getOrderedListBuilder() {
         if (orderedListBuilder != null)
             return orderedListBuilder;
-        else{
+        else {
             orderedListBuilder = new OrderedListBuilder();
             return orderedListBuilder;
         }
@@ -404,7 +411,7 @@ public class HiveObjectParser implements IAsterixHDFSRecordParser{
     private UnorderedListBuilder getUnorderedListBuilder() {
         if (unorderedListBuilder != null)
             return unorderedListBuilder;
-        else{
+        else {
             unorderedListBuilder = new UnorderedListBuilder();
             return unorderedListBuilder;
         }

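A note on the parseTime change above: 86400000 is the number of milliseconds in one day (24 * 60 * 60 * 1000), so the modulo keeps only the millisecond-of-day component that gets written out as the TIME value. A minimal illustrative sketch, assuming the Hive inspector hands back a java.sql.Timestamp (this snippet is not part of the patch):

    // Illustrative only -- mirrors the parseTime logic in the hunk above.
    java.sql.Timestamp ts = foi.getPrimitiveJavaObject(obj); // Hive timestamp value
    long epochMillis = ts.getTime();                         // ms since the epoch
    int millisOfDay = (int) (epochMillis % 86400000L);       // 86400000 = 24 * 60 * 60 * 1000
    dataOutput.writeInt(millisOfDay);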
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java
index 95a9efa..ffca014 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/java/JObjectUtil.java
@@ -366,9 +366,8 @@ public class JObjectUtil {
 
                         IAType fieldType = fieldTypes[fieldNumber];
                         if (fieldTypes[fieldNumber].getTypeTag() == ATypeTag.UNION) {
-                            if (NonTaggedFormatUtil.isOptionalField((AUnionType) fieldTypes[fieldNumber])) {
-                                fieldType = ((AUnionType) fieldTypes[fieldNumber]).getUnionList().get(
-                                        AUnionType.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
+                            if (((AUnionType) fieldTypes[fieldNumber]).isNullableType()) {
+                                fieldType = ((AUnionType) fieldTypes[fieldNumber]).getNullableType();
                                 fieldValueTypeTag = fieldType.getTypeTag();
                                 //                      fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(recordBits,
                                 //                              fieldOffsets[fieldNumber], typeTag, false);

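For the nullable-union change above: instead of indexing into the union's type list at a fixed position, the patch calls the AUnionType accessors directly. A minimal sketch of the unwrapping pattern, using only the two accessors visible in the hunk; the helper itself is hypothetical and not part of the patch:

    // Hypothetical helper illustrating the pattern: unwrap an optional
    // (nullable) union down to its value type before reading the field.
    private static IAType unwrapIfNullable(IAType fieldType) {
        if (fieldType.getTypeTag() == ATypeTag.UNION && ((AUnionType) fieldType).isNullableType()) {
            return ((AUnionType) fieldType).getNullableType();
        }
        return fieldType;
    }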
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/IDatasetDetails.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/IDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/IDatasetDetails.java
index 13a4ed6..2a36244 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/IDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/IDatasetDetails.java
@@ -16,7 +16,6 @@ package edu.uci.ics.asterix.metadata;
 
 import java.io.DataOutput;
 import java.io.Serializable;
-import java.util.Map;
 
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
@@ -25,14 +24,8 @@ public interface IDatasetDetails extends Serializable {
 
     public DatasetType getDatasetType();
 
-    public String getNodeGroupName();
-
     public void writeDatasetDetailsRecordType(DataOutput out) throws HyracksDataException;
 
-    public String getCompactionPolicy();
-
-    public Map<String, String> getCompactionPolicyProperties();
-
     /**
      * @return if the dataset is a temporary dataset.
      *         Here is a summary of temporary datasets:

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
index b61f410..df0c5b8 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
@@ -43,7 +43,6 @@ import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.DatasourceAdapter;
 import edu.uci.ics.asterix.metadata.entities.Datatype;
 import edu.uci.ics.asterix.metadata.entities.Dataverse;
-import edu.uci.ics.asterix.metadata.entities.ExternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.entities.ExternalFile;
 import edu.uci.ics.asterix.metadata.entities.Feed;
 import edu.uci.ics.asterix.metadata.entities.FeedPolicy;
@@ -178,6 +177,10 @@ public class MetadataNode implements IMetadataNode {
             DatasetTupleTranslator tupleReaderWriter = new DatasetTupleTranslator(true);
             ITupleReference datasetTuple = tupleReaderWriter.getTupleFromMetadataEntity(dataset);
             insertTupleIntoIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
+            // Add an entry for the node group
+            ITupleReference nodeGroupTuple = createTuple(dataset.getNodeGroupName(), dataset.getDataverseName(),
+                    dataset.getDatasetName());
+            insertTupleIntoIndex(jobId, MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, nodeGroupTuple);
             if (dataset.getDatasetType() == DatasetType.INTERNAL) {
                 // Add the primary index for the dataset.
                 InternalDatasetDetails id = (InternalDatasetDetails) dataset.getDatasetDetails();
@@ -186,16 +189,6 @@ public class MetadataNode implements IMetadataNode {
                         true, dataset.getPendingOp());
 
                 addIndex(jobId, primaryIndex);
-                // Add an entry for the node group
-                ITupleReference nodeGroupTuple = createTuple(id.getNodeGroupName(), dataset.getDataverseName(),
-                        dataset.getDatasetName());
-                insertTupleIntoIndex(jobId, MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, nodeGroupTuple);
-            } else if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-                //added for external data
-                ExternalDatasetDetails id = (ExternalDatasetDetails) dataset.getDatasetDetails();
-                ITupleReference nodeGroupTuple = createTuple(id.getNodeGroupName(), dataset.getDataverseName(),
-                        dataset.getDatasetName());
-                insertTupleIntoIndex(jobId, MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, nodeGroupTuple);
             }
             // Add entry in datatype secondary index.
             ITupleReference dataTypeTuple = createTuple(dataset.getDataverseName(), dataset.getItemTypeName(),
@@ -426,61 +419,61 @@ public class MetadataNode implements IMetadataNode {
             ITupleReference searchKey = createTuple(dataverseName, datasetName);
             // Searches the index for the tuple to be deleted. Acquires an S
             // lock on the 'dataset' dataset.
+            ITupleReference datasetTuple = null;
             try {
-                ITupleReference datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET,
-                        searchKey);
-                deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
-            } catch (TreeIndexException tie) {
-                // ignore this exception and continue deleting all relevant
-                // artifacts.
-            }
+                datasetTuple = getTupleToBeDeleted(jobId, MetadataPrimaryIndexes.DATASET_DATASET, searchKey);
 
-            // Delete entry from secondary index 'group'.
-            ITupleReference groupNameSearchKey = createTuple(dataset.getDatasetDetails().getNodeGroupName(),
-                    dataverseName, datasetName);
-            // Searches the index for the tuple to be deleted. Acquires an S
-            // lock on the GROUPNAME_ON_DATASET_INDEX index.
-            try {
-                ITupleReference groupNameTuple = getTupleToBeDeleted(jobId,
-                        MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameSearchKey);
-                deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameTuple);
-            } catch (TreeIndexException tie) {
-                // ignore this exception and continue deleting all relevant
-                // artifacts.
-            }
+                // Delete entry from secondary index 'group'.
+                ITupleReference groupNameSearchKey = createTuple(dataset.getNodeGroupName(), dataverseName, datasetName);
+                // Searches the index for the tuple to be deleted. Acquires an S
+                // lock on the GROUPNAME_ON_DATASET_INDEX index.
+                try {
+                    ITupleReference groupNameTuple = getTupleToBeDeleted(jobId,
+                            MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameSearchKey);
+                    deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX, groupNameTuple);
+                } catch (TreeIndexException tie) {
+                    // ignore this exception and continue deleting all relevant
+                    // artifacts.
+                }
 
-            // Delete entry from secondary index 'type'.
-            ITupleReference dataTypeSearchKey = createTuple(dataverseName, dataset.getItemTypeName(), datasetName);
-            // Searches the index for the tuple to be deleted. Acquires an S
-            // lock on the DATATYPENAME_ON_DATASET_INDEX index.
-            try {
-                ITupleReference dataTypeTuple = getTupleToBeDeleted(jobId,
-                        MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeSearchKey);
-                deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeTuple);
-            } catch (TreeIndexException tie) {
-                // ignore this exception and continue deleting all relevant
-                // artifacts.
-            }
+                // Delete entry from secondary index 'type'.
+                ITupleReference dataTypeSearchKey = createTuple(dataverseName, dataset.getItemTypeName(), datasetName);
+                // Searches the index for the tuple to be deleted. Acquires an S
+                // lock on the DATATYPENAME_ON_DATASET_INDEX index.
+                try {
+                    ITupleReference dataTypeTuple = getTupleToBeDeleted(jobId,
+                            MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeSearchKey);
+                    deleteTupleFromIndex(jobId, MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX, dataTypeTuple);
+                } catch (TreeIndexException tie) {
+                    // ignore this exception and continue deleting all relevant
+                    // artifacts.
+                }
 
-            // Delete entry(s) from the 'indexes' dataset.
-            List<Index> datasetIndexes = getDatasetIndexes(jobId, dataverseName, datasetName);
-            if (datasetIndexes != null) {
-                for (Index index : datasetIndexes) {
-                    dropIndex(jobId, dataverseName, datasetName, index.getIndexName());
+                // Delete entry(s) from the 'indexes' dataset.
+                List<Index> datasetIndexes = getDatasetIndexes(jobId, dataverseName, datasetName);
+                if (datasetIndexes != null) {
+                    for (Index index : datasetIndexes) {
+                        dropIndex(jobId, dataverseName, datasetName, index.getIndexName());
+                    }
                 }
-            }
 
-            if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-                // Delete External Files
-                // As a side effect, acquires an S lock on the 'ExternalFile' dataset
-                // on behalf of txnId.
-                List<ExternalFile> datasetFiles = getExternalFiles(jobId, dataset);
-                if (datasetFiles != null && datasetFiles.size() > 0) {
-                    // Drop all external files in this dataset.
-                    for (ExternalFile file : datasetFiles) {
-                        dropExternalFile(jobId, dataverseName, file.getDatasetName(), file.getFileNumber());
+                if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+                    // Delete External Files
+                    // As a side effect, acquires an S lock on the 'ExternalFile' dataset
+                    // on behalf of txnId.
+                    List<ExternalFile> datasetFiles = getExternalFiles(jobId, dataset);
+                    if (datasetFiles != null && datasetFiles.size() > 0) {
+                        // Drop all external files in this dataset.
+                        for (ExternalFile file : datasetFiles) {
+                            dropExternalFile(jobId, dataverseName, file.getDatasetName(), file.getFileNumber());
+                        }
                     }
                 }
+            } catch (TreeIndexException tie) {
+                // ignore this exception and continue deleting all relevant
+                // artifacts.
+            } finally {
+                deleteTupleFromIndex(jobId, MetadataPrimaryIndexes.DATASET_DATASET, datasetTuple);
             }
 
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
index 820ac2b..d3a1fb9 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataTransactionContext.java
@@ -118,7 +118,7 @@ public class MetadataTransactionContext extends MetadataCache {
     }
 
     public void dropDataset(String dataverseName, String datasetName) {
-        Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, null, -1,
+        Dataset dataset = new Dataset(dataverseName, datasetName, null, null, null, null, null, null, null, -1,
                 IMetadataEntity.PENDING_NO_OP);
         droppedCache.addDatasetIfNotExists(dataset);
         logAndApply(new MetadataLogicalOperation(dataset, false));

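The dropDataset change above reflects the wider signature change in this commit: GroupName, CompactionPolicy and CompactionPolicyProperties now live on Dataset itself rather than on the dataset details. A hypothetical call with placeholder values, following the argument order used in the MetadataBootstrap hunk below; compactionPolicyProperties (a Map<String, String>) and internalDetails are assumed to have been built earlier:

    // Placeholder values; argument order taken from the MetadataBootstrap call below:
    // (dataverse, dataset, itemType, nodeGroup, compactionPolicy,
    //  compactionPolicyProperties, details, hints, datasetType, datasetId, pendingOp)
    Dataset ds = new Dataset("test1", "t1", "testtype", "DEFAULT_NG_ALL_NODES",
            "prefix", compactionPolicyProperties, internalDetails,
            new HashMap<String, String>(), DatasetType.INTERNAL,
            101, IMetadataEntity.PENDING_NO_OP);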
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/a5895308/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 62688de..6f69d82 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -123,9 +123,8 @@ public class MetadataBootstrap {
                 MetadataPrimaryIndexes.INDEX_DATASET, MetadataPrimaryIndexes.NODE_DATASET,
                 MetadataPrimaryIndexes.NODEGROUP_DATASET, MetadataPrimaryIndexes.FUNCTION_DATASET,
                 MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET, MetadataPrimaryIndexes.FEED_DATASET,
-                MetadataPrimaryIndexes.FEED_POLICY_DATASET,
-                MetadataPrimaryIndexes.LIBRARY_DATASET, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET,
-                MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET };
+                MetadataPrimaryIndexes.FEED_POLICY_DATASET, MetadataPrimaryIndexes.LIBRARY_DATASET,
+                MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET, MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET };
 
         secondaryIndexes = new IMetadataIndex[] { MetadataSecondaryIndexes.GROUPNAME_ON_DATASET_INDEX,
                 MetadataSecondaryIndexes.DATATYPENAME_ON_DATASET_INDEX,
@@ -237,13 +236,12 @@ public class MetadataBootstrap {
         for (int i = 0; i < primaryIndexes.length; i++) {
             IDatasetDetails id = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH,
                     primaryIndexes[i].getPartitioningExpr(), primaryIndexes[i].getPartitioningExpr(),
-                    primaryIndexes[i].getPartitioningExprType(), primaryIndexes[i].getNodeGroupName(), false,
-                    GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME, GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES,
-                    null, false);
+                    primaryIndexes[i].getPartitioningExprType(), false, null, false);
             MetadataManager.INSTANCE.addDataset(mdTxnCtx, new Dataset(primaryIndexes[i].getDataverseName(),
                     primaryIndexes[i].getIndexedDatasetName(), primaryIndexes[i].getPayloadRecordType().getTypeName(),
-                    id, new HashMap<String, String>(), DatasetType.INTERNAL, primaryIndexes[i].getDatasetId().getId(),
-                    IMetadataEntity.PENDING_NO_OP));
+                    primaryIndexes[i].getNodeGroupName(), GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME,
+                    GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, id, new HashMap<String, String>(),
+                    DatasetType.INTERNAL, primaryIndexes[i].getDatasetId().getId(), IMetadataEntity.PENDING_NO_OP));
         }
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Finished inserting initial datasets.");