Posted to commits@asterixdb.apache.org by sj...@apache.org on 2016/01/13 06:29:01 UTC

[6/6] incubator-asterixdb git commit: Enabled Datasets to use Datatypes from foreign Dataverses

Enabled Datasets to use Datatypes from foreign Dataverses

Removed broken Metadata Secondary Indexes
Added DatatypeDataverse field to Dataset
Cleaned up Metadata Tests
Added tests for dropping used Datatypes
User-facing changes are:
1) A dataset can now be created with a fully qualified
datatype (dataverse.typename); see the example below
2) Metadata queries for datasets now also
include the datatype's dataverse
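
(A minimal sketch of the newly allowed DDL and the metadata change. The
dataverse, type, and dataset names below are made up for illustration and
are not taken from the commit or its test suite.)

    drop dataverse typeDv if exists;
    drop dataverse dataDv if exists;
    create dataverse typeDv;
    create dataverse dataDv;

    use dataverse typeDv;
    create type LineType as open {
      id: int64,
      text: string
    };

    use dataverse dataDv;
    // the datatype can now be referenced by its qualified name
    create dataset Lines(typeDv.LineType) primary key id;

    // per the new drop tests, dropping typeDv.LineType (or typeDv itself)
    // while dataDv.Lines still uses it is expected to be rejected

    // metadata records for the dataset now also carry the dataverse of its
    // datatype (e.g. a DatatypeDataverseName field) alongside DatatypeName
    for $ds in dataset('Metadata.Dataset')
    where $ds.DatasetName = "Lines"
    return $ds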

Change-Id: I24dbc04dcb2a4126fc8361ebe3104877a0d1f2bb
Reviewed-on: https://asterix-gerrit.ics.uci.edu/558
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: abdullah alamoudi <ba...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/dace5f2f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/dace5f2f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/dace5f2f

Branch: refs/heads/master
Commit: dace5f2f6e0100c1506f3c360da4c8b2b1b95b98
Parents: 07c9962
Author: Steven Jacobs <sj...@ucr.edu>
Authored: Tue Jan 12 14:02:48 2016 -0800
Committer: Steven Jacobs <sj...@ucr.edu>
Committed: Tue Jan 12 21:24:15 2016 -0800

----------------------------------------------------------------------
 .../physical/InvertedIndexPOperator.java        |  30 +-
 ...IntroduceSecondaryIndexInsertDeleteRule.java |  19 +-
 .../optimizer/rules/PushFieldAccessRule.java    |   4 +-
 .../am/AbstractIntroduceAccessMethodRule.java   |  10 -
 .../am/IntroduceLSMComponentFilterRule.java     |   4 +-
 .../rules/am/OptimizableOperatorSubTree.java    |   5 +-
 .../AqlPlusExpressionToPlanTranslator.java      |   2 +-
 .../LangExpressionToPlanTranslator.java         |   4 +-
 .../api/http/servlet/ConnectorAPIServlet.java   |  14 +-
 .../asterix/aql/translator/QueryTranslator.java |  71 +++--
 .../apache/asterix/file/DatasetOperations.java  |  55 ++--
 .../file/ExternalIndexingOperations.java        |  20 +-
 .../http/servlet/ConnectorAPIServletTest.java   |  26 +-
 .../queries/basic/meta16/meta16.1.ddl.aql       |  18 ++
 .../queries/basic/meta16/meta16.3.query.aql     |  12 +-
 .../queries/basic/meta17/meta17.1.ddl.aql       |  17 ++
 .../queries/basic/meta17/meta17.3.query.aql     |  13 +-
 .../queries/basic/meta18/meta18.1.ddl.aql       |  18 --
 .../queries/basic/meta18/meta18.2.update.aql    |  18 --
 .../queries/basic/meta18/meta18.3.query.aql     |  22 --
 .../queries/basic/meta19/meta19.1.ddl.aql       |  18 --
 .../queries/basic/meta19/meta19.2.update.aql    |  18 --
 .../queries/basic/meta19/meta19.3.query.aql     |  23 --
 .../queries/basic/meta20/meta20.1.ddl.aql       |  18 --
 .../queries/basic/meta20/meta20.2.update.aql    |  18 --
 .../queries/basic/meta20/meta20.3.query.aql     |  22 --
 .../queries/basic/meta21/meta21.1.ddl.aql       |  18 --
 .../queries/basic/meta21/meta21.2.update.aql    |  18 --
 .../queries/basic/meta21/meta21.3.query.aql     |  22 --
 .../queries/basic/meta22/meta22.1.ddl.aql       |  36 ---
 .../queries/basic/meta22/meta22.2.update.aql    |  18 --
 .../queries/basic/meta22/meta22.3.query.aql     |  28 --
 .../queries/basic/meta23/meta23.1.ddl.aql       |  35 ---
 .../queries/basic/meta23/meta23.2.update.aql    |  18 --
 .../queries/basic/meta23/meta23.3.query.aql     |  28 --
 .../results/basic/issue_251_dataset_hint_2.adm  |   1 -
 .../issue_251_dataset_hint_2.1.adm              |   2 +-
 .../results/basic/issue_251_dataset_hint_3.adm  |   1 -
 .../issue_251_dataset_hint_3.1.adm              |   2 +-
 .../results/basic/issue_251_dataset_hint_4.adm  |   1 -
 .../issue_251_dataset_hint_4.1.adm              |   2 +-
 .../resources/metadata/results/basic/meta01.adm |   2 -
 .../resources/metadata/results/basic/meta02.adm |   1 -
 .../metadata/results/basic/meta02/meta02.1.adm  |   2 +-
 .../resources/metadata/results/basic/meta03.adm |   1 -
 .../resources/metadata/results/basic/meta04.adm |   1 -
 .../resources/metadata/results/basic/meta05.adm |   2 -
 .../resources/metadata/results/basic/meta06.adm |   1 -
 .../resources/metadata/results/basic/meta07.adm |   2 -
 .../resources/metadata/results/basic/meta08.adm |   2 -
 .../resources/metadata/results/basic/meta09.adm |   1 -
 .../metadata/results/basic/meta09/meta09.1.adm  |   2 +-
 .../resources/metadata/results/basic/meta10.adm |   1 -
 .../resources/metadata/results/basic/meta11.adm |   1 -
 .../resources/metadata/results/basic/meta12.adm |   1 -
 .../resources/metadata/results/basic/meta13.adm |   1 -
 .../resources/metadata/results/basic/meta14.adm |   1 -
 .../resources/metadata/results/basic/meta15.adm |  11 -
 .../resources/metadata/results/basic/meta16.adm |  12 -
 .../metadata/results/basic/meta16/meta16.1.adm  |  15 +-
 .../resources/metadata/results/basic/meta17.adm |  68 -----
 .../metadata/results/basic/meta17/meta17.1.adm  |  63 +---
 .../resources/metadata/results/basic/meta18.adm |   1 -
 .../metadata/results/basic/meta18/meta18.1.adm  |   3 -
 .../resources/metadata/results/basic/meta19.adm |  17 --
 .../metadata/results/basic/meta19/meta19.1.adm  |  16 -
 .../resources/metadata/results/basic/meta20.adm |   2 -
 .../metadata/results/basic/meta20/meta20.1.adm  |   2 -
 .../resources/metadata/results/basic/meta21.adm |   2 -
 .../metadata/results/basic/meta21/meta21.1.adm  |   2 -
 .../metadata/results/basic/meta22/meta22.1.adm  |   2 -
 .../metadata/results/basic/meta23/meta23.1.adm  |   2 -
 .../metadata/results/basic/metadata_dataset.adm |  12 -
 .../metadata_dataset/metadata_dataset.1.adm     |  26 +-
 .../results/basic/metadata_datatype.adm         |  67 -----
 .../metadata_datatype/metadata_datatype.1.adm   | 122 ++++----
 .../results/basic/metadata_dataverse.adm        |   1 -
 .../metadata/results/basic/metadata_index.adm   |  15 -
 .../basic/metadata_index/metadata_index.1.adm   |  29 +-
 .../metadata/results/basic/metadata_node.adm    |   2 -
 .../results/basic/metadata_nodegroup.adm        |   2 -
 .../src/test/resources/metadata/testsuite.xml   |  30 --
 .../parserts/queries_sqlpp/del-dataset.sqlpp    |  13 +-
 .../queries_sqlpp/load-del-dataset.sqlpp        |  13 +-
 .../results_parser_sqlpp/del-dataset.ast        |  14 +-
 .../results_parser_sqlpp/load-del-dataset.ast   |  14 +-
 .../cross-dv01/cross-dv01.1.ddl.aql             |   5 +-
 .../cross-dv01/cross-dv01.2.update.aql          |   4 -
 .../cross-dv03/cross-dv03.1.ddl.aql             |   2 -
 .../cross-dv04/cross-dv04.1.ddl.aql             |   3 -
 .../drop-dataverse/drop-dataverse.1.ddl.aql     |  39 +++
 .../drop-dataverse/drop-dataverse.2.update.aql  |  25 ++
 .../drop-type-used-elsewhere.1.ddl.aql          |  41 +++
 .../drop-type-used-elsewhere.2.update.aql       |  25 ++
 .../drop-type-used-here-dataset.1.ddl.aql       |  40 +++
 .../drop-type-used-here-dataset.2.update.aql    |  25 ++
 .../drop-type-used-here-type.1.ddl.aql          |  42 +++
 .../drop-type-used-here-type.2.update.aql       |  25 ++
 .../query-dataset-with-foreign-type.1.ddl.aql   |  41 +++
 ...query-dataset-with-foreign-type.2.update.aql |  29 ++
 .../query-dataset-with-foreign-type.3.query.aql |  29 ++
 .../cross-dv01/cross-dv01.1.ddl.sqlpp           |   7 +-
 .../cross-dv01/cross-dv01.2.update.sqlpp        |   4 -
 .../cross-dv03/cross-dv03.1.ddl.sqlpp           |   5 -
 .../cross-dv04/cross-dv04.1.ddl.sqlpp           |   5 -
 .../cross-dataverse/cross-dv02/cross-dv02.1.adm |   8 +-
 .../cross-dataverse/cross-dv04/cross-dv04.1.adm |   8 +-
 .../cross-dataverse/cross-dv19/cross-dv19.1.adm |  14 +-
 .../drop-dataverse/drop-dataverse.1.adm         |   0
 .../drop-type-used-elsewhere.1.adm              |   0
 .../drop-type-used-here-dataset.1.adm           |   0
 .../drop-type-used-here-type.1.adm              |   0
 .../query-dataset-with-foreign-type.1.adm       |   1 +
 .../drop-empty-secondary-indexes.1.adm          |   3 -
 .../user-defined-functions/udf23/udf23.1.adm    |  12 +-
 .../cross-dataverse/cross-dv01/cross-dv01.1.ast |   1 -
 .../cross-dataverse/cross-dv01/cross-dv01.2.ast |   2 -
 .../cross-dataverse/cross-dv03/cross-dv03.1.ast |   2 -
 .../cross-dataverse/cross-dv04/cross-dv04.1.ast |   2 -
 .../src/test/resources/runtimets/testsuite.xml  |  43 ++-
 asterix-doc/src/site/markdown/aql/manual.md     |   4 +-
 asterix-lang-aql/src/main/javacc/AQL.jj         |  12 +-
 .../lang/common/statement/DatasetDecl.java      |  24 +-
 .../lang/common/visitor/FormatPrintVisitor.java |   7 +-
 .../lang/common/visitor/QueryPrintVisitor.java  |   3 +-
 asterix-lang-sqlpp/src/main/javacc/SQLPP.jj     |  12 +-
 .../apache/asterix/metadata/MetadataNode.java   | 289 ++++++++-----------
 .../metadata/MetadataTransactionContext.java    |   2 +-
 .../metadata/bootstrap/MetadataBootstrap.java   |  42 +--
 .../MetadataIndexImmutableProperties.java       |   5 +-
 .../metadata/bootstrap/MetadataRecordTypes.java |  35 +--
 .../bootstrap/MetadataSecondaryIndexes.java     |  69 -----
 .../metadata/declared/AqlMetadataProvider.java  |  25 +-
 .../asterix/metadata/entities/Dataset.java      |  14 +-
 .../DatasetTupleTranslator.java                 |  77 ++---
 .../IndexTupleTranslator.java                   |  27 +-
 .../functions/MetadataBuiltinFunctions.java     |   4 +-
 .../metadata/utils/MetadataLockManager.java     |  28 +-
 .../DatasetNameValueExtractor.java              |  53 ----
 .../DatatypeNameValueExtractor.java             |  68 -----
 140 files changed, 1031 insertions(+), 1590 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index 661fcb2..0e5850b 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -106,7 +106,7 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
     @Override
     public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
             IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
-            throws AlgebricksException {
+                    throws AlgebricksException {
         UnnestMapOperator unnestMapOp = (UnnestMapOperator) op;
         ILogicalExpression unnestExpr = unnestMapOp.getExpressionRef().getValue();
         if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
@@ -155,22 +155,20 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
         try {
             IAObject simThresh = ((AsterixConstantValue) similarityThreshold).getObject();
             IAType itemType = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
-                    dataset.getDataverseName(), dataset.getItemTypeName()).getDatatype();
+                    dataset.getItemTypeDataverseName(), dataset.getItemTypeName()).getDatatype();
             int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
             Index secondaryIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
                     dataset.getDataverseName(), dataset.getDatasetName(), indexName);
             if (secondaryIndex == null) {
-                throw new AlgebricksException("Code generation error: no index " + indexName + " for dataset "
-                        + datasetName);
+                throw new AlgebricksException(
+                        "Code generation error: no index " + indexName + " for dataset " + datasetName);
             }
             List<List<String>> secondaryKeyFieldEntries = secondaryIndex.getKeyFieldNames();
             List<IAType> secondaryKeyTypeEntries = secondaryIndex.getKeyFieldTypes();
             int numSecondaryKeys = secondaryKeyFieldEntries.size();
             if (numSecondaryKeys != 1) {
-                throw new AlgebricksException(
-                        "Cannot use "
-                                + numSecondaryKeys
-                                + " fields as a key for an inverted index. There can be only one field as a key for the inverted index index.");
+                throw new AlgebricksException("Cannot use " + numSecondaryKeys
+                        + " fields as a key for an inverted index. There can be only one field as a key for the inverted index index.");
             }
             if (itemType.getTypeTag() != ATypeTag.RECORD) {
                 throw new AlgebricksException("Only record types can be indexed.");
@@ -180,8 +178,8 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
                     secondaryKeyFieldEntries.get(0), recordType);
             IAType secondaryKeyType = keyPairType.first;
             if (secondaryKeyType == null) {
-                throw new AlgebricksException("Could not find field " + secondaryKeyFieldEntries.get(0)
-                        + " in the schema.");
+                throw new AlgebricksException(
+                        "Could not find field " + secondaryKeyFieldEntries.get(0) + " in the schema.");
             }
 
             // TODO: For now we assume the type of the generated tokens is the
@@ -217,8 +215,8 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
                     typeEnv, context);
 
             ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, recordType);
-            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(
-                    dataset, recordType, context.getBinaryComparatorFactoryProvider());
+            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
+                    recordType, context.getBinaryComparatorFactoryProvider());
 
             int[] filterFields = null;
             int[] invertedIndexFields = null;
@@ -249,13 +247,13 @@ public class InvertedIndexPOperator extends IndexSearchPOperator {
             // Get tokenizer and search modifier factories.
             IInvertedIndexSearchModifierFactory searchModifierFactory = InvertedIndexAccessMethod
                     .getSearchModifierFactory(searchModifierType, simThresh, secondaryIndex);
-            IBinaryTokenizerFactory queryTokenizerFactory = InvertedIndexAccessMethod.getBinaryTokenizerFactory(
-                    searchModifierType, searchKeyType, secondaryIndex);
+            IBinaryTokenizerFactory queryTokenizerFactory = InvertedIndexAccessMethod
+                    .getBinaryTokenizerFactory(searchModifierType, searchKeyType, secondaryIndex);
             IIndexDataflowHelperFactory dataflowHelperFactory;
 
             AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
-                    dataset, metadataProvider.getMetadataTxnContext());
+            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
+                    .getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
             boolean temp = dataset.getDatasetDetails().isTemp();
             if (!isPartitioned) {
                 dataflowHelperFactory = new LSMInvertedIndexDataflowHelperFactory(

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
index bb5f659..952368f 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -150,7 +150,7 @@ public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewrit
 
         // Create operators for secondary index insert/delete.
         String itemTypeName = dataset.getItemTypeName();
-        IAType itemType = mp.findType(dataset.getDataverseName(), itemTypeName);
+        IAType itemType = mp.findType(dataset.getItemTypeDataverseName(), itemTypeName);
         if (itemType.getTypeTag() != ATypeTag.RECORD) {
             throw new AlgebricksException("Only record types can be indexed.");
         }
@@ -172,8 +172,9 @@ public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewrit
         // Check whether multiple keyword or n-gram indexes exist
         int secondaryIndexTotalCnt = 0;
         for (Index index : indexes) {
-            if (index.isSecondaryIndex())
+            if (index.isSecondaryIndex()) {
                 secondaryIndexTotalCnt++;
+            }
         }
 
         // Initialize inputs to the SINK operator
@@ -275,10 +276,11 @@ public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewrit
             }
 
             // Only apply replicate operator when doing bulk-load
-            if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload())
+            if (secondaryIndexTotalCnt > 1 && insertOp.isBulkload()) {
                 project.getInputs().add(new MutableObject<ILogicalOperator>(replicateOp));
-            else
+            } else {
                 project.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+            }
 
             context.computeAndSetTypeEnvironmentForOperator(project);
 
@@ -316,8 +318,9 @@ public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewrit
                     // filtering operator.
                     boolean isPartitioned = false;
                     if (index.getIndexType() == IndexType.LENGTH_PARTITIONED_WORD_INVIX
-                            || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX)
+                            || index.getIndexType() == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
                         isPartitioned = true;
+                    }
 
                     // Create a new logical variable - token
                     List<LogicalVariable> tokenizeKeyVars = new ArrayList<LogicalVariable>();
@@ -377,8 +380,9 @@ public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewrit
                     currentTop = indexUpdate;
                     context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
 
-                    if (insertOp.isBulkload())
+                    if (insertOp.isBulkload()) {
                         op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+                    }
 
                 }
 
@@ -424,8 +428,9 @@ public class IntroduceSecondaryIndexInsertDeleteRule implements IAlgebraicRewrit
                 currentTop = indexUpdate;
                 context.computeAndSetTypeEnvironmentForOperator(indexUpdate);
 
-                if (insertOp.isBulkload())
+                if (insertOp.isBulkload()) {
                     op0.getInputs().add(new MutableObject<ILogicalOperator>(currentTop));
+                }
 
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
index b47cf98..a9b8e99 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushFieldAccessRule.java
@@ -140,7 +140,7 @@ public class PushFieldAccessRule implements IAlgebraicRewriteRule {
         if (obj.getType().getTypeTag() != ATypeTag.STRING) {
             int pos = ((AInt32) obj).getIntegerValue();
             String tName = dataset.getItemTypeName();
-            IAType t = mp.findType(dataset.getDataverseName(), tName);
+            IAType t = mp.findType(dataset.getItemTypeDataverseName(), tName);
             if (t.getTypeTag() != ATypeTag.RECORD) {
                 return false;
             }
@@ -321,7 +321,7 @@ public class PushFieldAccessRule implements IAlgebraicRewriteRule {
                             } else {
                                 int pos = ((AInt32) obj).getIntegerValue();
                                 String tName = dataset.getItemTypeName();
-                                IAType t = mp.findType(dataset.getDataverseName(), tName);
+                                IAType t = mp.findType(dataset.getItemTypeDataverseName(), tName);
                                 if (t.getTypeTag() != ATypeTag.RECORD) {
                                     return false;
                                 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
index 70b6770..1e24ea8 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/AbstractIntroduceAccessMethodRule.java
@@ -28,7 +28,6 @@ import java.util.Map;
 import org.apache.asterix.common.config.DatasetConfig.IndexType;
 import org.apache.asterix.dataflow.data.common.AqlExpressionTypeComputer;
 import org.apache.asterix.metadata.api.IMetadataEntity;
-import org.apache.asterix.metadata.bootstrap.MetadataConstants;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Index;
 import org.apache.asterix.metadata.utils.DatasetUtils;
@@ -193,15 +192,6 @@ public abstract class AbstractIntroduceAccessMethodRule implements IAlgebraicRew
             matchedExpressions.clear();
             numMatchedKeys = 0;
 
-            // Remove the candidate if the dataset is a metadata dataset and the index is secondary
-            // TODO: fix the way secondary metadata indexes are implemented and remove this check
-            if (accessMethod.matchPrefixIndexExprs()) {
-                if (index.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)
-                        && !index.isPrimaryIndex()) {
-                    indexExprAndVarIt.remove();
-                    continue;
-                }
-            }
             for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
                 List<String> keyField = index.getKeyFieldNames().get(i);
                 final IAType keyType = index.getKeyFieldTypes().get(i);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
index 4af06e1..55ba426 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceLSMComponentFilterRule.java
@@ -93,8 +93,8 @@ public class IntroduceLSMComponentFilterRule implements IAlgebraicRewriteRule {
         ARecordType recType = null;
         if (dataset != null && dataset.getDatasetType() == DatasetType.INTERNAL) {
             filterFieldName = DatasetUtils.getFilterField(dataset);
-            IAType itemType = ((AqlMetadataProvider) context.getMetadataProvider()).findType(dataset.getDataverseName(),
-                    dataset.getItemTypeName());
+            IAType itemType = ((AqlMetadataProvider) context.getMetadataProvider())
+                    .findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
             if (itemType.getTypeTag() == ATypeTag.RECORD) {
                 recType = (ARecordType) itemType;
             }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
index 8d28c62..52582ba 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/OptimizableOperatorSubTree.java
@@ -21,8 +21,6 @@ package org.apache.asterix.optimizer.rules.am;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.lang3.mutable.Mutable;
-
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 import org.apache.asterix.metadata.entities.Dataset;
 import org.apache.asterix.metadata.utils.DatasetUtils;
@@ -31,6 +29,7 @@ import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.types.ATypeTag;
 import org.apache.asterix.om.types.IAType;
 import org.apache.asterix.optimizer.base.AnalysisUtil;
+import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
@@ -256,7 +255,7 @@ public class OptimizableOperatorSubTree {
                 throw new AlgebricksException("No metadata for dataset " + datasetName);
             }
             // Get the record type for that dataset.
-            IAType itemType = metadataProvider.findType(dataverseName, ds.getItemTypeName());
+            IAType itemType = metadataProvider.findType(ds.getItemTypeDataverseName(), ds.getItemTypeName());
             if (itemType.getTypeTag() != ATypeTag.RECORD) {
                 if (i == 0) {
                     return false;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
index 2ac442f..9425254 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
@@ -269,7 +269,7 @@ public class AqlPlusExpressionToPlanTranslator extends AbstractLangTranslator
             if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
                 throw new AlgebricksException("Cannot write output to an external dataset.");
             }
-            ARecordType itemType = (ARecordType) metadata.findType(dataset.getDataverseName(),
+            ARecordType itemType = (ARecordType) metadata.findType(dataset.getItemTypeDataverseName(),
                     dataset.getItemTypeName());
             List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
             ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 788572a..cd0d21a 100644
--- a/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -174,7 +174,7 @@ class LangExpressionToPlanTranslator
             throw new AlgebricksException(
                     "Unable to load dataset " + clffs.getDatasetName() + " since it does not exist");
         }
-        IAType itemType = metadataProvider.findType(clffs.getDataverseName(), dataset.getItemTypeName());
+        IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
         DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
                 stmt.getDatasetName());
         List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
@@ -426,7 +426,7 @@ class LangExpressionToPlanTranslator
         }
         AqlSourceId sourceId = new AqlSourceId(dataverseName, datasetName);
         String itemTypeName = dataset.getItemTypeName();
-        IAType itemType = metadataProvider.findType(dataverseName, itemTypeName);
+        IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), itemTypeName);
         DatasetDataSource dataSource = new DatasetDataSource(sourceId, dataset.getDataverseName(),
                 dataset.getDatasetName(), itemType, AqlDataSourceType.INTERNAL_DATASET);
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
index c83ce6a..1e6cc66 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
@@ -29,10 +29,6 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
 import org.apache.asterix.feeds.CentralFeedManager;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -44,6 +40,9 @@ import org.apache.asterix.util.FlushDatasetUtils;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.client.NodeControllerInfo;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
 
 /***
  * The REST API that takes a dataverse name and a dataset name as the input
@@ -89,8 +88,8 @@ public class ConnectorAPIServlet extends HttpServlet {
             metadataProvider.setMetadataTxnContext(mdTxnCtx);
             Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
             if (dataset == null) {
-                jsonResponse.put("error", "Dataset " + datasetName + " does not exist in " + "dataverse "
-                        + dataverseName);
+                jsonResponse.put("error",
+                        "Dataset " + datasetName + " does not exist in " + "dataverse " + dataverseName);
                 out.write(jsonResponse.toString());
                 out.flush();
                 return;
@@ -98,7 +97,8 @@ public class ConnectorAPIServlet extends HttpServlet {
             boolean temp = dataset.getDatasetDetails().isTemp();
             FileSplit[] fileSplits = metadataProvider.splitsForDataset(mdTxnCtx, dataverseName, datasetName,
                     datasetName, temp);
-            ARecordType recordType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+            ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
+                    dataset.getItemTypeName());
             List<List<String>> primaryKeys = DatasetUtils.getPartitioningKeys(dataset);
             StringBuilder pkStrBuf = new StringBuilder();
             for (List<String> keys : primaryKeys) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
index 01b0514..545cb67 100644
--- a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
+++ b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
@@ -523,6 +523,7 @@ public class QueryTranslator extends AbstractLangTranslator {
         String dataverseName = getActiveDataverse(dd.getDataverse());
         String datasetName = dd.getName().getValue();
         DatasetType dsType = dd.getDatasetType();
+        String itemTypeDataverseName = dd.getItemTypeDataverse().getValue();
         String itemTypeName = dd.getItemTypeName().getValue();
         Identifier ngNameId = dd.getNodegroupName();
         String nodegroupName = getNodeGroupName(ngNameId, dd, dataverseName);
@@ -535,8 +536,9 @@ public class QueryTranslator extends AbstractLangTranslator {
         boolean bActiveTxn = true;
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
 
-        MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, dataverseName + "." + itemTypeName,
-                nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
+        MetadataLockManager.INSTANCE.createDatasetBegin(dataverseName, itemTypeDataverseName,
+                itemTypeDataverseName + "." + itemTypeName, nodegroupName, compactionPolicy,
+                dataverseName + "." + datasetName, defaultCompactionPolicy);
         Dataset dataset = null;
         try {
 
@@ -551,8 +553,8 @@ public class QueryTranslator extends AbstractLangTranslator {
                     throw new AlgebricksException("A dataset with this name " + datasetName + " already exists.");
                 }
             }
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    itemTypeName);
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
+                    itemTypeDataverseName, itemTypeName);
             if (dt == null) {
                 throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
             }
@@ -612,8 +614,8 @@ public class QueryTranslator extends AbstractLangTranslator {
             }
 
             //#. add a new dataset with PendingAddOp
-            dataset = new Dataset(dataverseName, datasetName, itemTypeName, ngName, compactionPolicy,
-                    compactionPolicyProperties, datasetDetails, dd.getHints(), dsType,
+            dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName, ngName,
+                    compactionPolicy, compactionPolicyProperties, datasetDetails, dd.getHints(), dsType,
                     DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
             MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
 
@@ -687,8 +689,9 @@ public class QueryTranslator extends AbstractLangTranslator {
 
             throw e;
         } finally {
-            MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, dataverseName + "." + itemTypeName,
-                    nodegroupName, compactionPolicy, dataverseName + "." + datasetName, defaultCompactionPolicy);
+            MetadataLockManager.INSTANCE.createDatasetEnd(dataverseName, itemTypeDataverseName,
+                    itemTypeDataverseName + "." + itemTypeName, nodegroupName, compactionPolicy,
+                    dataverseName + "." + datasetName, defaultCompactionPolicy);
         }
     }
 
@@ -812,8 +815,8 @@ public class QueryTranslator extends AbstractLangTranslator {
                     datasetName, indexName);
 
             String itemTypeName = ds.getItemTypeName();
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    itemTypeName);
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
+                    ds.getItemTypeDataverseName(), itemTypeName);
             IAType itemType = dt.getDatatype();
             ARecordType aRecordType = (ARecordType) itemType;
 
@@ -837,19 +840,22 @@ public class QueryTranslator extends AbstractLangTranslator {
                 if (fieldExpr.second == null) {
                     fieldType = subType.getSubFieldType(fieldExpr.first.subList(i, fieldExpr.first.size()));
                 } else {
-                    if (!stmtCreateIndex.isEnforced())
+                    if (!stmtCreateIndex.isEnforced()) {
                         throw new AlgebricksException("Cannot create typed index on \"" + fieldExpr.first
                                 + "\" field without enforcing it's type");
-                    if (!isOpen)
+                    }
+                    if (!isOpen) {
                         throw new AlgebricksException("Typed index on \"" + fieldExpr.first
                                 + "\" field could be created only for open datatype");
+                    }
                     Map<TypeSignature, IAType> typeMap = TypeTranslator.computeTypes(mdTxnCtx, fieldExpr.second,
                             indexName, dataverseName);
                     TypeSignature typeSignature = new TypeSignature(dataverseName, indexName);
                     fieldType = typeMap.get(typeSignature);
                 }
-                if (fieldType == null)
+                if (fieldType == null) {
                     throw new AlgebricksException("Unknown type " + fieldExpr.second);
+                }
 
                 indexFields.add(fieldExpr.first);
                 indexFieldTypes.add(fieldType);
@@ -951,10 +957,11 @@ public class QueryTranslator extends AbstractLangTranslator {
                         .getDatasetIndexes(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
                 for (Index index : indexes) {
                     if (index.getKeyFieldNames().equals(indexFields)
-                            && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds())
+                            && !index.getKeyFieldTypes().equals(indexFieldTypes) && index.isEnforcingKeyFileds()) {
                         throw new AsterixException(
                                 "Cannot create index " + indexName + " , enforced index " + index.getIndexName()
                                         + " on field \"" + StringUtils.join(indexFields, ',') + "\" already exist");
+                    }
                 }
             }
 
@@ -1366,9 +1373,9 @@ public class QueryTranslator extends AbstractLangTranslator {
                 //#. mark the existing dataset as PendingDropOp
                 MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
                 MetadataManager.INSTANCE.addDataset(mdTxnCtx,
-                        new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getNodeGroupName(),
-                                ds.getCompactionPolicy(), ds.getCompactionPolicyProperties(), ds.getDatasetDetails(),
-                                ds.getHints(), ds.getDatasetType(), ds.getDatasetId(),
+                        new Dataset(dataverseName, datasetName, ds.getItemTypeDataverseName(), ds.getItemTypeName(),
+                                ds.getNodeGroupName(), ds.getCompactionPolicy(), ds.getCompactionPolicyProperties(),
+                                ds.getDatasetDetails(), ds.getHints(), ds.getDatasetType(), ds.getDatasetId(),
                                 IMetadataEntity.PENDING_DROP_OP));
 
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -1409,9 +1416,9 @@ public class QueryTranslator extends AbstractLangTranslator {
                 //#. mark the existing dataset as PendingDropOp
                 MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
                 MetadataManager.INSTANCE.addDataset(mdTxnCtx,
-                        new Dataset(dataverseName, datasetName, ds.getItemTypeName(), ds.getNodeGroupName(),
-                                ds.getCompactionPolicy(), ds.getCompactionPolicyProperties(), ds.getDatasetDetails(),
-                                ds.getHints(), ds.getDatasetType(), ds.getDatasetId(),
+                        new Dataset(dataverseName, datasetName, ds.getItemTypeDataverseName(), ds.getItemTypeName(),
+                                ds.getNodeGroupName(), ds.getCompactionPolicy(), ds.getCompactionPolicyProperties(),
+                                ds.getDatasetDetails(), ds.getHints(), ds.getDatasetType(), ds.getDatasetId(),
                                 IMetadataEntity.PENDING_DROP_OP));
 
                 MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -1686,8 +1693,9 @@ public class QueryTranslator extends AbstractLangTranslator {
         try {
             Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataverseName, typeName);
             if (dt == null) {
-                if (!stmtTypeDrop.getIfExists())
+                if (!stmtTypeDrop.getIfExists()) {
                     throw new AlgebricksException("There is no datatype with this name " + typeName + ".");
+                }
             } else {
                 MetadataManager.INSTANCE.dropDatatype(mdTxnCtx, dataverseName, typeName);
             }
@@ -1710,8 +1718,9 @@ public class QueryTranslator extends AbstractLangTranslator {
         try {
             NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
             if (ng == null) {
-                if (!stmtDelete.getIfExists())
+                if (!stmtDelete.getIfExists()) {
                     throw new AlgebricksException("There is no nodegroup with this name " + nodegroupName + ".");
+                }
             } else {
                 MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroupName);
             }
@@ -1763,8 +1772,9 @@ public class QueryTranslator extends AbstractLangTranslator {
         try {
             Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
             if (function == null) {
-                if (!stmtDropFunction.getIfExists())
+                if (!stmtDropFunction.getIfExists()) {
                     throw new AlgebricksException("Unknonw function " + signature);
+                }
             } else {
                 MetadataManager.INSTANCE.dropFunction(mdTxnCtx, signature);
             }
@@ -2399,8 +2409,8 @@ public class QueryTranslator extends AbstractLangTranslator {
             }
 
             String itemTypeName = ds.getItemTypeName();
-            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(), dataverseName,
-                    itemTypeName);
+            Datatype dt = MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
+                    ds.getItemTypeDataverseName(), itemTypeName);
 
             // Prepare jobs to compact the datatset and its indexes
             List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
@@ -2538,8 +2548,9 @@ public class QueryTranslator extends AbstractLangTranslator {
         try {
             NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, ngName);
             if (ng != null) {
-                if (!stmtCreateNodegroup.getIfNotExists())
+                if (!stmtCreateNodegroup.getIfNotExists()) {
                     throw new AlgebricksException("A nodegroup with this name " + ngName + " already exists.");
+                }
             } else {
                 List<Identifier> ncIdentifiers = stmtCreateNodegroup.getNodeControllerNames();
                 List<String> ncNames = new ArrayList<String>(ncIdentifiers.size());
@@ -2895,10 +2906,10 @@ public class QueryTranslator extends AbstractLangTranslator {
             IDatasetDetailsDecl idd = new InternalDetailsDecl(toIndex.getKeyFieldNames(), false, null,
                     toDataset.getDatasetDetails().isTemp());
             DatasetDecl createToDataset = new DatasetDecl(new Identifier(dataverseNameTo),
-                    pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeName()),
-                    new Identifier(toDataset.getNodeGroupName()), toDataset.getCompactionPolicy(),
-                    toDataset.getCompactionPolicyProperties(), toDataset.getHints(), toDataset.getDatasetType(), idd,
-                    false);
+                    pregelixStmt.getDatasetNameTo(), new Identifier(toDataset.getItemTypeDataverseName()),
+                    new Identifier(toDataset.getItemTypeName()), new Identifier(toDataset.getNodeGroupName()),
+                    toDataset.getCompactionPolicy(), toDataset.getCompactionPolicyProperties(), toDataset.getHints(),
+                    toDataset.getDatasetType(), idd, false);
             this.handleCreateDatasetStatement(metadataProvider, createToDataset, hcc);
         } catch (Exception e) {
             e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
index 013e021..85ff6be 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
@@ -68,8 +68,8 @@ public class DatasetOperations {
     private static Logger LOGGER = Logger.getLogger(DatasetOperations.class.getName());
 
     public static JobSpecification createDropDatasetJobSpec(CompiledDatasetDropStatement datasetDropStmt,
-            AqlMetadataProvider metadataProvider) throws AlgebricksException, HyracksDataException, RemoteException,
-            ACIDException, AsterixException {
+            AqlMetadataProvider metadataProvider)
+                    throws AlgebricksException, HyracksDataException, RemoteException, ACIDException, AsterixException {
 
         String dataverseName = null;
         if (datasetDropStmt.getDataverseName() != null) {
@@ -101,7 +101,8 @@ public class DatasetOperations {
             throw new AsterixException(e);
         }
 
-        ARecordType itemType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+        ARecordType itemType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
+                dataset.getItemTypeName());
 
         ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
         IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
@@ -119,12 +120,13 @@ public class DatasetOperations {
 
         IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary,
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first, new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(
-                        dataset.getDatasetId()), compactionInfo.first, compactionInfo.second,
+                splitsAndConstraint.first,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        compactionInfo.first, compactionInfo.second,
                         new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                         AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
-                        filterCmpFactories, btreeFields, filterFields, !temp));
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        btreeFields, filterFields, !temp));
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
                 splitsAndConstraint.second);
 
@@ -147,7 +149,8 @@ public class DatasetOperations {
             throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
         }
         boolean temp = dataset.getDatasetDetails().isTemp();
-        ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
+        ARecordType itemType = (ARecordType) metadata.findType(dataset.getItemTypeDataverseName(),
+                dataset.getItemTypeName());
         JobSpecification spec = JobSpecificationUtils.createJobSpecification();
         IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                 itemType, format.getBinaryComparatorFactoryProvider());
@@ -183,12 +186,12 @@ public class DatasetOperations {
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                 splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
                 new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(dataset
-                                .getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE, storageProperties
-                                .getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
-                        btreeFields, filterFields, !temp), localResourceFactoryProvider,
-                NoOpOperationCallbackFactory.INSTANCE);
+                        compactionInfo.first, compactionInfo.second,
+                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        btreeFields, filterFields, !temp),
+                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
                 splitsAndConstraint.second);
         spec.addRoot(indexCreateOp);
@@ -214,7 +217,8 @@ public class DatasetOperations {
         }
         boolean temp = dataset.getDatasetDetails().isTemp();
 
-        ARecordType itemType = (ARecordType) metadata.findType(dataverseName, dataset.getItemTypeName());
+        ARecordType itemType = (ARecordType) metadata.findType(dataset.getItemTypeDataverseName(),
+                dataset.getItemTypeName());
         JobSpecification spec = JobSpecificationUtils.createJobSpecification();
         IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                 itemType, format.getBinaryComparatorFactoryProvider());
@@ -238,16 +242,17 @@ public class DatasetOperations {
                 AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                 splitsAndConstraint.first, typeTraits, comparatorFactories, blooFilterKeyFields,
                 new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        compactionInfo.first, compactionInfo.second, new PrimaryIndexOperationTrackerProvider(
-                                dataset.getDatasetId()), AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                        LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
-                        filterCmpFactories, btreeFields, filterFields, !temp), NoOpOperationCallbackFactory.INSTANCE);
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, compactOp, splitsAndConstraint.second);
-
-        AlgebricksPartitionConstraintHelper
-                .setPartitionConstraintInJobSpec(spec, compactOp, splitsAndConstraint.second);
+                        compactionInfo.first, compactionInfo.second,
+                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
+                        btreeFields, filterFields, !temp),
+                NoOpOperationCallbackFactory.INSTANCE);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                splitsAndConstraint.second);
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                splitsAndConstraint.second);
         spec.addRoot(compactOp);
         return spec;
     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
index 595fe4c..2ee9dd4 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
@@ -250,6 +250,7 @@ public class ExternalIndexingOperations {
 
     /**
      * This method create an indexing operator that index records in HDFS
+     *
      * @param jobSpec
      * @param itemType
      * @param dataset
@@ -283,9 +284,10 @@ public class ExternalIndexingOperations {
      * At the end of this method, we expect to have 4 sets as follows:
      * metadataFiles should contain only the files that are appended in their original state
      * addedFiles should contain new files that has number assigned starting after the max original file number
-     * deleteedFiles should contain files that are no longer there in the file system
+     * deletedFiles should contain files that are no longer there in the file system
      * appendedFiles should have the new file information of existing files
      * The method should return false in case of zero delta
+     *
      * @param dataset
      * @param metadataFiles
      * @param addedFiles
@@ -340,8 +342,9 @@ public class ExternalIndexingOperations {
                         uptodate = false;
                     }
                 }
-                if (fileFound)
+                if (fileFound) {
                     break;
+                }
             }
             if (!fileFound) {
                 // File not stored previously in metadata -> pending add op
@@ -385,9 +388,9 @@ public class ExternalIndexingOperations {
         ExternalDatasetDetails dsd = new ExternalDatasetDetails(originalDsd.getAdapter(), originalDsd.getProperties(),
                 originalDsd.getTimestamp(), ExternalDatasetTransactionState.BEGIN);
         Dataset transactionDatset = new Dataset(dataset.getDataverseName(), dataset.getDatasetName(),
-                dataset.getItemTypeName(), dataset.getNodeGroupName(), dataset.getCompactionPolicy(),
-                dataset.getCompactionPolicyProperties(), dsd, dataset.getHints(), DatasetType.EXTERNAL,
-                dataset.getDatasetId(), dataset.getPendingOp());
+                dataset.getItemTypeDataverseName(), dataset.getItemTypeName(), dataset.getNodeGroupName(),
+                dataset.getCompactionPolicy(), dataset.getCompactionPolicyProperties(), dsd, dataset.getHints(),
+                DatasetType.EXTERNAL, dataset.getDatasetId(), dataset.getPendingOp());
         return transactionDatset;
     }
 
@@ -428,9 +431,9 @@ public class ExternalIndexingOperations {
             AqlMetadataProvider metadataProvider) throws MetadataException, AlgebricksException {
         ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
         for (ExternalFile file : metadataFiles) {
-            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP)
+            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
                 files.add(file);
-            else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
+            } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
                 for (ExternalFile appendedFile : appendedFiles) {
                     if (appendedFile.getFileName().equals(file.getFileName())) {
                         files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(), file.getFileNumber(),
@@ -560,7 +563,8 @@ public class ExternalIndexingOperations {
         int numPrimaryKeys = getRIDSize(ds);
         List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
         secondaryKeyFields.size();
-        ARecordType itemType = (ARecordType) metadataProvider.findType(ds.getDataverseName(), ds.getItemTypeName());
+        ARecordType itemType = (ARecordType) metadataProvider.findType(ds.getItemTypeDataverseName(),
+                ds.getItemTypeName());
         Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0), itemType);
         IAType spatialType = spatialTypePair.first;
         if (spatialType == null) {

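The javadoc above describes the external-file delta as four output sets plus a boolean result that is false when there is no delta. Below is a rough, self-contained sketch of that classification; FileInfo and computeDelta are illustrative stand-ins, not the actual ExternalIndexingOperations API, which additionally assigns file numbers, records pending-operation states, and (per the javadoc) prunes metadataFiles down to the original entries of appended files.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class ExternalFileDeltaSketch {
        // Minimal stand-in for AsterixDB's ExternalFile.
        static final class FileInfo {
            final String name;
            final long size;
            FileInfo(String name, long size) { this.name = name; this.size = size; }
        }

        // Splits the current file-system listing against the metadata snapshot into the
        // added/deleted/appended sets described above. Returns false when nothing changed.
        static boolean computeDelta(List<FileInfo> metadataFiles, List<FileInfo> fsFiles,
                List<FileInfo> addedFiles, List<FileInfo> deletedFiles, List<FileInfo> appendedFiles) {
            Map<String, FileInfo> byName = new HashMap<>();
            for (FileInfo f : fsFiles) {
                byName.put(f.name, f);
            }
            for (FileInfo known : metadataFiles) {
                FileInfo current = byName.remove(known.name);
                if (current == null) {
                    deletedFiles.add(known);        // no longer present on the file system
                } else if (current.size > known.size) {
                    appendedFiles.add(current);     // existing file grew; keep its new info
                }
                // same size: treated here as unchanged
            }
            addedFiles.addAll(byName.values());     // anything left was never seen in metadata
            return !(addedFiles.isEmpty() && deletedFiles.isEmpty() && appendedFiles.isEmpty());
        }
    }

In the real method the append check is more involved than a size comparison, but the overall shape is the same: one pass matching metadata entries against the listing, with leftovers becoming pending add operations, as the hunk around the fileFound flag above suggests.
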
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
index ef1629b..8ff6d9b 100644
--- a/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
+++ b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
@@ -36,14 +36,6 @@ import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import junit.extensions.PA;
-import junit.framework.Assert;
-
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.json.JSONTokener;
-import org.junit.Test;
-
 import org.apache.asterix.feeds.CentralFeedManager;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -58,6 +50,13 @@ import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.client.NodeControllerInfo;
 import org.apache.hyracks.api.comm.NetworkAddress;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.json.JSONTokener;
+import org.junit.Test;
+
+import junit.extensions.PA;
+import junit.framework.Assert;
 
 @SuppressWarnings("deprecation")
 public class ConnectorAPIServletTest {
@@ -99,8 +98,8 @@ public class ConnectorAPIServletTest {
         servlet.doGet(mockRequest, mockResponse);
 
         // Constructs the actual response.
-        JSONTokener tokener = new JSONTokener(new InputStreamReader(
-                new ByteArrayInputStream(outputStream.toByteArray())));
+        JSONTokener tokener = new JSONTokener(
+                new InputStreamReader(new ByteArrayInputStream(outputStream.toByteArray())));
         JSONObject actualResponse = new JSONObject(tokener);
 
         // Checks the temp-or-not, primary key, data type of the dataset.
@@ -108,8 +107,8 @@ public class ConnectorAPIServletTest {
         Assert.assertFalse(temp);
         String primaryKey = actualResponse.getString("keys");
         Assert.assertEquals("DataverseName,DatasetName", primaryKey);
-        ARecordType recordType = (ARecordType) JSONDeserializerForTypes.convertFromJSON((JSONObject) actualResponse
-                .get("type"));
+        ARecordType recordType = (ARecordType) JSONDeserializerForTypes
+                .convertFromJSON((JSONObject) actualResponse.get("type"));
         Assert.assertEquals(getMetadataRecordType("Metadata", "Dataset"), recordType);
 
         // Checks the correctness of results.
@@ -177,7 +176,8 @@ public class ConnectorAPIServletTest {
         AqlMetadataProvider metadataProvider = new AqlMetadataProvider(null, CentralFeedManager.getInstance());
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
-        ARecordType recordType = (ARecordType) metadataProvider.findType(dataverseName, dataset.getItemTypeName());
+        ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
+                dataset.getItemTypeName());
         // Metadata transaction commits.
         MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         return recordType;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.1.ddl.aql
index 042f3ce..a64569f 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.1.ddl.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.1.ddl.aql
@@ -16,3 +16,21 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+/*
+ * Description  : Create primary index & secondary index on open field & query Metadata dataset to verify.
+ * Expected Res : Success
+ * Date         : 30 Sep 2013
+ */
+
+drop dataverse testdv if exists;
+create dataverse testdv;
+
+create type testdv.testtype as open {
+id : int32
+}
+
+create dataset testdv.t1(testtype) primary key id;
+
+create index idx1 on testdv.t1(name: string) enforced;
+
+/* drop index testdv.t1.idx1; */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql
index 7a457fd..a28a66c 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta16/meta16.3.query.aql
@@ -16,9 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-use dataverse Metadata;
+/*
+ * Description  : Create primary index & secondary index on open field & query Metadata dataset to verify.
+ * Expected Res : Success
+ * Date         : 30 Sep 2013
+ */
 
-for $c in dataset('Dataset')
-where $c.DataverseName='Metadata'
-return $c
+for $l in dataset('Metadata.Index')
+where $l.DataverseName='testdv'
+return $l
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.1.ddl.aql
index 042f3ce..0f92326 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.1.ddl.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.1.ddl.aql
@@ -16,3 +16,20 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+/*
+ * Description  : Create primary index & secondary index on open union field & query Metadata dataset to verify.
+ * Expected Res : Success
+ * Date         : 30 Sep 2013
+ */
+
+drop dataverse testdv if exists;
+create dataverse testdv;
+
+create type testdv.testtype as open {
+id : int32,
+name : string?
+}
+
+create dataset testdv.t1(testtype) primary key id;
+
+create index idx1 on testdv.t1(location: point) type rtree enforced;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql
index 60e92c8..c7c9f69 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta17/meta17.3.query.aql
@@ -16,8 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-use dataverse Metadata;
+/*
+ * Description  : Create primary index & secondary index on open union field & query Metadata dataset to verify.
+ * Expected Res : Success
+ * Date         : 30 Sep 2013
+ */
+
+for $l in dataset('Metadata.Index')
+where $l.DataverseName='testdv'
+return $l
 
-for $c in dataset('Datatype')
-where $c.DataverseName='Metadata'
-return $c

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.1.ddl.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.1.ddl.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.2.update.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.2.update.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.3.query.aql
deleted file mode 100644
index 1a05cdb..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta18/meta18.3.query.aql
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-use dataverse Metadata;
-
-for $c in dataset('Dataverse')
-return $c

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.1.ddl.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.1.ddl.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.2.update.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.2.update.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.3.query.aql
deleted file mode 100644
index a11b850..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta19/meta19.3.query.aql
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-use dataverse Metadata;
-
-for $c in dataset('Index')
-where $c.DataverseName='Metadata'
-return $c

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.1.ddl.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.1.ddl.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.2.update.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.2.update.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.3.query.aql
deleted file mode 100644
index 240c60f..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta20/meta20.3.query.aql
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-use dataverse Metadata;    
-
-for $c in dataset('Node')
-return $c

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.1.ddl.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.1.ddl.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.2.update.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.2.update.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.3.query.aql
deleted file mode 100644
index 714c705..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta21/meta21.3.query.aql
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-use dataverse Metadata;
-
-for $c in dataset('Nodegroup')
-return $c

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.1.ddl.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.1.ddl.aql
deleted file mode 100644
index a64569f..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.1.ddl.aql
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create primary index & secondary index on open field & query Metadata dataset to verify.
- * Expected Res : Success
- * Date         : 30 Sep 2013
- */
-
-drop dataverse testdv if exists;
-create dataverse testdv;
-
-create type testdv.testtype as open {
-id : int32
-}
-
-create dataset testdv.t1(testtype) primary key id;
-
-create index idx1 on testdv.t1(name: string) enforced;
-
-/* drop index testdv.t1.idx1; */

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/dace5f2f/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.2.update.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.2.update.aql
deleted file mode 100644
index 042f3ce..0000000
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta22/meta22.2.update.aql
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */