Posted to notifications@asterixdb.apache.org by "Steven Jacobs (Code Review)" <do...@asterixdb.incubator.apache.org> on 2018/01/19 18:10:30 UTC

Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Steven Jacobs has uploaded a new change for review.

  https://asterix-gerrit.ics.uci.edu/2302

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................

Enable dependencies in the metadata for BAD entities

Allow Channels and Procedures to store dependencies on
Datasets and Functions

Prevent dropping of Datasets and Functions that these entities depend on

Add error tests

Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
---
M asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
M asterix-bad/src/main/resources/lang-extension/lang.txt
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
A asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/testsuite.xml
35 files changed, 1,303 insertions(+), 103 deletions(-)


  git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb-bad refs/changes/02/2302/1
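
For readers skimming the patch: each BAD entity now stores its dependencies as a List<List<List<String>>>, where slot 0 holds dataset dependencies as [dataverse, dataset] and slot 1 holds function dependencies as [dataverse, name, arity]. A minimal Java sketch of that convention (the dataverse, dataset, and function names below are invented for illustration and are not taken from the patch):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class DependencyShapeSketch {
        // Builds the nested structure stored in the new Dependencies metadata field:
        // slot 0 = dataset dependencies, slot 1 = function dependencies.
        public static List<List<List<String>>> example() {
            List<List<List<String>>> dependencies = new ArrayList<>();
            dependencies.add(new ArrayList<>()); // [dataverse, dataset] entries
            dependencies.add(new ArrayList<>()); // [dataverse, name, arity] entries
            dependencies.get(0).add(Arrays.asList("channels", "roomResults"));
            dependencies.get(1).add(Arrays.asList("channels", "RoomOccupants", "1"));
            return dependencies;
        }
    }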

diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java b/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
index 3aca099..d2d0fa3 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
@@ -45,6 +45,7 @@
     String Duration = "Duration";
     String Function = "Function";
     String FIELD_NAME_ARITY = "Arity";
+    String FIELD_NAME_DEPENDENCIES = "Dependencies";
     String FIELD_NAME_PARAMS = "Params";
     String FIELD_NAME_RETURN_TYPE = "ReturnType";
     String FIELD_NAME_DEFINITION = "Definition";
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
index fa0c0da..7a085a3 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
@@ -21,6 +21,8 @@
 import java.util.List;
 
 import org.apache.asterix.algebra.base.ILangExtension;
+import org.apache.asterix.bad.metadata.AllChannelsSearchKey;
+import org.apache.asterix.bad.metadata.AllProceduresSearchKey;
 import org.apache.asterix.bad.metadata.Broker;
 import org.apache.asterix.bad.metadata.BrokerSearchKey;
 import org.apache.asterix.bad.metadata.Channel;
@@ -111,6 +113,11 @@
         return MetadataManager.INSTANCE.getEntities(mdTxnCtx, brokerSearchKey);
     }
 
+    public static List<Channel> getAllChannels(MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
+        AllChannelsSearchKey channelSearchKey = new AllChannelsSearchKey();
+        return MetadataManager.INSTANCE.getEntities(mdTxnCtx, channelSearchKey);
+    }
+
     public static List<Channel> getChannels(MetadataTransactionContext mdTxnCtx, String dataverseName)
             throws AlgebricksException {
         DataverseChannelsSearchKey channelSearchKey = new DataverseChannelsSearchKey(dataverseName);
@@ -123,4 +130,9 @@
         return MetadataManager.INSTANCE.getEntities(mdTxnCtx, proceduresSearchKey);
     }
 
+    public static List<Procedure> getAllProcedures(MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
+        AllProceduresSearchKey proceduresSearchKey = new AllProceduresSearchKey();
+        return MetadataManager.INSTANCE.getEntities(mdTxnCtx, proceduresSearchKey);
+    }
+
 }
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
index 28f7f00..8c7143f 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
@@ -21,8 +21,8 @@
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 
-import org.apache.asterix.app.translator.RequestParameters;
 import org.apache.asterix.app.translator.QueryTranslator;
+import org.apache.asterix.app.translator.RequestParameters;
 import org.apache.asterix.bad.lang.statement.BrokerDropStatement;
 import org.apache.asterix.bad.lang.statement.ChannelDropStatement;
 import org.apache.asterix.bad.lang.statement.ProcedureDropStatement;
@@ -30,16 +30,21 @@
 import org.apache.asterix.bad.metadata.Channel;
 import org.apache.asterix.bad.metadata.Procedure;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.statement.DataverseDropStatement;
+import org.apache.asterix.lang.common.statement.DropDatasetStatement;
+import org.apache.asterix.lang.common.statement.FunctionDropStatement;
+import org.apache.asterix.lang.common.statement.IndexDropStatement;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.translator.IRequestParameters;
 import org.apache.asterix.translator.SessionOutput;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 
 public class BADStatementExecutor extends QueryTranslator {
@@ -49,39 +54,170 @@
         super(appCtx, statements, output, compliationProvider, executorService);
     }
 
+    //TODO: Most of this file could go away if we had metadata dependencies
+
+    private void checkIfDatasetIsInUse(MetadataTransactionContext mdTxnCtx, String dataverse, String dataset)
+            throws CompilationException, AlgebricksException {
+        List<Channel> channels = BADLangExtension.getAllChannels(mdTxnCtx);
+        for (Channel channel : channels) {
+            List<List<List<String>>> dependencies = channel.getDependencies();
+            List<List<String>> datasetDependencies = dependencies.get(0);
+            for (List<String> dependency : datasetDependencies) {
+                if (dependency.get(0).equals(dataverse) && dependency.get(1).equals(dataset)) {
+                    throw new CompilationException("Cannot alter dataset " + dataverse + "." + dataset + ". "
+                            + channel.getChannelId() + " depends on it!");
+                }
+            }
+
+        }
+        List<Procedure> procedures = BADLangExtension.getAllProcedures(mdTxnCtx);
+        for (Procedure procedure : procedures) {
+            List<List<List<String>>> dependencies = procedure.getDependencies();
+            List<List<String>> datasetDependencies = dependencies.get(0);
+            for (List<String> dependency : datasetDependencies) {
+                if (dependency.get(0).equals(dataverse) && dependency.get(1).equals(dataset)) {
+                    throw new CompilationException("Cannot alter dataset " + dataverse + "." + dataset + ". "
+                            + procedure.getEntityId() + " depends on it!");
+                }
+            }
+
+        }
+    }
+
+    @Override
+    public void handleDatasetDropStatement(MetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        String dvId = getActiveDataverse(((DropDatasetStatement) stmt).getDataverseName());
+        Identifier dsId = ((DropDatasetStatement) stmt).getDatasetName();
+
+        checkIfDatasetIsInUse(mdTxnCtx, dvId, dsId.getValue());
+
+        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleDatasetDropStatement(metadataProvider, stmt, hcc, requestParameters);
+    }
+
+    @Override
+    protected void handleIndexDropStatement(MetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        String dvId = getActiveDataverse(((IndexDropStatement) stmt).getDataverseName());
+        Identifier dsId = ((IndexDropStatement) stmt).getDatasetName();
+
+        checkIfDatasetIsInUse(mdTxnCtx, dvId, dsId.getValue());
+
+        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleIndexDropStatement(metadataProvider, stmt, hcc, requestParameters);
+    }
+
+    @Override
+    protected void handleFunctionDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        FunctionSignature sig = ((FunctionDropStatement) stmt).getFunctionSignature();
+
+        String dvId = getActiveDataverseName(sig.getNamespace());
+        String function = sig.getName();
+        String arity = Integer.toString(sig.getArity());
+
+        List<Channel> channels = BADLangExtension.getAllChannels(mdTxnCtx);
+        for (Channel channel : channels) {
+            List<List<List<String>>> dependencies = channel.getDependencies();
+            List<List<String>> functionDependencies = dependencies.get(1);
+            for (List<String> dependency : functionDependencies) {
+                if (dependency.get(0).equals(dvId) && dependency.get(1).equals(function)
+                        && dependency.get(2).equals(arity)) {
+                    throw new CompilationException(
+                            "Cannot drop function " + sig + ". " + channel.getChannelId() + " depends on it!");
+                }
+            }
+
+        }
+        List<Procedure> procedures = BADLangExtension.getAllProcedures(mdTxnCtx);
+        for (Procedure procedure : procedures) {
+            List<List<List<String>>> dependencies = procedure.getDependencies();
+            List<List<String>> functionDependencies = dependencies.get(1);
+            for (List<String> dependency : functionDependencies) {
+                if (dependency.get(0).equals(dvId) && dependency.get(1).equals(function)
+                        && dependency.get(2).equals(arity)) {
+                    throw new CompilationException(
+                            "Cannot drop function " + sig + ". " + procedure.getEntityId() + " depends on it!");
+                }
+            }
+
+        }
+
+        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleFunctionDropStatement(metadataProvider, stmt);
+    }
+
     @Override
     protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
-        //TODO: Remove this when metadata dependencies are in place
-        //TODO: Stop dataset drop when dataset used by channel
-        super.handleDataverseDropStatement(metadataProvider, stmt, hcc);
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         Identifier dvId = ((DataverseDropStatement) stmt).getDataverseName();
-        List<Broker> brokers = BADLangExtension.getBrokers(mdTxnCtx, dvId.getValue());
         MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse());
         tempMdProvider.getConfig().putAll(metadataProvider.getConfig());
-        final IRequestParameters requestParameters = new RequestParameters(null, null, null, null, null, null);
-        for (Broker broker : brokers) {
-            tempMdProvider.getLocks().reset();
-            BrokerDropStatement drop = new BrokerDropStatement(dvId, new Identifier(broker.getBrokerName()), false);
-            drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
-        }
-        List<Channel> channels = BADLangExtension.getChannels(mdTxnCtx, dvId.getValue());
+        List<Channel> channels = BADLangExtension.getAllChannels(mdTxnCtx);
         for (Channel channel : channels) {
+            if (channel.getChannelId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
+            List<List<List<String>>> dependencies = channel.getDependencies();
+            for (List<List<String>> dependencyList : dependencies) {
+                for (List<String> dependency : dependencyList) {
+                    if (dependency.get(0).equals(dvId.getValue())) {
+                        throw new CompilationException("Cannot drop dataverse " + dvId.getValue() + ". "
+                                + channel.getChannelId() + " depends on it!");
+                    }
+                }
+            }
+        }
+        List<Procedure> procedures = BADLangExtension.getAllProcedures(mdTxnCtx);
+        for (Procedure procedure : procedures) {
+            if (procedure.getEntityId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
+            List<List<List<String>>> dependencies = procedure.getDependencies();
+            for (List<List<String>> dependencyList : dependencies) {
+                for (List<String> dependency : dependencyList) {
+                    if (dependency.get(0).equals(dvId.getValue())) {
+                        throw new CompilationException("Cannot drop dataverse " + dvId.getValue() + ". "
+                                + procedure.getEntityId() + " depends on it!");
+                    }
+                }
+            }
+        }
+        final IRequestParameters requestParameters = new RequestParameters(null, null, null, null, null, null);
+        for (Channel channel : channels) {
+            if (!channel.getChannelId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
             tempMdProvider.getLocks().reset();
             ChannelDropStatement drop =
                     new ChannelDropStatement(dvId, new Identifier(channel.getChannelId().getEntityName()), false);
             drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
         }
-        List<Procedure> procedures = BADLangExtension.getProcedures(mdTxnCtx, dvId.getValue());
         for (Procedure procedure : procedures) {
+            if (!procedure.getEntityId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
             tempMdProvider.getLocks().reset();
             ProcedureDropStatement drop = new ProcedureDropStatement(new FunctionSignature(dvId.getValue(),
                     procedure.getEntityId().getEntityName(), procedure.getArity()), false);
             drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
         }
+        List<Broker> brokers = BADLangExtension.getBrokers(mdTxnCtx, dvId.getValue());
+        for (Broker broker : brokers) {
+            tempMdProvider.getLocks().reset();
+            BrokerDropStatement drop = new BrokerDropStatement(dvId, new Identifier(broker.getBrokerName()), false);
+            drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
+        }
         MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleDataverseDropStatement(metadataProvider, stmt, hcc);
     }
 
 }
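
The four overridden drop handlers above repeat the same nested scan over getDependencies(). Purely for clarity, here is a sketch of a shared helper they could call; it is not part of the patch and assumes each stored dependency entry is exactly the qualified name being matched:

    import java.util.List;

    public class DependencyMatch {
        // slot 0 = dataset dependencies, slot 1 = function dependencies;
        // e.g. dependsOn(channel.getDependencies(), 0, Arrays.asList(dataverse, dataset))
        //      dependsOn(procedure.getDependencies(), 1, Arrays.asList(dataverse, name, arity))
        static boolean dependsOn(List<List<List<String>>> dependencies, int slot, List<String> target) {
            for (List<String> dependency : dependencies.get(slot)) {
                if (dependency.equals(target)) {
                    return true;
                }
            }
            return false;
        }
    }
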
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
index 80355c0..e4b6d89 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
@@ -139,8 +139,9 @@
             tempMdProvider.getConfig().putAll(metadataProvider.getConfig());
             //Drop the Channel Datasets
             //TODO: Need to find some way to handle if this fails.
-            //TODO: Prevent datasets for Channels from being dropped elsewhere
 
+            //Remove the Channel Metadata
+            MetadataManager.INSTANCE.deleteEntity(mdTxnCtx, channel);
             DropDatasetStatement dropStmt = new DropDatasetStatement(new Identifier(dataverse),
                     new Identifier(channel.getResultsDatasetName()), true);
             ((QueryTranslator) statementExecutor).handleDatasetDropStatement(tempMdProvider, dropStmt, hcc, null);
@@ -148,9 +149,6 @@
             dropStmt = new DropDatasetStatement(new Identifier(dataverse),
                     new Identifier(channel.getSubscriptionsDataset()), true);
             ((QueryTranslator) statementExecutor).handleDatasetDropStatement(tempMdProvider, dropStmt, hcc, null);
-
-            //Remove the Channel Metadata
-            MetadataManager.INSTANCE.deleteEntity(mdTxnCtx, channel);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
             e.printStackTrace();
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
index 3864248..feaa3ca 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
@@ -47,7 +47,6 @@
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.common.metadata.IDataset;
 import org.apache.asterix.lang.common.base.Expression;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.expression.CallExpr;
@@ -83,15 +82,15 @@
 public class CreateChannelStatement implements IExtensionStatement {
 
     private static final Logger LOGGER = Logger.getLogger(CreateChannelStatement.class.getName());
-
-    private final Identifier dataverseName;
     private final Identifier channelName;
     private final FunctionSignature function;
     private final CallExpr period;
+    private Identifier dataverseName;
     private String duration;
     private InsertStatement channelResultsInsertQuery;
     private String subscriptionsTableName;
     private String resultsTableName;
+    private String dataverse;
 
     public CreateChannelStatement(Identifier dataverseName, Identifier channelName, FunctionSignature function,
             Expression period) {
@@ -144,7 +143,7 @@
         return null;
     }
 
-    public void initialize(MetadataTransactionContext mdTxnCtx, String subscriptionsTableName, String resultsTableName)
+    public void initialize(MetadataTransactionContext mdTxnCtx)
             throws AlgebricksException, HyracksDataException {
         Function lookup = MetadataManager.INSTANCE.getFunction(mdTxnCtx, function);
         if (lookup == null) {
@@ -160,8 +159,6 @@
         ByteArrayOutputStream bos = new ByteArrayOutputStream();
         DataOutputStream outputStream = new DataOutputStream(bos);
         durationParser.parse(duration.toCharArray(), 0, duration.toCharArray().length, outputStream);
-        this.resultsTableName = resultsTableName;
-        this.subscriptionsTableName = subscriptionsTableName;
     }
 
     @Override
@@ -169,9 +166,8 @@
         return Kind.EXTENSION;
     }
 
-    private void createDatasets(IStatementExecutor statementExecutor, Identifier subscriptionsName,
-            Identifier resultsName, MetadataProvider metadataProvider, IHyracksClientConnection hcc,
-            IHyracksDataset hdc, String dataverse) throws AsterixException, Exception {
+    private void createDatasets(IStatementExecutor statementExecutor, MetadataProvider metadataProvider,
+            IHyracksClientConnection hcc) throws AsterixException, Exception {
 
         Identifier subscriptionsTypeName = new Identifier(BADConstants.ChannelSubscriptionsType);
         Identifier resultsTypeName = new Identifier(BADConstants.ChannelResultsType);
@@ -183,7 +179,7 @@
         fieldNames.add(BADConstants.SubscriptionId);
         partitionFields.add(fieldNames);
         IDatasetDetailsDecl idd = new InternalDetailsDecl(partitionFields, keyIndicators, true, null);
-        DatasetDecl createSubscriptionsDataset = new DatasetDecl(new Identifier(dataverse), subscriptionsName,
+        DatasetDecl createSubscriptionsDataset = new DatasetDecl(dataverseName, new Identifier(subscriptionsTableName),
                 new Identifier(BADConstants.BAD_DATAVERSE_NAME), subscriptionsTypeName, null, null, null,
                 new HashMap<String, String>(), DatasetType.INTERNAL, idd, null, true);
 
@@ -193,15 +189,15 @@
         fieldNames.add(BADConstants.ResultId);
         partitionFields.add(fieldNames);
         idd = new InternalDetailsDecl(partitionFields, keyIndicators, true, null);
-        DatasetDecl createResultsDataset = new DatasetDecl(new Identifier(dataverse), resultsName,
+        DatasetDecl createResultsDataset = new DatasetDecl(dataverseName, new Identifier(resultsTableName),
                 new Identifier(BADConstants.BAD_DATAVERSE_NAME), resultsTypeName, null, null, null,
                 new HashMap<String, String>(), DatasetType.INTERNAL, idd, null, true);
 
         //Create an index on timestamp for results
         CreateIndexStatement createTimeIndex = new CreateIndexStatement();
-        createTimeIndex.setDatasetName(resultsName);
-        createTimeIndex.setDataverseName(new Identifier(dataverse));
-        createTimeIndex.setIndexName(new Identifier(resultsName + "TimeIndex"));
+        createTimeIndex.setDatasetName(new Identifier(resultsTableName));
+        createTimeIndex.setDataverseName(dataverseName);
+        createTimeIndex.setIndexName(new Identifier(resultsTableName + "TimeIndex"));
         createTimeIndex.setIfNotExists(false);
         createTimeIndex.setIndexType(IndexType.BTREE);
         createTimeIndex.setEnforced(false);
@@ -226,18 +222,17 @@
 
     }
 
-    private JobSpecification createChannelJob(IStatementExecutor statementExecutor, Identifier subscriptionsName,
-            Identifier resultsName, MetadataProvider metadataProvider, IHyracksClientConnection hcc,
-            IHyracksDataset hdc, Stats stats, String dataverse) throws Exception {
+    private JobSpecification createChannelJob(IStatementExecutor statementExecutor, MetadataProvider metadataProvider,
+            IHyracksClientConnection hcc, IHyracksDataset hdc, Stats stats) throws Exception {
         StringBuilder builder = new StringBuilder();
         builder.append("SET inline_with \"false\";\n");
-        builder.append("insert into " + dataverse + "." + resultsName);
+        builder.append("insert into " + dataverse + "." + resultsTableName);
         builder.append(" as a (\n" + "with " + BADConstants.ChannelExecutionTime + " as current_datetime() \n");
         builder.append("select result, ");
         builder.append(BADConstants.ChannelExecutionTime + ", ");
         builder.append("sub." + BADConstants.SubscriptionId + " as " + BADConstants.SubscriptionId + ",");
         builder.append("current_datetime() as " + BADConstants.DeliveryTime + "\n");
-        builder.append("from " + dataverse + "." + subscriptionsName + " sub,\n");
+        builder.append("from " + dataverse + "." + subscriptionsTableName + " sub,\n");
         builder.append(BADConstants.BAD_DATAVERSE_NAME + "." + BADConstants.BROKER_KEYWORD + " b, \n");
         builder.append(function.getNamespace() + "." + function.getName() + "(");
         int i = 0;
@@ -281,13 +276,12 @@
         //3. Create the metadata entry for the channel
 
         //TODO: Figure out how to handle when a subset of the 3 tasks fails
-        //TODO: The compiled job will break if anything changes on the function or two datasets
-        // Need to make sure we do proper checking when altering these things
 
-        String dataverse = ((QueryTranslator) statementExecutor).getActiveDataverse(dataverseName);
+        dataverseName = new Identifier(((QueryTranslator) statementExecutor).getActiveDataverse(dataverseName));
+        dataverse = dataverseName.getValue();
+        subscriptionsTableName = channelName + BADConstants.subscriptionEnding;
+        resultsTableName = channelName + BADConstants.resultsEnding;
 
-        Identifier subscriptionsName = new Identifier(channelName + BADConstants.subscriptionEnding);
-        Identifier resultsName = new Identifier(channelName + BADConstants.resultsEnding);
         EntityId entityId = new EntityId(BADConstants.CHANNEL_EXTENSION_NAME, dataverse, channelName.getValue());
         ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
         ActiveNotificationHandler activeEventHandler =
@@ -310,15 +304,13 @@
             if (alreadyActive) {
                 throw new AsterixException("Channel " + channelName + " is already running");
             }
-            initialize(mdTxnCtx, subscriptionsName.getValue(), resultsName.getValue());
-            channel = new Channel(dataverse, channelName.getValue(), subscriptionsTableName, resultsTableName, function,
-                    duration);
+            initialize(mdTxnCtx);
 
             //check if names are available before creating anything
-            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, subscriptionsName.getValue()) != null) {
+            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, subscriptionsTableName) != null) {
                 throw new AsterixException("The channel name:" + channelName + " is not available.");
             }
-            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsName.getValue()) != null) {
+            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsTableName) != null) {
                 throw new AsterixException("The channel name:" + channelName + " is not available.");
             }
             MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getApplicationContext(),
@@ -327,24 +319,21 @@
             final IHyracksDataset hdc = requestContext.getHyracksDataset();
             final Stats stats = requestContext.getStats();
             //Create Channel Datasets
-            createDatasets(statementExecutor, subscriptionsName, resultsName, tempMdProvider, hcc, hdc, dataverse);
+            createDatasets(statementExecutor, tempMdProvider, hcc);
             tempMdProvider.getLocks().reset();
             //Create Channel Internal Job
-            JobSpecification channeljobSpec = createChannelJob(statementExecutor, subscriptionsName, resultsName,
-                    tempMdProvider, hcc, hdc, stats, dataverse);
+            JobSpecification channeljobSpec = createChannelJob(statementExecutor, tempMdProvider, hcc, hdc, stats);
 
             // Now we subscribe
             if (listener == null) {
-                List<IDataset> datasets = new ArrayList<>();
-                datasets.add(MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, subscriptionsName.getValue()));
-                datasets.add(MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsName.getValue()));
-                //TODO: Add datasets used by channel function
-                listener = new DeployedJobSpecEventListener(appCtx, entityId, PrecompiledType.CHANNEL, datasets, null,
+                listener = new DeployedJobSpecEventListener(appCtx, entityId, PrecompiledType.CHANNEL, null,
                         "BadListener");
                 activeEventHandler.registerListener(listener);
             }
 
             setupExecutorJob(entityId, channeljobSpec, hcc, listener);
+            channel = new Channel(dataverse, channelName.getValue(), subscriptionsTableName, resultsTableName, function,
+                    duration, null);
 
             MetadataManager.INSTANCE.addEntity(mdTxnCtx, channel);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
index b93f778..cd60b1a 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
@@ -21,6 +21,7 @@
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -47,11 +48,14 @@
 import org.apache.asterix.lang.common.expression.VariableExpr;
 import org.apache.asterix.lang.common.literal.StringLiteral;
 import org.apache.asterix.lang.common.statement.DeleteStatement;
+import org.apache.asterix.lang.common.statement.InsertStatement;
 import org.apache.asterix.lang.common.statement.Query;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.lang.common.struct.VarIdentifier;
+import org.apache.asterix.lang.common.util.FunctionUtil;
 import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.rewrites.SqlppRewriterFactory;
 import org.apache.asterix.lang.sqlpp.visitor.SqlppDeleteRewriteVisitor;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -85,6 +89,7 @@
     private final List<VariableExpr> varList;
     private final CallExpr period;
     private String duration = "";
+    private List<List<List<String>>> dependencies;
 
     public CreateProcedureStatement(FunctionSignature signature, List<VarIdentifier> parameterList,
             List<Integer> paramIds, String functionBody, Statement procedureBodyStatement, Expression period) {
@@ -98,6 +103,9 @@
             this.varList.add(new VariableExpr(new VarIdentifier(parameterList.get(i).toString(), paramIds.get(i))));
         }
         this.period = (CallExpr) period;
+        this.dependencies = new ArrayList<>();
+        this.dependencies.add(new ArrayList<>());
+        this.dependencies.add(new ArrayList<>());
     }
 
     public String getProcedureBody() {
@@ -195,6 +203,10 @@
             if (!varList.isEmpty()) {
                 throw new CompilationException("Insert procedures cannot have parameters");
             }
+            InsertStatement insertStatement = (InsertStatement) getProcedureBodyStatement();
+            dependencies.get(0).add(Arrays.asList(
+                    ((QueryTranslator) statementExecutor).getActiveDataverse(insertStatement.getDataverseName()),
+                    insertStatement.getDatasetName().getValue()));
             return new Pair<>(
                     ((QueryTranslator) statementExecutor).handleInsertUpsertStatement(metadataProvider,
                             getProcedureBodyStatement(), hcc, hdc, ResultDelivery.ASYNC, null, stats, true, null),
@@ -202,9 +214,14 @@
         } else if (getProcedureBodyStatement().getKind() == Statement.Kind.QUERY) {
             Query s = (Query) getProcedureBodyStatement();
             addLets((SelectExpression) s.getBody());
+            SqlppRewriterFactory fact = new SqlppRewriterFactory();
+            dependencies.get(1).addAll(FunctionUtil.getFunctionDependencies(fact.createQueryRewriter(),
+                    ((Query) getProcedureBodyStatement()).getBody(), metadataProvider).get(1));
             Pair<JobSpecification, PrecompiledType> pair = new Pair<>(
                     compileQueryJob(statementExecutor, metadataProvider, hcc, (Query) getProcedureBodyStatement()),
                     PrecompiledType.QUERY);
+            dependencies.get(0).addAll(FunctionUtil.getFunctionDependencies(fact.createQueryRewriter(),
+                    ((Query) getProcedureBodyStatement()).getBody(), metadataProvider).get(0));
             metadataProvider.getLocks().unlock();
             return pair;
         } else if (getProcedureBodyStatement().getKind() == Statement.Kind.DELETE) {
@@ -212,8 +229,15 @@
             getProcedureBodyStatement().accept(visitor, null);
             DeleteStatement delete = (DeleteStatement) getProcedureBodyStatement();
             addLets((SelectExpression) delete.getQuery().getBody());
-            return new Pair<>(((QueryTranslator) statementExecutor).handleDeleteStatement(metadataProvider,
+
+            SqlppRewriterFactory fact = new SqlppRewriterFactory();
+            dependencies = FunctionUtil.getFunctionDependencies(fact.createQueryRewriter(), delete.getQuery().getBody(),
+                    metadataProvider);
+
+            Pair<JobSpecification, PrecompiledType> pair =
+                    new Pair<>(((QueryTranslator) statementExecutor).handleDeleteStatement(metadataProvider,
                     getProcedureBodyStatement(), hcc, true), PrecompiledType.DELETE);
+            return pair;
         } else {
             throw new CompilationException("Procedure can only execute a single delete, insert, or query");
         }
@@ -256,8 +280,6 @@
             if (alreadyActive) {
                 throw new AsterixException("Procedure " + signature.getName() + " is already running");
             }
-            procedure = new Procedure(dataverse, signature.getName(), signature.getArity(), getParamList(),
-                    Function.RETURNTYPE_VOID, getProcedureBody(), Function.LANGUAGE_AQL, duration);
             MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getApplicationContext(),
                     metadataProvider.getDefaultDataverse());
             tempMdProvider.getConfig().putAll(metadataProvider.getConfig());
@@ -279,16 +301,16 @@
 
             // Now we subscribe
             if (listener == null) {
-                //TODO: Add datasets used by channel function
-                listener = new DeployedJobSpecEventListener(appCtx, entityId, procedureJobSpec.second,
-                        new ArrayList<>(),
-                        null, "BadListener");
+                listener = new DeployedJobSpecEventListener(appCtx, entityId, procedureJobSpec.second, null,
+                        "BadListener");
                 activeEventHandler.registerListener(listener);
             }
-            setupDeployedJobSpec(entityId, procedureJobSpec.first, hcc, listener, tempMdProvider.getResultSetId(),
-                    hdc,
+            setupDeployedJobSpec(entityId, procedureJobSpec.first, hcc, listener, tempMdProvider.getResultSetId(), hdc,
                     stats);
 
+            procedure = new Procedure(dataverse, signature.getName(), signature.getArity(), getParamList(),
+                    Function.RETURNTYPE_VOID, getProcedureBody(), Function.LANGUAGE_AQL, duration, dependencies);
+
             MetadataManager.INSTANCE.addEntity(mdTxnCtx, procedure);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
new file mode 100644
index 0000000..62f16c7
--- /dev/null
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.bad.metadata;
+
+import org.apache.asterix.metadata.api.ExtensionMetadataDatasetId;
+import org.apache.asterix.metadata.api.IExtensionMetadataSearchKey;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class AllChannelsSearchKey implements IExtensionMetadataSearchKey {
+    private static final long serialVersionUID = 1L;
+
+    public AllChannelsSearchKey() {
+    }
+
+    @Override
+    public ExtensionMetadataDatasetId getDatasetId() {
+        return BADMetadataIndexes.BAD_CHANNEL_INDEX_ID;
+    }
+
+    @Override
+    public ITupleReference getSearchKey() {
+        return null;
+    }
+}
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
new file mode 100644
index 0000000..6b995fb
--- /dev/null
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.bad.metadata;
+
+import org.apache.asterix.metadata.api.ExtensionMetadataDatasetId;
+import org.apache.asterix.metadata.api.IExtensionMetadataSearchKey;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class AllProceduresSearchKey implements IExtensionMetadataSearchKey {
+    private static final long serialVersionUID = 1L;
+
+    public AllProceduresSearchKey() {
+    }
+
+    @Override
+    public ExtensionMetadataDatasetId getDatasetId() {
+        return BADMetadataIndexes.BAD_PROCEDURE_INDEX_ID;
+    }
+
+    @Override
+    public ITupleReference getSearchKey() {
+        return null;
+    }
+}
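
Both new search keys return null from getSearchKey(); the metadata node is assumed (that behavior lives outside this patch) to treat a null key as an unrestricted scan of the extension dataset, which is what makes the getAllChannels/getAllProcedures helpers possible. The same pattern would presumably extend to other BAD entities; a hypothetical all-brokers key is sketched below (BAD_BROKER_INDEX_ID is an assumed constant name, not taken from this patch):

    package org.apache.asterix.bad.metadata;

    import org.apache.asterix.metadata.api.ExtensionMetadataDatasetId;
    import org.apache.asterix.metadata.api.IExtensionMetadataSearchKey;
    import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;

    public class AllBrokersSearchKey implements IExtensionMetadataSearchKey {
        private static final long serialVersionUID = 1L;

        @Override
        public ExtensionMetadataDatasetId getDatasetId() {
            return BADMetadataIndexes.BAD_BROKER_INDEX_ID; // assumed constant name
        }

        @Override
        public ITupleReference getSearchKey() {
            return null; // null search key = match every tuple in this dataset
        }
    }
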
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
index 0430118..526e091 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
@@ -49,15 +49,19 @@
     public static final int CHANNEL_ARECORD_RESULTS_NAME_FIELD_INDEX = 3;
     public static final int CHANNEL_ARECORD_FUNCTION_FIELD_INDEX = 4;
     public static final int CHANNEL_ARECORD_DURATION_FIELD_INDEX = 5;
+    public static final int CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX = 6;
     public static final ARecordType CHANNEL_RECORDTYPE = MetadataRecordTypes.createRecordType(
             // RecordTypeName
             BADConstants.RECORD_TYPENAME_CHANNEL,
             // FieldNames
             new String[] { BADConstants.DataverseName, BADConstants.ChannelName, BADConstants.SubscriptionsDatasetName,
-                    BADConstants.ResultsDatasetName, BADConstants.Function, BADConstants.Duration },
+                    BADConstants.ResultsDatasetName, BADConstants.Function, BADConstants.Duration,
+                    BADConstants.FIELD_NAME_DEPENDENCIES },
             // FieldTypes
             new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                    BuiltinType.ASTRING, BuiltinType.ASTRING },
+                    new AOrderedListType(BuiltinType.ASTRING, null), BuiltinType.ASTRING,
+                    new AOrderedListType(new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null),
+                            null) },
             //IsOpen?
             true);
     //------------------------------------------ Broker ----------------------------------------//
@@ -84,17 +88,21 @@
     public static final int PROCEDURE_ARECORD_PROCEDURE_DEFINITION_FIELD_INDEX = 5;
     public static final int PROCEDURE_ARECORD_PROCEDURE_LANGUAGE_FIELD_INDEX = 6;
     public static final int PROCEDURE_ARECORD_PROCEDURE_DURATION_FIELD_INDEX = 7;
+    public static final int PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX = 8;
     public static final ARecordType PROCEDURE_RECORDTYPE = MetadataRecordTypes.createRecordType(
             // RecordTypeName
             BADConstants.RECORD_TYPENAME_PROCEDURE,
             // FieldNames
             new String[] { BADConstants.DataverseName, BADConstants.ProcedureName, BADConstants.FIELD_NAME_ARITY,
                     BADConstants.FIELD_NAME_PARAMS, BADConstants.FIELD_NAME_RETURN_TYPE,
-                    BADConstants.FIELD_NAME_DEFINITION, BADConstants.FIELD_NAME_LANGUAGE, BADConstants.Duration },
+                    BADConstants.FIELD_NAME_DEFINITION, BADConstants.FIELD_NAME_LANGUAGE, BADConstants.Duration,
+                    BADConstants.FIELD_NAME_DEPENDENCIES },
             // FieldTypes
             new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
                     new AOrderedListType(BuiltinType.ASTRING, null), BuiltinType.ASTRING, BuiltinType.ASTRING,
-                    BuiltinType.ASTRING, BuiltinType.ASTRING },
+                    BuiltinType.ASTRING, BuiltinType.ASTRING,
+                    new AOrderedListType(new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null),
+                            null) },
             //IsOpen?
             true);
 
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
index b201af6..05af9c0 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
@@ -15,6 +15,10 @@
 
 package org.apache.asterix.bad.metadata;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.asterix.active.EntityId;
 import org.apache.asterix.bad.BADConstants;
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -34,18 +38,41 @@
     private final String resultsDatasetName;
     private final String duration;
     private final FunctionSignature function;
+    private final List<String> functionAsPath;
+    private final List<List<List<String>>> dependencies;
 
     public Channel(String dataverseName, String channelName, String subscriptionsDataset, String resultsDataset,
-            FunctionSignature function, String duration) {
+            FunctionSignature function, String duration, List<List<List<String>>> dependencies) {
         this.channelId = new EntityId(BADConstants.CHANNEL_EXTENSION_NAME, dataverseName, channelName);
         this.function = function;
         this.duration = duration;
         this.resultsDatasetName = resultsDataset;
         this.subscriptionsDatasetName = subscriptionsDataset;
+        if (this.function.getNamespace() == null) {
+            this.function.setNamespace(dataverseName);
+        }
+        functionAsPath = Arrays.asList(this.function.getNamespace(), this.function.getName(),
+                Integer.toString(this.function.getArity()));
+        if (dependencies == null) {
+            this.dependencies = new ArrayList<>();
+            this.dependencies.add(new ArrayList<>());
+            this.dependencies.add(new ArrayList<>());
+            List<String> resultsList = Arrays.asList(dataverseName, resultsDatasetName);
+            List<String> subscriptionList = Arrays.asList(dataverseName, subscriptionsDatasetName);
+            this.dependencies.get(0).add(resultsList);
+            this.dependencies.get(0).add(subscriptionList);
+            this.dependencies.get(1).add(functionAsPath);
+        } else {
+            this.dependencies = dependencies;
+        }
     }
 
     public EntityId getChannelId() {
         return channelId;
+    }
+
+    public List<List<List<String>>> getDependencies() {
+        return dependencies;
     }
 
     public String getSubscriptionsDataset() {
@@ -60,6 +87,10 @@
         return duration;
     }
 
+    public List<String> getFunctionAsPath() {
+        return functionAsPath;
+    }
+
     public FunctionSignature getFunction() {
         return function;
     }
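
When the dependencies argument is null (as in CreateChannelStatement above), the new Channel constructor derives default dependency lists from the channel's own datasets and function. A small sketch of what that yields; the entity names and duration below are invented for illustration:

    import java.util.List;

    import org.apache.asterix.bad.metadata.Channel;
    import org.apache.asterix.common.functions.FunctionSignature;

    public class ChannelDefaultDependencies {
        public static void main(String[] args) {
            FunctionSignature fn = new FunctionSignature("channels", "RoomOccupants", 1);
            Channel channel = new Channel("channels", "RoomOccupantsChannel",
                    "RoomOccupantsChannelSubscriptions", "RoomOccupantsChannelResults",
                    fn, "PT10S", null);
            List<List<List<String>>> deps = channel.getDependencies();
            // deps.get(0) -> [[channels, RoomOccupantsChannelResults],
            //                 [channels, RoomOccupantsChannelSubscriptions]]
            // deps.get(1) -> [[channels, RoomOccupants, 1]]
            System.out.println(deps);
        }
    }
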
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
index d577260..14db134 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
@@ -18,15 +18,23 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
+import java.util.ArrayList;
+import java.util.List;
 
+import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.metadata.entitytupletranslators.AbstractTupleTranslator;
+import org.apache.asterix.om.base.AOrderedList;
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.base.IACursor;
+import org.apache.asterix.om.types.AOrderedListType;
+import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -42,11 +50,16 @@
     // Payload field containing serialized feed.
     public static final int CHANNEL_PAYLOAD_TUPLE_FIELD_INDEX = 2;
 
-    @SuppressWarnings("unchecked")
     private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(BADMetadataRecordTypes.CHANNEL_RECORDTYPE);
 
-    @SuppressWarnings("unchecked")
+    private transient OrderedListBuilder dependenciesListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyNameListBuilder = new OrderedListBuilder();
+    private transient AOrderedListType stringList = new AOrderedListType(BuiltinType.ASTRING, null);
+    private transient AOrderedListType listOfLists =
+            new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null);
+
     public ChannelTupleTranslator(boolean getTuple) {
         super(getTuple, BADMetadataIndexes.NUM_FIELDS_CHANNEL_IDX);
     }
@@ -74,30 +87,47 @@
         String resultsName =
                 ((AString) channelRecord.getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_RESULTS_NAME_FIELD_INDEX))
                         .getStringValue();
-        String fName =
-                ((AString) channelRecord.getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_FUNCTION_FIELD_INDEX))
-                        .getStringValue();
+
+        IACursor cursor = ((AOrderedList) channelRecord
+                .getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_FUNCTION_FIELD_INDEX)).getCursor();
+        List<String> functionSignature = new ArrayList<>();
+        while (cursor.next()) {
+            functionSignature.add(((AString) cursor.get()).getStringValue());
+        }
+
         String duration =
                 ((AString) channelRecord.getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_DURATION_FIELD_INDEX))
                         .getStringValue();
 
-        FunctionSignature signature = null;
+        IACursor dependenciesCursor = ((AOrderedList) channelRecord
+                .getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX)).getCursor();
+        List<List<List<String>>> dependencies = new ArrayList<>();
+        AOrderedList dependencyList;
+        AOrderedList qualifiedList;
+        int i = 0;
+        while (dependenciesCursor.next()) {
+            dependencies.add(new ArrayList<>());
+            dependencyList = (AOrderedList) dependenciesCursor.get();
+            IACursor qualifiedDependencyCursor = dependencyList.getCursor();
+            int j = 0;
+            while (qualifiedDependencyCursor.next()) {
+                qualifiedList = (AOrderedList) qualifiedDependencyCursor.get();
+                IACursor qualifiedNameCursor = qualifiedList.getCursor();
+                dependencies.get(i).add(new ArrayList<>());
+                while (qualifiedNameCursor.next()) {
+                    dependencies.get(i).get(j).add(((AString) qualifiedNameCursor.get()).getStringValue());
+                }
+                j++;
+            }
+            i++;
 
-        String[] qnameComponents = fName.split("\\.");
-        String functionDataverse;
-        String functionName;
-        if (qnameComponents.length == 2) {
-            functionDataverse = qnameComponents[0];
-            functionName = qnameComponents[1];
-        } else {
-            functionDataverse = dataverseName;
-            functionName = qnameComponents[0];
         }
 
-        String[] nameComponents = functionName.split("@");
-        signature = new FunctionSignature(functionDataverse, nameComponents[0], Integer.parseInt(nameComponents[1]));
+        FunctionSignature signature = new FunctionSignature(functionSignature.get(0), functionSignature.get(1),
+                Integer.parseInt(functionSignature.get(2)));
 
-        channel = new Channel(dataverseName, channelName, subscriptionsName, resultsName, signature, duration);
+        channel = new Channel(dataverseName, channelName, subscriptionsName, resultsName, signature, duration,
+                dependencies);
         return channel;
     }
 
@@ -141,9 +171,17 @@
         recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_RESULTS_NAME_FIELD_INDEX, fieldValue);
 
         // write field 4
+        OrderedListBuilder listBuilder = new OrderedListBuilder();
+        ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+        listBuilder.reset(stringList);
+        for (String pathPart : channel.getFunctionAsPath()) {
+            itemValue.reset();
+            aString.setValue(pathPart);
+            stringSerde.serialize(aString, itemValue.getDataOutput());
+            listBuilder.addItem(itemValue);
+        }
         fieldValue.reset();
-        aString.setValue(channel.getFunction().toString());
-        stringSerde.serialize(aString, fieldValue.getDataOutput());
+        listBuilder.write(fieldValue.getDataOutput(), true);
         recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_FUNCTION_FIELD_INDEX, fieldValue);
 
         // write field 5
@@ -152,6 +190,33 @@
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_DURATION_FIELD_INDEX, fieldValue);
 
+        // write field 6
+        dependenciesListBuilder.reset((AOrderedListType) BADMetadataRecordTypes.CHANNEL_RECORDTYPE
+                .getFieldTypes()[BADMetadataRecordTypes.CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX]);
+        List<List<List<String>>> dependenciesList = channel.getDependencies();
+        for (List<List<String>> dependencies : dependenciesList) {
+            dependencyListBuilder.reset(listOfLists);
+            for (List<String> dependency : dependencies) {
+                dependencyNameListBuilder.reset(stringList);
+                for (String subName : dependency) {
+                    itemValue.reset();
+                    aString.setValue(subName);
+                    stringSerde.serialize(aString, itemValue.getDataOutput());
+                    dependencyNameListBuilder.addItem(itemValue);
+                }
+                itemValue.reset();
+                dependencyNameListBuilder.write(itemValue.getDataOutput(), true);
+                dependencyListBuilder.addItem(itemValue);
+
+            }
+            itemValue.reset();
+            dependencyListBuilder.write(itemValue.getDataOutput(), true);
+            dependenciesListBuilder.addItem(itemValue);
+        }
+        fieldValue.reset();
+        dependenciesListBuilder.write(fieldValue.getDataOutput(), true);
+        recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX, fieldValue);
+
         // write record
         recordBuilder.write(tupleBuilder.getDataOutput(), true);
 
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
index 950612c..13f9e0d 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
@@ -73,7 +73,6 @@
     protected final List<IActiveEntityEventSubscriber> subscribers = new ArrayList<>();
     protected final ICcApplicationContext appCtx;
     protected final EntityId entityId;
-    protected final List<IDataset> datasets;
     protected final ActiveEvent statsUpdatedEvent;
     protected long statsTimestamp;
     protected String stats;
@@ -83,10 +82,9 @@
     protected int numRegistered;
 
     public DeployedJobSpecEventListener(ICcApplicationContext appCtx, EntityId entityId, PrecompiledType type,
-            List<IDataset> datasets, AlgebricksAbsolutePartitionConstraint locations, String runtimeName) {
+            AlgebricksAbsolutePartitionConstraint locations, String runtimeName) {
         this.appCtx = appCtx;
         this.entityId = entityId;
-        this.datasets = datasets;
         this.state = ActivityState.STOPPED;
         this.statsTimestamp = -1;
         this.statsRequestState = RequestState.INIT;
@@ -133,7 +131,7 @@
 
     @Override
     public boolean isEntityUsingDataset(IDataset dataset) {
-        return datasets.contains(dataset);
+        return false;
     }
 
     public JobId getJobId() {
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
index e3ed7fc..6391da4 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.bad.metadata;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.asterix.active.EntityId;
@@ -39,9 +40,10 @@
     private final String returnType;
     private final String language;
     private final String duration;
+    private final List<List<List<String>>> dependencies;
 
     public Procedure(String dataverseName, String functionName, int arity, List<String> params, String returnType,
-            String functionBody, String language, String duration) {
+            String functionBody, String language, String duration, List<List<List<String>>> dependencies) {
         this.procedureId = new EntityId(BADConstants.PROCEDURE_KEYWORD, dataverseName, functionName);
         this.params = params;
         this.body = functionBody;
@@ -49,6 +51,13 @@
         this.language = language;
         this.arity = arity;
         this.duration = duration;
+        if (dependencies == null) {
+            this.dependencies = new ArrayList<>();
+            this.dependencies.add(new ArrayList<>());
+            this.dependencies.add(new ArrayList<>());
+        } else {
+            this.dependencies = dependencies;
+        }
     }
 
     public EntityId getEntityId() {
@@ -79,6 +88,10 @@
         return duration;
     }
 
+    public List<List<List<String>>> getDependencies() {
+        return dependencies;
+    }
+
     @Override
     public boolean equals(Object other) {
         if (this == other) {
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
index 1aa633f..0a6acb9 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
@@ -34,6 +34,7 @@
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.base.IACursor;
 import org.apache.asterix.om.types.AOrderedListType;
+import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
@@ -54,9 +55,15 @@
     // Payload field containing serialized Procedure.
     public static final int PROCEDURE_PAYLOAD_TUPLE_FIELD_INDEX = 3;
 
-    @SuppressWarnings("unchecked")
     private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(BADMetadataRecordTypes.PROCEDURE_RECORDTYPE);
+
+    private transient OrderedListBuilder dependenciesListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyNameListBuilder = new OrderedListBuilder();
+    private transient AOrderedListType stringList = new AOrderedListType(BuiltinType.ASTRING, null);
+    private transient AOrderedListType ListofLists =
+            new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null);
 
     protected ProcedureTupleTranslator(boolean getTuple) {
         super(getTuple, BADMetadataIndexes.NUM_FIELDS_PROCEDURE_IDX);
@@ -104,8 +111,32 @@
                 .getValueByPos(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_DURATION_FIELD_INDEX))
                         .getStringValue();
 
+        IACursor dependenciesCursor = ((AOrderedList) procedureRecord
+                .getValueByPos(BADMetadataRecordTypes.PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX)).getCursor();
+        List<List<List<String>>> dependencies = new ArrayList<>();
+        AOrderedList dependencyList;
+        AOrderedList qualifiedList;
+        int i = 0;
+        while (dependenciesCursor.next()) {
+            dependencies.add(new ArrayList<>());
+            dependencyList = (AOrderedList) dependenciesCursor.get();
+            IACursor qualifiedDependencyCursor = dependencyList.getCursor();
+            int j = 0;
+            while (qualifiedDependencyCursor.next()) {
+                qualifiedList = (AOrderedList) qualifiedDependencyCursor.get();
+                IACursor qualifiedNameCursor = qualifiedList.getCursor();
+                dependencies.get(i).add(new ArrayList<>());
+                while (qualifiedNameCursor.next()) {
+                    dependencies.get(i).get(j).add(((AString) qualifiedNameCursor.get()).getStringValue());
+                }
+                j++;
+            }
+            i++;
+
+        }
+
         return new Procedure(dataverseName, procedureName, Integer.parseInt(arity), params, returnType, definition,
-                language, duration);
+                language, duration, dependencies);
 
     }
 
@@ -185,6 +216,33 @@
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_DURATION_FIELD_INDEX, fieldValue);
 
+        // write field 8
+        dependenciesListBuilder.reset((AOrderedListType) BADMetadataRecordTypes.PROCEDURE_RECORDTYPE
+                .getFieldTypes()[BADMetadataRecordTypes.PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX]);
+        List<List<List<String>>> dependenciesList = procedure.getDependencies();
+        for (List<List<String>> dependencies : dependenciesList) {
+            dependencyListBuilder.reset(ListofLists);
+            for (List<String> dependency : dependencies) {
+                dependencyNameListBuilder.reset(stringList);
+                for (String subName : dependency) {
+                    itemValue.reset();
+                    aString.setValue(subName);
+                    stringSerde.serialize(aString, itemValue.getDataOutput());
+                    dependencyNameListBuilder.addItem(itemValue);
+                }
+                itemValue.reset();
+                dependencyNameListBuilder.write(itemValue.getDataOutput(), true);
+                dependencyListBuilder.addItem(itemValue);
+
+            }
+            itemValue.reset();
+            dependencyListBuilder.write(itemValue.getDataOutput(), true);
+            dependenciesListBuilder.addItem(itemValue);
+        }
+        fieldValue.reset();
+        dependenciesListBuilder.write(fieldValue.getDataOutput(), true);
+        recordBuilder.addField(BADMetadataRecordTypes.PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX, fieldValue);
+
         // write record
         recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
diff --git a/asterix-bad/src/main/resources/lang-extension/lang.txt b/asterix-bad/src/main/resources/lang-extension/lang.txt
index 7c5931c..02aba78 100644
--- a/asterix-bad/src/main/resources/lang-extension/lang.txt
+++ b/asterix-bad/src/main/resources/lang-extension/lang.txt
@@ -129,9 +129,14 @@
   Token endPos;
   Statement functionBodyExpr;
   Expression period = null;
+  String currentDataverse = defaultDataverse;
+  createNewScope();
 }
 {
      "procedure" fctName = FunctionName()
+     {
+        defaultDataverse = fctName.dataverse;
+     }
      paramList = ParameterList()
     <LEFTBRACE>
   {
@@ -149,6 +154,7 @@
       functionBody = extractFragment(beginPos.beginLine, beginPos.beginColumn, endPos.beginLine, endPos.beginColumn);
       signature = new FunctionSignature(fctName.dataverse, fctName.function, paramList.size());
       removeCurrentScope();
+      defaultDataverse = currentDataverse;
     }
   ("period" period = FunctionCallExpr())?
   {
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
new file mode 100644
index 0000000..a5d3775
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Function
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+use channels;
+drop function NearbyTweetsContainingText@2;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
new file mode 100644
index 0000000..d1047b0
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Function Dataverse
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+drop dataverse channels;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
new file mode 100644
index 0000000..432f3c5
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Results
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+drop dataset two.nearbyTweetChannelResults;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
new file mode 100644
index 0000000..f6dc2bf
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Subscriptions
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+drop dataset two.nearbyTweetChannelSubscriptions;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
index fe1db99..638f7b5 100644
--- a/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
@@ -23,6 +23,7 @@
 * Author       : Steven Jacobs
 */
 
+drop dataverse two if exists;
 drop dataverse channels if exists;
 create dataverse channels;
 use channels;
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
new file mode 100644
index 0000000..71b2ed4
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Create Procedure Check Metadata
+* Expected Res : Success
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create dataverse two;
+use two;
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+
+create procedure selectSome(r, otherRoom) {
+select roomNumber from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and channels.really_contains(roomNumber,"l")
+order by id
+};
+
+create procedure deleteSome(r, otherRoom) {
+delete from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and channels.really_contains(roomNumber,"l")
+};
+
+create procedure addMe() {
+  insert into channels.UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
+
+create procedure localSelectSome(r, otherRoom) {
+select roomNumber from UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and really_contains(roomNumber,"l")
+order by id
+};
+
+create procedure localDeleteSome(r, otherRoom) {
+delete from UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and really_contains(roomNumber,"l")
+};
+
+create procedure localAddMe() {
+  insert into UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
new file mode 100644
index 0000000..023c343
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+select value x
+from Metadata.`Procedure` x
+order by x.ProcedureName;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
index 7dbf136..905211f 100644
--- a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
@@ -23,6 +23,7 @@
 * Author       : Steven Jacobs
 */
 
+drop dataverse two if exists;
 drop dataverse channels if exists;
 create dataverse channels;
 use channels;
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
new file mode 100644
index 0000000..1b88b19
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure deleteAll() {
+delete from channels.UserLocations
+};
+
+use channels;
+drop dataset UserLocations;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
new file mode 100644
index 0000000..2322154
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Delete Procedure drop function
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+insert into UserLocations(
+  [{"id":0, "roomNumber":4815162342},
+  {"id":1, "roomNumber":"lost"},
+  {"id":2, "roomNumber":108},
+  {"id":3, "roomNumber":"jacob"}]
+);
+
+create dataverse two;
+use two;
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create procedure deleteSome(r, otherRoom) {
+delete from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and really_contains(roomNumber,"l")
+};
+
+drop function really_contains@2;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
new file mode 100644
index 0000000..ab66a22
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset Index
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create index rooms on UserLocations(roomNumber);
+
+create dataverse two;
+use two;
+
+create procedure deleteAll() {
+delete from channels.UserLocations
+};
+
+use channels;
+drop index UserLocations.rooms;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
new file mode 100644
index 0000000..260afab
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure addMe() {
+  insert into channels.UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
+
+use channels;
+drop dataset UserLocations;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
new file mode 100644
index 0000000..ad47b4e
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset Dataverse
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure addMe() {
+  insert into channels.UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
+
+drop dataverse channels;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
new file mode 100644
index 0000000..acb9f04
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure findMe() {
+select roomNumber from channels.UserLocations
+order by timeStamp
+};
+
+use channels;
+drop dataset UserLocations;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
new file mode 100644
index 0000000..0e648cd
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Query Procedure drop function
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+insert into UserLocations(
+  [{"id":0, "roomNumber":4815162342},
+  {"id":1, "roomNumber":"lost"},
+  {"id":2, "roomNumber":108},
+  {"id":3, "roomNumber":"jacob"}]
+);
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create dataverse two;
+use two;
+
+create procedure selectSome(r, otherRoom) {
+select roomNumber from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and channels.really_contains(roomNumber,"l")
+order by id
+};
+
+
+use channels;
+drop function really_contains@2;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
index 76f7f38..370d516 100644
--- a/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
@@ -23,6 +23,7 @@
 * Author       : Steven Jacobs
 */
 
+drop dataverse two if exists;
 drop dataverse channels if exists;
 create dataverse channels;
 use channels;
diff --git a/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm b/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
index e009733..bee9157 100644
--- a/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
+++ b/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
@@ -1 +1 @@
-{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel", "SubscriptionsDatasetName": "nearbyTweetChannelSubscriptions", "ResultsDatasetName": "nearbyTweetChannelResults", "Function": "channels.NearbyTweetsContainingText@2", "Duration": "PT10M" }
\ No newline at end of file
+{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel", "SubscriptionsDatasetName": "nearbyTweetChannelSubscriptions", "ResultsDatasetName": "nearbyTweetChannelResults", "Function": [ "channels", "NearbyTweetsContainingText", "2" ], "Duration": "PT10M", "Dependencies": [ [ [ "channels", "nearbyTweetChannelResults" ], [ "channels", "nearbyTweetChannelSubscriptions" ] ], [ [ "channels", "NearbyTweetsContainingText", "2" ] ] ] }
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm b/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
index 1da5787..1c492ac 100644
--- a/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
+++ b/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
@@ -1,2 +1,2 @@
-{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel1", "SubscriptionsDatasetName": "nearbyTweetChannel1Subscriptions", "ResultsDatasetName": "nearbyTweetChannel1Results", "Function": "channels.NearbyTweetsContainingText@2", "Duration": "PT10M" }
-{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel3", "SubscriptionsDatasetName": "nearbyTweetChannel3Subscriptions", "ResultsDatasetName": "nearbyTweetChannel3Results", "Function": "channels.NearbyTweetsContainingText@2", "Duration": "PT10M" }
\ No newline at end of file
+{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel1", "SubscriptionsDatasetName": "nearbyTweetChannel1Subscriptions", "ResultsDatasetName": "nearbyTweetChannel1Results", "Function": [ "channels", "NearbyTweetsContainingText", "2" ], "Duration": "PT10M", "Dependencies": [ [ [ "channels", "nearbyTweetChannel1Results" ], [ "channels", "nearbyTweetChannel1Subscriptions" ] ], [ [ "channels", "NearbyTweetsContainingText", "2" ] ] ] }
+{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel3", "SubscriptionsDatasetName": "nearbyTweetChannel3Subscriptions", "ResultsDatasetName": "nearbyTweetChannel3Results", "Function": [ "channels", "NearbyTweetsContainingText", "2" ], "Duration": "PT10M", "Dependencies": [ [ [ "channels", "nearbyTweetChannel3Results" ], [ "channels", "nearbyTweetChannel3Subscriptions" ] ], [ [ "channels", "NearbyTweetsContainingText", "2" ] ] ] }
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm b/asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
new file mode 100644
index 0000000..4308c83
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
@@ -0,0 +1,6 @@
+{ "DataverseName": "two", "ProcedureName": "addMe", "Arity": "0", "Params": [  ], "ReturnType": "VOID", "Definition": "insert into channels.UserLocations([\n    {\"timeStamp\":current_datetime(), \"roomNumber\":222}]\n  )", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "channels", "UserLocations" ] ], [  ] ] }
+{ "DataverseName": "two", "ProcedureName": "deleteSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "delete from channels.UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand channels.really_contains(roomNumber,\"l\")", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "channels", "UserLocations" ] ], [ [ "channels", "really_contains", "2" ] ] ] }
+{ "DataverseName": "two", "ProcedureName": "localAddMe", "Arity": "0", "Params": [  ], "ReturnType": "VOID", "Definition": "insert into UserLocations([\n    {\"timeStamp\":current_datetime(), \"roomNumber\":222}]\n  )", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "two", "UserLocations" ] ], [  ] ] }
+{ "DataverseName": "two", "ProcedureName": "localDeleteSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "delete from UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand really_contains(roomNumber,\"l\")", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "two", "UserLocations" ] ], [ [ "two", "really_contains", "2" ] ] ] }
+{ "DataverseName": "two", "ProcedureName": "localSelectSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "select roomNumber from UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand really_contains(roomNumber,\"l\")\norder by id", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "two", "UserLocations" ] ], [ [ "two", "really_contains", "2" ] ] ] }
+{ "DataverseName": "two", "ProcedureName": "selectSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "select roomNumber from channels.UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand channels.really_contains(roomNumber,\"l\")\norder by id", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "channels", "UserLocations" ] ], [ [ "channels", "really_contains", "2" ] ] ] }
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/testsuite.xml b/asterix-bad/src/test/resources/runtimets/testsuite.xml
index 1b2844b..3c72a14 100644
--- a/asterix-bad/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-bad/src/test/resources/runtimets/testsuite.xml
@@ -22,6 +22,11 @@
              QueryFileExtension=".sqlpp">
   <test-group name="channel">
     <test-case FilePath="procedure">
+      <compilation-unit name="create_procedure_check_metadata">
+        <output-dir compare="Text">create_procedure_check_metadata</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
       <compilation-unit name="delete_procedure">
         <output-dir compare="Text">delete_procedure</output-dir>
       </compilation-unit>
@@ -52,13 +57,50 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="procedure">
-      <compilation-unit name="repetitive_insert_procedure">
-        <output-dir compare="Text">repetitive_insert_procedure</output-dir>
+      <compilation-unit name="insert_procedure_drop_dataset">
+        <output-dir compare="Text">insert_procedure_drop_dataset</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.addMe(Procedure) depends on it!</expected-error>
       </compilation-unit>
     </test-case>
-    <test-case FilePath="channel">
-      <compilation-unit name="room_occupants">
-        <output-dir compare="Text">room_occupants</output-dir>
+    <test-case FilePath="procedure">
+      <compilation-unit name="insert_procedure_drop_dataverse">
+        <output-dir compare="Text">insert_procedure_drop_dataverse</output-dir>
+        <expected-error>Cannot drop dataverse channels. two.addMe(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="delete_procedure_drop_dataset">
+        <output-dir compare="Text">delete_procedure_drop_dataset</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.deleteAll(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="delete_procedure_drop_function">
+        <output-dir compare="Text">delete_procedure_drop_function</output-dir>
+        <expected-error>Cannot drop function two.really_contains@2. two.deleteSome(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="delete_procedure_drop_index">
+        <output-dir compare="Text">delete_procedure_drop_index</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.deleteAll(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="query_procedure_drop_dataset">
+        <output-dir compare="Text">query_procedure_drop_dataset</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.findMe(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="query_procedure_drop_function">
+        <output-dir compare="Text">query_procedure_drop_function</output-dir>
+        <expected-error>Cannot drop function channels.really_contains@2. two.selectSome(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="repetitive_insert_procedure">
+        <output-dir compare="Text">repetitive_insert_procedure</output-dir>
       </compilation-unit>
     </test-case>
     <test-case FilePath="channel">
@@ -87,6 +129,35 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="channel">
+      <compilation-unit name="drop_function">
+        <output-dir compare="Text">drop_function</output-dir>
+        <expected-error>Cannot drop function channels.NearbyTweetsContainingText@2. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="drop_results">
+        <output-dir compare="Text">drop_results</output-dir>
+        <expected-error>Cannot alter dataset two.nearbyTweetChannelResults. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="drop_subscriptions">
+        <output-dir compare="Text">drop_subscriptions</output-dir>
+        <expected-error>Cannot alter dataset two.nearbyTweetChannelSubscriptions. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="drop_function_dataverse">
+        <output-dir compare="Text">drop_function_dataverse</output-dir>
+        <expected-error>Cannot drop dataverse channels. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="room_occupants">
+        <output-dir compare="Text">room_occupants</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
       <compilation-unit name="disasters_with_friends">
         <output-dir compare="Text">disasters_with_friends</output-dir>
       </compilation-unit>

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/2302
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: newchange
Gerrit-Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
Gerrit-PatchSet: 1
Gerrit-Project: asterixdb-bad
Gerrit-Branch: master
Gerrit-Owner: Steven Jacobs <sj...@ucr.edu>

Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Jenkins (Code Review)" <do...@asterixdb.incubator.apache.org>.
Jenkins has posted comments on this change.

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................


Patch Set 2:

Build Started https://asterix-jenkins.ics.uci.edu/job/asterixbad-gerrit/233/


Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Steven Jacobs (Code Review)" <do...@asterixdb.incubator.apache.org>.
Steven Jacobs has posted comments on this change.

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................


Patch Set 1:

(1 comment)

https://asterix-gerrit.ics.uci.edu/#/c/2302/1/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
File asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java:

Line 107:         this.dependencies.add(new ArrayList<>());
> Could you add one or two lines comment to explains the hierarchy here?  In 
Done, I added the description to Channel.java and Procedure.java
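
For reference, the hierarchy stored in the Dependencies field can be sketched as follows. This is an illustrative snippet only, not code from the patch: the class and variable names are made up, but the shape and the sample values match the create_procedure_check_metadata results (element 0 holds dataset dependencies as [dataverse, dataset], element 1 holds function dependencies as [dataverse, name, arity]):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class DependencyShapeExample {
        // Builds the same shape the Dependencies metadata field uses:
        // index 0 -> dataset dependencies, index 1 -> function dependencies,
        // each entry being a qualified-name path.
        public static List<List<List<String>>> example() {
            List<List<List<String>>> dependencies = new ArrayList<>();

            List<List<String>> datasetDeps = new ArrayList<>();
            datasetDeps.add(Arrays.asList("channels", "UserLocations"));          // [dataverse, dataset]

            List<List<String>> functionDeps = new ArrayList<>();
            functionDeps.add(Arrays.asList("channels", "really_contains", "2"));  // [dataverse, function, arity]

            dependencies.add(datasetDeps);   // index 0
            dependencies.add(functionDeps);  // index 1
            return dependencies;
        }
    }

Serialized, this is the same value shown for the deleteSome procedure in create_procedure_check_metadata.1.adm.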



Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Xikui Wang (Code Review)" <do...@asterixdb.incubator.apache.org>.
Xikui Wang has posted comments on this change.

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................


Patch Set 3: Code-Review+2


Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Jenkins (Code Review)" <do...@asterixdb.incubator.apache.org>.
Jenkins has posted comments on this change.

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................


Patch Set 3:

Build Started https://asterix-jenkins.ics.uci.edu/job/asterixbad-gerrit/234/


Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Steven Jacobs (Code Review)" <do...@asterixdb.incubator.apache.org>.
Steven Jacobs has submitted this change and it was merged.

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................


Enable dependencies in the metadata for BAD entities

Allow Channels and Procedures to store dependencies on
Datasets and Functions

Prevent dropping of these dependencies

Add Error tests

Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
---
M asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
M asterix-bad/src/main/resources/lang-extension/lang.txt
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
A asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/testsuite.xml
35 files changed, 1,317 insertions(+), 103 deletions(-)

Approvals:
  Jenkins: Verified
  Xikui Wang: Looks good to me, approved



diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java b/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
index 3aca099..d2d0fa3 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
@@ -45,6 +45,7 @@
     String Duration = "Duration";
     String Function = "Function";
     String FIELD_NAME_ARITY = "Arity";
+    String FIELD_NAME_DEPENDENCIES = "Dependencies";
     String FIELD_NAME_PARAMS = "Params";
     String FIELD_NAME_RETURN_TYPE = "ReturnType";
     String FIELD_NAME_DEFINITION = "Definition";
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
index fa0c0da..7a085a3 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
@@ -21,6 +21,8 @@
 import java.util.List;
 
 import org.apache.asterix.algebra.base.ILangExtension;
+import org.apache.asterix.bad.metadata.AllChannelsSearchKey;
+import org.apache.asterix.bad.metadata.AllProceduresSearchKey;
 import org.apache.asterix.bad.metadata.Broker;
 import org.apache.asterix.bad.metadata.BrokerSearchKey;
 import org.apache.asterix.bad.metadata.Channel;
@@ -111,6 +113,11 @@
         return MetadataManager.INSTANCE.getEntities(mdTxnCtx, brokerSearchKey);
     }
 
+    public static List<Channel> getAllChannels(MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
+        AllChannelsSearchKey channelSearchKey = new AllChannelsSearchKey();
+        return MetadataManager.INSTANCE.getEntities(mdTxnCtx, channelSearchKey);
+    }
+
     public static List<Channel> getChannels(MetadataTransactionContext mdTxnCtx, String dataverseName)
             throws AlgebricksException {
         DataverseChannelsSearchKey channelSearchKey = new DataverseChannelsSearchKey(dataverseName);
@@ -123,4 +130,9 @@
         return MetadataManager.INSTANCE.getEntities(mdTxnCtx, proceduresSearchKey);
     }
 
+    public static List<Procedure> getAllProcedures(MetadataTransactionContext mdTxnCtx) throws AlgebricksException {
+        AllProceduresSearchKey proceduresSearchKey = new AllProceduresSearchKey();
+        return MetadataManager.INSTANCE.getEntities(mdTxnCtx, proceduresSearchKey);
+    }
+
 }
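
The two new helpers pass a search key whose getSearchKey() returns null (AllChannelsSearchKey and AllProceduresSearchKey, added below), so the lookup is not filtered by dataverse and returns every channel or procedure in the extension datasets. A minimal usage sketch, assuming it runs somewhere a metadata transaction can be opened (for example inside an extension statement handler); the printed output is illustrative only:

import org.apache.asterix.bad.lang.BADLangExtension;
import org.apache.asterix.bad.metadata.Channel;
import org.apache.asterix.bad.metadata.Procedure;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;

public class DependencyDumpSketch {
    // Sketch only: enumerate every channel and procedure and print the
    // dependency lists that this change starts persisting with each entity.
    public static void dumpDependencies() throws Exception {
        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
        for (Channel channel : BADLangExtension.getAllChannels(mdTxnCtx)) {
            System.out.println(channel.getChannelId() + " -> " + channel.getDependencies());
        }
        for (Procedure procedure : BADLangExtension.getAllProcedures(mdTxnCtx)) {
            System.out.println(procedure.getEntityId() + " -> " + procedure.getDependencies());
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    }
}
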
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
index 28f7f00..8c7143f 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
@@ -21,8 +21,8 @@
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 
-import org.apache.asterix.app.translator.RequestParameters;
 import org.apache.asterix.app.translator.QueryTranslator;
+import org.apache.asterix.app.translator.RequestParameters;
 import org.apache.asterix.bad.lang.statement.BrokerDropStatement;
 import org.apache.asterix.bad.lang.statement.ChannelDropStatement;
 import org.apache.asterix.bad.lang.statement.ProcedureDropStatement;
@@ -30,16 +30,21 @@
 import org.apache.asterix.bad.metadata.Channel;
 import org.apache.asterix.bad.metadata.Procedure;
 import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.compiler.provider.ILangCompilationProvider;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.statement.DataverseDropStatement;
+import org.apache.asterix.lang.common.statement.DropDatasetStatement;
+import org.apache.asterix.lang.common.statement.FunctionDropStatement;
+import org.apache.asterix.lang.common.statement.IndexDropStatement;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.MetadataProvider;
 import org.apache.asterix.translator.IRequestParameters;
 import org.apache.asterix.translator.SessionOutput;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 
 public class BADStatementExecutor extends QueryTranslator {
@@ -49,39 +54,170 @@
         super(appCtx, statements, output, compliationProvider, executorService);
     }
 
+    //TODO: Most of this file could go away if we had metadata dependencies
+
+    private void checkIfDatasetIsInUse(MetadataTransactionContext mdTxnCtx, String dataverse, String dataset)
+            throws CompilationException, AlgebricksException {
+        List<Channel> channels = BADLangExtension.getAllChannels(mdTxnCtx);
+        for (Channel channel : channels) {
+            List<List<List<String>>> dependencies = channel.getDependencies();
+            List<List<String>> datasetDependencies = dependencies.get(0);
+            for (List<String> dependency : datasetDependencies) {
+                if (dependency.get(0).equals(dataverse) && dependency.get(1).equals(dataset)) {
+                    throw new CompilationException("Cannot alter dataset " + dataverse + "." + dataset + ". "
+                            + channel.getChannelId() + " depends on it!");
+                }
+            }
+
+        }
+        List<Procedure> procedures = BADLangExtension.getAllProcedures(mdTxnCtx);
+        for (Procedure procedure : procedures) {
+            List<List<List<String>>> dependencies = procedure.getDependencies();
+            List<List<String>> datasetDependencies = dependencies.get(0);
+            for (List<String> dependency : datasetDependencies) {
+                if (dependency.get(0).equals(dataverse) && dependency.get(1).equals(dataset)) {
+                    throw new CompilationException("Cannot alter dataset " + dataverse + "." + dataset + ". "
+                            + procedure.getEntityId() + " depends on it!");
+                }
+            }
+
+        }
+    }
+
+    @Override
+    public void handleDatasetDropStatement(MetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        String dvId = getActiveDataverse(((DropDatasetStatement) stmt).getDataverseName());
+        Identifier dsId = ((DropDatasetStatement) stmt).getDatasetName();
+
+        checkIfDatasetIsInUse(mdTxnCtx, dvId, dsId.getValue());
+
+        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleDatasetDropStatement(metadataProvider, stmt, hcc, requestParameters);
+    }
+
+    @Override
+    protected void handleIndexDropStatement(MetadataProvider metadataProvider, Statement stmt,
+            IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        String dvId = getActiveDataverse(((IndexDropStatement) stmt).getDataverseName());
+        Identifier dsId = ((IndexDropStatement) stmt).getDatasetName();
+
+        checkIfDatasetIsInUse(mdTxnCtx, dvId, dsId.getValue());
+
+        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleIndexDropStatement(metadataProvider, stmt, hcc, requestParameters);
+    }
+
+    @Override
+    protected void handleFunctionDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
+        MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+        metadataProvider.setMetadataTxnContext(mdTxnCtx);
+        FunctionSignature sig = ((FunctionDropStatement) stmt).getFunctionSignature();
+
+        String dvId = getActiveDataverseName(sig.getNamespace());
+        String function = sig.getName();
+        String arity = Integer.toString(sig.getArity());
+
+        List<Channel> channels = BADLangExtension.getAllChannels(mdTxnCtx);
+        for (Channel channel : channels) {
+            List<List<List<String>>> dependencies = channel.getDependencies();
+            List<List<String>> datasetDependencies = dependencies.get(1);
+            for (List<String> dependency : datasetDependencies) {
+                if (dependency.get(0).equals(dvId) && dependency.get(1).equals(function)
+                        && dependency.get(2).equals(arity)) {
+                    throw new CompilationException(
+                            "Cannot drop function " + sig + ". " + channel.getChannelId() + " depends on it!");
+                }
+            }
+
+        }
+        List<Procedure> procedures = BADLangExtension.getAllProcedures(mdTxnCtx);
+        for (Procedure procedure : procedures) {
+            List<List<List<String>>> dependencies = procedure.getDependencies();
+            List<List<String>> datasetDependencies = dependencies.get(1);
+            for (List<String> dependency : datasetDependencies) {
+                if (dependency.get(0).equals(dvId) && dependency.get(1).equals(function)
+                        && dependency.get(2).equals(arity)) {
+                    throw new CompilationException(
+                            "Cannot drop function " + sig + ". " + procedure.getEntityId() + " depends on it!");
+                }
+            }
+
+        }
+
+        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleFunctionDropStatement(metadataProvider, stmt);
+    }
+
     @Override
     protected void handleDataverseDropStatement(MetadataProvider metadataProvider, Statement stmt,
             IHyracksClientConnection hcc) throws Exception {
-        //TODO: Remove this when metadata dependencies are in place
-        //TODO: Stop dataset drop when dataset used by channel
-        super.handleDataverseDropStatement(metadataProvider, stmt, hcc);
         MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
         metadataProvider.setMetadataTxnContext(mdTxnCtx);
         Identifier dvId = ((DataverseDropStatement) stmt).getDataverseName();
-        List<Broker> brokers = BADLangExtension.getBrokers(mdTxnCtx, dvId.getValue());
         MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse());
         tempMdProvider.getConfig().putAll(metadataProvider.getConfig());
-        final IRequestParameters requestParameters = new RequestParameters(null, null, null, null, null, null);
-        for (Broker broker : brokers) {
-            tempMdProvider.getLocks().reset();
-            BrokerDropStatement drop = new BrokerDropStatement(dvId, new Identifier(broker.getBrokerName()), false);
-            drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
-        }
-        List<Channel> channels = BADLangExtension.getChannels(mdTxnCtx, dvId.getValue());
+        List<Channel> channels = BADLangExtension.getAllChannels(mdTxnCtx);
         for (Channel channel : channels) {
+            if (channel.getChannelId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
+            List<List<List<String>>> dependencies = channel.getDependencies();
+            for (List<List<String>> dependencyList : dependencies) {
+                for (List<String> dependency : dependencyList) {
+                    if (dependency.get(0).equals(dvId.getValue())) {
+                        throw new CompilationException("Cannot drop dataverse " + dvId.getValue() + ". "
+                                + channel.getChannelId() + " depends on it!");
+                    }
+                }
+            }
+        }
+        List<Procedure> procedures = BADLangExtension.getAllProcedures(mdTxnCtx);
+        for (Procedure procedure : procedures) {
+            if (procedure.getEntityId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
+            List<List<List<String>>> dependencies = procedure.getDependencies();
+            for (List<List<String>> dependencyList : dependencies) {
+                for (List<String> dependency : dependencyList) {
+                    if (dependency.get(0).equals(dvId.getValue())) {
+                        throw new CompilationException("Cannot drop dataverse " + dvId.getValue() + ". "
+                                + procedure.getEntityId() + " depends on it!");
+                    }
+                }
+            }
+        }
+        final IRequestParameters requestParameters = new RequestParameters(null, null, null, null, null, null);
+        for (Channel channel : channels) {
+            if (!channel.getChannelId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
             tempMdProvider.getLocks().reset();
             ChannelDropStatement drop =
                     new ChannelDropStatement(dvId, new Identifier(channel.getChannelId().getEntityName()), false);
             drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
         }
-        List<Procedure> procedures = BADLangExtension.getProcedures(mdTxnCtx, dvId.getValue());
         for (Procedure procedure : procedures) {
+            if (!procedure.getEntityId().getDataverse().equals(dvId.getValue())) {
+                continue;
+            }
             tempMdProvider.getLocks().reset();
             ProcedureDropStatement drop = new ProcedureDropStatement(new FunctionSignature(dvId.getValue(),
                     procedure.getEntityId().getEntityName(), procedure.getArity()), false);
             drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
         }
+        List<Broker> brokers = BADLangExtension.getBrokers(mdTxnCtx, dvId.getValue());
+        for (Broker broker : brokers) {
+            tempMdProvider.getLocks().reset();
+            BrokerDropStatement drop = new BrokerDropStatement(dvId, new Identifier(broker.getBrokerName()), false);
+            drop.handle(hcc, this, requestParameters, tempMdProvider, 0);
+        }
         MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        super.handleDataverseDropStatement(metadataProvider, stmt, hcc);
     }
 
 }
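
The structure probed by checkIfDatasetIsInUse is the two-slot dependency record documented later in Channel.java and Procedure.java: slot 0 holds [DataverseName, DatasetName] pairs, slot 1 holds [DataverseName, FunctionName, Arity] triples. A self-contained sketch of the same dataset check over plain java.util lists, with hypothetical names (SalesDV, SalesData); the real guard additionally raises a CompilationException and runs once per channel and once per procedure:

import java.util.Arrays;
import java.util.List;

public class DatasetGuardSketch {
    // True when dataverse.dataset appears among the dataset dependencies
    // (slot 0) of one entity's dependency record.
    static boolean usesDataset(List<List<List<String>>> dependencies, String dataverse, String dataset) {
        for (List<String> dependency : dependencies.get(0)) {
            if (dependency.get(0).equals(dataverse) && dependency.get(1).equals(dataset)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        // Hypothetical dependency record: two datasets and one unary function.
        List<List<List<String>>> deps = Arrays.asList(
                Arrays.asList(Arrays.asList("SalesDV", "SalesData"),
                        Arrays.asList("SalesDV", "SalesResults")),
                Arrays.asList(Arrays.asList("SalesDV", "recentSales", "1")));
        System.out.println(usesDataset(deps, "SalesDV", "SalesData")); // true  -> the drop is rejected
        System.out.println(usesDataset(deps, "SalesDV", "Archive"));   // false -> the drop proceeds
    }
}
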
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
index 80355c0..e4b6d89 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
@@ -139,8 +139,9 @@
             tempMdProvider.getConfig().putAll(metadataProvider.getConfig());
             //Drop the Channel Datasets
             //TODO: Need to find some way to handle if this fails.
-            //TODO: Prevent datasets for Channels from being dropped elsewhere
 
+            //Remove the Channel Metadata
+            MetadataManager.INSTANCE.deleteEntity(mdTxnCtx, channel);
             DropDatasetStatement dropStmt = new DropDatasetStatement(new Identifier(dataverse),
                     new Identifier(channel.getResultsDatasetName()), true);
             ((QueryTranslator) statementExecutor).handleDatasetDropStatement(tempMdProvider, dropStmt, hcc, null);
@@ -148,9 +149,6 @@
             dropStmt = new DropDatasetStatement(new Identifier(dataverse),
                     new Identifier(channel.getSubscriptionsDataset()), true);
             ((QueryTranslator) statementExecutor).handleDatasetDropStatement(tempMdProvider, dropStmt, hcc, null);
-
-            //Remove the Channel Metadata
-            MetadataManager.INSTANCE.deleteEntity(mdTxnCtx, channel);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
             e.printStackTrace();
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
index 3864248..feaa3ca 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
@@ -47,7 +47,6 @@
 import org.apache.asterix.common.exceptions.CompilationException;
 import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.common.metadata.IDataset;
 import org.apache.asterix.lang.common.base.Expression;
 import org.apache.asterix.lang.common.base.Statement;
 import org.apache.asterix.lang.common.expression.CallExpr;
@@ -83,15 +82,15 @@
 public class CreateChannelStatement implements IExtensionStatement {
 
     private static final Logger LOGGER = Logger.getLogger(CreateChannelStatement.class.getName());
-
-    private final Identifier dataverseName;
     private final Identifier channelName;
     private final FunctionSignature function;
     private final CallExpr period;
+    private Identifier dataverseName;
     private String duration;
     private InsertStatement channelResultsInsertQuery;
     private String subscriptionsTableName;
     private String resultsTableName;
+    private String dataverse;
 
     public CreateChannelStatement(Identifier dataverseName, Identifier channelName, FunctionSignature function,
             Expression period) {
@@ -144,7 +143,7 @@
         return null;
     }
 
-    public void initialize(MetadataTransactionContext mdTxnCtx, String subscriptionsTableName, String resultsTableName)
+    public void initialize(MetadataTransactionContext mdTxnCtx)
             throws AlgebricksException, HyracksDataException {
         Function lookup = MetadataManager.INSTANCE.getFunction(mdTxnCtx, function);
         if (lookup == null) {
@@ -160,8 +159,6 @@
         ByteArrayOutputStream bos = new ByteArrayOutputStream();
         DataOutputStream outputStream = new DataOutputStream(bos);
         durationParser.parse(duration.toCharArray(), 0, duration.toCharArray().length, outputStream);
-        this.resultsTableName = resultsTableName;
-        this.subscriptionsTableName = subscriptionsTableName;
     }
 
     @Override
@@ -169,9 +166,8 @@
         return Kind.EXTENSION;
     }
 
-    private void createDatasets(IStatementExecutor statementExecutor, Identifier subscriptionsName,
-            Identifier resultsName, MetadataProvider metadataProvider, IHyracksClientConnection hcc,
-            IHyracksDataset hdc, String dataverse) throws AsterixException, Exception {
+    private void createDatasets(IStatementExecutor statementExecutor, MetadataProvider metadataProvider,
+            IHyracksClientConnection hcc) throws AsterixException, Exception {
 
         Identifier subscriptionsTypeName = new Identifier(BADConstants.ChannelSubscriptionsType);
         Identifier resultsTypeName = new Identifier(BADConstants.ChannelResultsType);
@@ -183,7 +179,7 @@
         fieldNames.add(BADConstants.SubscriptionId);
         partitionFields.add(fieldNames);
         IDatasetDetailsDecl idd = new InternalDetailsDecl(partitionFields, keyIndicators, true, null);
-        DatasetDecl createSubscriptionsDataset = new DatasetDecl(new Identifier(dataverse), subscriptionsName,
+        DatasetDecl createSubscriptionsDataset = new DatasetDecl(dataverseName, new Identifier(subscriptionsTableName),
                 new Identifier(BADConstants.BAD_DATAVERSE_NAME), subscriptionsTypeName, null, null, null,
                 new HashMap<String, String>(), DatasetType.INTERNAL, idd, null, true);
 
@@ -193,15 +189,15 @@
         fieldNames.add(BADConstants.ResultId);
         partitionFields.add(fieldNames);
         idd = new InternalDetailsDecl(partitionFields, keyIndicators, true, null);
-        DatasetDecl createResultsDataset = new DatasetDecl(new Identifier(dataverse), resultsName,
+        DatasetDecl createResultsDataset = new DatasetDecl(dataverseName, new Identifier(resultsTableName),
                 new Identifier(BADConstants.BAD_DATAVERSE_NAME), resultsTypeName, null, null, null,
                 new HashMap<String, String>(), DatasetType.INTERNAL, idd, null, true);
 
         //Create an index on timestamp for results
         CreateIndexStatement createTimeIndex = new CreateIndexStatement();
-        createTimeIndex.setDatasetName(resultsName);
-        createTimeIndex.setDataverseName(new Identifier(dataverse));
-        createTimeIndex.setIndexName(new Identifier(resultsName + "TimeIndex"));
+        createTimeIndex.setDatasetName(new Identifier(resultsTableName));
+        createTimeIndex.setDataverseName(dataverseName);
+        createTimeIndex.setIndexName(new Identifier(resultsTableName + "TimeIndex"));
         createTimeIndex.setIfNotExists(false);
         createTimeIndex.setIndexType(IndexType.BTREE);
         createTimeIndex.setEnforced(false);
@@ -226,18 +222,17 @@
 
     }
 
-    private JobSpecification createChannelJob(IStatementExecutor statementExecutor, Identifier subscriptionsName,
-            Identifier resultsName, MetadataProvider metadataProvider, IHyracksClientConnection hcc,
-            IHyracksDataset hdc, Stats stats, String dataverse) throws Exception {
+    private JobSpecification createChannelJob(IStatementExecutor statementExecutor, MetadataProvider metadataProvider,
+            IHyracksClientConnection hcc, IHyracksDataset hdc, Stats stats) throws Exception {
         StringBuilder builder = new StringBuilder();
         builder.append("SET inline_with \"false\";\n");
-        builder.append("insert into " + dataverse + "." + resultsName);
+        builder.append("insert into " + dataverse + "." + resultsTableName);
         builder.append(" as a (\n" + "with " + BADConstants.ChannelExecutionTime + " as current_datetime() \n");
         builder.append("select result, ");
         builder.append(BADConstants.ChannelExecutionTime + ", ");
         builder.append("sub." + BADConstants.SubscriptionId + " as " + BADConstants.SubscriptionId + ",");
         builder.append("current_datetime() as " + BADConstants.DeliveryTime + "\n");
-        builder.append("from " + dataverse + "." + subscriptionsName + " sub,\n");
+        builder.append("from " + dataverse + "." + subscriptionsTableName + " sub,\n");
         builder.append(BADConstants.BAD_DATAVERSE_NAME + "." + BADConstants.BROKER_KEYWORD + " b, \n");
         builder.append(function.getNamespace() + "." + function.getName() + "(");
         int i = 0;
@@ -281,13 +276,12 @@
         //3. Create the metadata entry for the channel
 
         //TODO: Figure out how to handle when a subset of the 3 tasks fails
-        //TODO: The compiled job will break if anything changes on the function or two datasets
-        // Need to make sure we do proper checking when altering these things
 
-        String dataverse = ((QueryTranslator) statementExecutor).getActiveDataverse(dataverseName);
+        dataverseName = new Identifier(((QueryTranslator) statementExecutor).getActiveDataverse(dataverseName));
+        dataverse = dataverseName.getValue();
+        subscriptionsTableName = channelName + BADConstants.subscriptionEnding;
+        resultsTableName = channelName + BADConstants.resultsEnding;
 
-        Identifier subscriptionsName = new Identifier(channelName + BADConstants.subscriptionEnding);
-        Identifier resultsName = new Identifier(channelName + BADConstants.resultsEnding);
         EntityId entityId = new EntityId(BADConstants.CHANNEL_EXTENSION_NAME, dataverse, channelName.getValue());
         ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
         ActiveNotificationHandler activeEventHandler =
@@ -310,15 +304,13 @@
             if (alreadyActive) {
                 throw new AsterixException("Channel " + channelName + " is already running");
             }
-            initialize(mdTxnCtx, subscriptionsName.getValue(), resultsName.getValue());
-            channel = new Channel(dataverse, channelName.getValue(), subscriptionsTableName, resultsTableName, function,
-                    duration);
+            initialize(mdTxnCtx);
 
             //check if names are available before creating anything
-            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, subscriptionsName.getValue()) != null) {
+            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, subscriptionsTableName) != null) {
                 throw new AsterixException("The channel name:" + channelName + " is not available.");
             }
-            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsName.getValue()) != null) {
+            if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsTableName) != null) {
                 throw new AsterixException("The channel name:" + channelName + " is not available.");
             }
             MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getApplicationContext(),
@@ -327,24 +319,21 @@
             final IHyracksDataset hdc = requestContext.getHyracksDataset();
             final Stats stats = requestContext.getStats();
             //Create Channel Datasets
-            createDatasets(statementExecutor, subscriptionsName, resultsName, tempMdProvider, hcc, hdc, dataverse);
+            createDatasets(statementExecutor, tempMdProvider, hcc);
             tempMdProvider.getLocks().reset();
             //Create Channel Internal Job
-            JobSpecification channeljobSpec = createChannelJob(statementExecutor, subscriptionsName, resultsName,
-                    tempMdProvider, hcc, hdc, stats, dataverse);
+            JobSpecification channeljobSpec = createChannelJob(statementExecutor, tempMdProvider, hcc, hdc, stats);
 
             // Now we subscribe
             if (listener == null) {
-                List<IDataset> datasets = new ArrayList<>();
-                datasets.add(MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, subscriptionsName.getValue()));
-                datasets.add(MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsName.getValue()));
-                //TODO: Add datasets used by channel function
-                listener = new DeployedJobSpecEventListener(appCtx, entityId, PrecompiledType.CHANNEL, datasets, null,
+                listener = new DeployedJobSpecEventListener(appCtx, entityId, PrecompiledType.CHANNEL, null,
                         "BadListener");
                 activeEventHandler.registerListener(listener);
             }
 
             setupExecutorJob(entityId, channeljobSpec, hcc, listener);
+            channel = new Channel(dataverse, channelName.getValue(), subscriptionsTableName, resultsTableName, function,
+                    duration, null);
 
             MetadataManager.INSTANCE.addEntity(mdTxnCtx, channel);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
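
Everything the channel creates is now derived from the channel name and the active dataverse: the subscriptions dataset is channelName + BADConstants.subscriptionEnding, the results dataset is channelName + BADConstants.resultsEnding, and the BTree index on the results dataset is named resultsTableName + "TimeIndex". A small naming sketch, assuming the two suffix constants resolve to "Subscriptions" and "Results" (their actual values live in BADConstants and are not part of this diff); the dataverse and channel name are hypothetical:

public class ChannelNamingSketch {
    // Assumed values; the real ones are BADConstants.subscriptionEnding / resultsEnding.
    static final String SUBSCRIPTION_ENDING = "Subscriptions";
    static final String RESULTS_ENDING = "Results";

    public static void main(String[] args) {
        String dataverse = "two";                    // hypothetical active dataverse
        String channelName = "roomOccupantsChannel"; // hypothetical channel
        String subscriptionsTableName = channelName + SUBSCRIPTION_ENDING;
        String resultsTableName = channelName + RESULTS_ENDING;
        String timeIndexName = resultsTableName + "TimeIndex";
        System.out.println(dataverse + "." + subscriptionsTableName);
        System.out.println(dataverse + "." + resultsTableName);
        System.out.println(timeIndexName);
    }
}

Both generated datasets and the channel function are then captured as the channel's dependencies: the Channel constructor is called with dependencies == null, so it fills in the defaults shown in Channel.java below.
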
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
index b93f778..cd60b1a 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
@@ -21,6 +21,7 @@
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -47,11 +48,14 @@
 import org.apache.asterix.lang.common.expression.VariableExpr;
 import org.apache.asterix.lang.common.literal.StringLiteral;
 import org.apache.asterix.lang.common.statement.DeleteStatement;
+import org.apache.asterix.lang.common.statement.InsertStatement;
 import org.apache.asterix.lang.common.statement.Query;
 import org.apache.asterix.lang.common.struct.Identifier;
 import org.apache.asterix.lang.common.struct.VarIdentifier;
+import org.apache.asterix.lang.common.util.FunctionUtil;
 import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.rewrites.SqlppRewriterFactory;
 import org.apache.asterix.lang.sqlpp.visitor.SqlppDeleteRewriteVisitor;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -85,6 +89,7 @@
     private final List<VariableExpr> varList;
     private final CallExpr period;
     private String duration = "";
+    private List<List<List<String>>> dependencies;
 
     public CreateProcedureStatement(FunctionSignature signature, List<VarIdentifier> parameterList,
             List<Integer> paramIds, String functionBody, Statement procedureBodyStatement, Expression period) {
@@ -98,6 +103,9 @@
             this.varList.add(new VariableExpr(new VarIdentifier(parameterList.get(i).toString(), paramIds.get(i))));
         }
         this.period = (CallExpr) period;
+        this.dependencies = new ArrayList<>();
+        this.dependencies.add(new ArrayList<>());
+        this.dependencies.add(new ArrayList<>());
     }
 
     public String getProcedureBody() {
@@ -195,6 +203,10 @@
             if (!varList.isEmpty()) {
                 throw new CompilationException("Insert procedures cannot have parameters");
             }
+            InsertStatement insertStatement = (InsertStatement) getProcedureBodyStatement();
+            dependencies.get(0).add(Arrays.asList(
+                    ((QueryTranslator) statementExecutor).getActiveDataverse(insertStatement.getDataverseName()),
+                    insertStatement.getDatasetName().getValue()));
             return new Pair<>(
                     ((QueryTranslator) statementExecutor).handleInsertUpsertStatement(metadataProvider,
                             getProcedureBodyStatement(), hcc, hdc, ResultDelivery.ASYNC, null, stats, true, null),
@@ -202,9 +214,14 @@
         } else if (getProcedureBodyStatement().getKind() == Statement.Kind.QUERY) {
             Query s = (Query) getProcedureBodyStatement();
             addLets((SelectExpression) s.getBody());
+            SqlppRewriterFactory fact = new SqlppRewriterFactory();
+            dependencies.get(1).addAll(FunctionUtil.getFunctionDependencies(fact.createQueryRewriter(),
+                    ((Query) getProcedureBodyStatement()).getBody(), metadataProvider).get(1));
             Pair<JobSpecification, PrecompiledType> pair = new Pair<>(
                     compileQueryJob(statementExecutor, metadataProvider, hcc, (Query) getProcedureBodyStatement()),
                     PrecompiledType.QUERY);
+            dependencies.get(0).addAll(FunctionUtil.getFunctionDependencies(fact.createQueryRewriter(),
+                    ((Query) getProcedureBodyStatement()).getBody(), metadataProvider).get(0));
             metadataProvider.getLocks().unlock();
             return pair;
         } else if (getProcedureBodyStatement().getKind() == Statement.Kind.DELETE) {
@@ -212,8 +229,15 @@
             getProcedureBodyStatement().accept(visitor, null);
             DeleteStatement delete = (DeleteStatement) getProcedureBodyStatement();
             addLets((SelectExpression) delete.getQuery().getBody());
-            return new Pair<>(((QueryTranslator) statementExecutor).handleDeleteStatement(metadataProvider,
+
+            SqlppRewriterFactory fact = new SqlppRewriterFactory();
+            dependencies = FunctionUtil.getFunctionDependencies(fact.createQueryRewriter(), delete.getQuery().getBody(),
+                    metadataProvider);
+
+            Pair<JobSpecification, PrecompiledType> pair =
+                    new Pair<>(((QueryTranslator) statementExecutor).handleDeleteStatement(metadataProvider,
                     getProcedureBodyStatement(), hcc, true), PrecompiledType.DELETE);
+            return pair;
         } else {
             throw new CompilationException("Procedure can only execute a single delete, insert, or query");
         }
@@ -256,8 +280,6 @@
             if (alreadyActive) {
                 throw new AsterixException("Procedure " + signature.getName() + " is already running");
             }
-            procedure = new Procedure(dataverse, signature.getName(), signature.getArity(), getParamList(),
-                    Function.RETURNTYPE_VOID, getProcedureBody(), Function.LANGUAGE_AQL, duration);
             MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getApplicationContext(),
                     metadataProvider.getDefaultDataverse());
             tempMdProvider.getConfig().putAll(metadataProvider.getConfig());
@@ -279,16 +301,16 @@
 
             // Now we subscribe
             if (listener == null) {
-                //TODO: Add datasets used by channel function
-                listener = new DeployedJobSpecEventListener(appCtx, entityId, procedureJobSpec.second,
-                        new ArrayList<>(),
-                        null, "BadListener");
+                listener = new DeployedJobSpecEventListener(appCtx, entityId, procedureJobSpec.second, null,
+                        "BadListener");
                 activeEventHandler.registerListener(listener);
             }
-            setupDeployedJobSpec(entityId, procedureJobSpec.first, hcc, listener, tempMdProvider.getResultSetId(),
-                    hdc,
+            setupDeployedJobSpec(entityId, procedureJobSpec.first, hcc, listener, tempMdProvider.getResultSetId(), hdc,
                     stats);
 
+            procedure = new Procedure(dataverse, signature.getName(), signature.getArity(), getParamList(),
+                    Function.RETURNTYPE_VOID, getProcedureBody(), Function.LANGUAGE_AQL, duration, dependencies);
+
             MetadataManager.INSTANCE.addEntity(mdTxnCtx, procedure);
             MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
         } catch (Exception e) {
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
new file mode 100644
index 0000000..62f16c7
--- /dev/null
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.bad.metadata;
+
+import org.apache.asterix.metadata.api.ExtensionMetadataDatasetId;
+import org.apache.asterix.metadata.api.IExtensionMetadataSearchKey;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class AllChannelsSearchKey implements IExtensionMetadataSearchKey {
+    private static final long serialVersionUID = 1L;
+
+    public AllChannelsSearchKey() {
+    }
+
+    @Override
+    public ExtensionMetadataDatasetId getDatasetId() {
+        return BADMetadataIndexes.BAD_CHANNEL_INDEX_ID;
+    }
+
+    @Override
+    public ITupleReference getSearchKey() {
+        return null;
+    }
+}
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
new file mode 100644
index 0000000..6b995fb
--- /dev/null
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.bad.metadata;
+
+import org.apache.asterix.metadata.api.ExtensionMetadataDatasetId;
+import org.apache.asterix.metadata.api.IExtensionMetadataSearchKey;
+import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class AllProceduresSearchKey implements IExtensionMetadataSearchKey {
+    private static final long serialVersionUID = 1L;
+
+    public AllProceduresSearchKey() {
+    }
+
+    @Override
+    public ExtensionMetadataDatasetId getDatasetId() {
+        return BADMetadataIndexes.BAD_PROCEDURE_INDEX_ID;
+    }
+
+    @Override
+    public ITupleReference getSearchKey() {
+        return null;
+    }
+}
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
index 0430118..526e091 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
@@ -49,15 +49,19 @@
     public static final int CHANNEL_ARECORD_RESULTS_NAME_FIELD_INDEX = 3;
     public static final int CHANNEL_ARECORD_FUNCTION_FIELD_INDEX = 4;
     public static final int CHANNEL_ARECORD_DURATION_FIELD_INDEX = 5;
+    public static final int CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX = 6;
     public static final ARecordType CHANNEL_RECORDTYPE = MetadataRecordTypes.createRecordType(
             // RecordTypeName
             BADConstants.RECORD_TYPENAME_CHANNEL,
             // FieldNames
             new String[] { BADConstants.DataverseName, BADConstants.ChannelName, BADConstants.SubscriptionsDatasetName,
-                    BADConstants.ResultsDatasetName, BADConstants.Function, BADConstants.Duration },
+                    BADConstants.ResultsDatasetName, BADConstants.Function, BADConstants.Duration,
+                    BADConstants.FIELD_NAME_DEPENDENCIES },
             // FieldTypes
             new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                    BuiltinType.ASTRING, BuiltinType.ASTRING },
+                    new AOrderedListType(BuiltinType.ASTRING, null), BuiltinType.ASTRING,
+                    new AOrderedListType(new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null),
+                            null) },
             //IsOpen?
             true);
     //------------------------------------------ Broker ----------------------------------------//
@@ -84,17 +88,21 @@
     public static final int PROCEDURE_ARECORD_PROCEDURE_DEFINITION_FIELD_INDEX = 5;
     public static final int PROCEDURE_ARECORD_PROCEDURE_LANGUAGE_FIELD_INDEX = 6;
     public static final int PROCEDURE_ARECORD_PROCEDURE_DURATION_FIELD_INDEX = 7;
+    public static final int PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX = 8;
     public static final ARecordType PROCEDURE_RECORDTYPE = MetadataRecordTypes.createRecordType(
             // RecordTypeName
             BADConstants.RECORD_TYPENAME_PROCEDURE,
             // FieldNames
             new String[] { BADConstants.DataverseName, BADConstants.ProcedureName, BADConstants.FIELD_NAME_ARITY,
                     BADConstants.FIELD_NAME_PARAMS, BADConstants.FIELD_NAME_RETURN_TYPE,
-                    BADConstants.FIELD_NAME_DEFINITION, BADConstants.FIELD_NAME_LANGUAGE, BADConstants.Duration },
+                    BADConstants.FIELD_NAME_DEFINITION, BADConstants.FIELD_NAME_LANGUAGE, BADConstants.Duration,
+                    BADConstants.FIELD_NAME_DEPENDENCIES },
             // FieldTypes
             new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
                     new AOrderedListType(BuiltinType.ASTRING, null), BuiltinType.ASTRING, BuiltinType.ASTRING,
-                    BuiltinType.ASTRING, BuiltinType.ASTRING },
+                    BuiltinType.ASTRING, BuiltinType.ASTRING,
+                    new AOrderedListType(new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null),
+                            null) },
             //IsOpen?
             true);
 
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
index b201af6..5f7dad0 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
@@ -15,6 +15,10 @@
 
 package org.apache.asterix.bad.metadata;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import org.apache.asterix.active.EntityId;
 import org.apache.asterix.bad.BADConstants;
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -34,18 +38,48 @@
     private final String resultsDatasetName;
     private final String duration;
     private final FunctionSignature function;
+    private final List<String> functionAsPath;
+    /*
+    Dependencies are stored as an array of size two:
+    element 0 is a list of dataset dependencies
+    -stored as lists of [DataverseName, Dataset] for the datasets
+    element 1 is a list of function dependencies
+    -stored as lists of [DataverseName, FunctionName, Arity] for the functions
+    */
+    private final List<List<List<String>>> dependencies;
 
     public Channel(String dataverseName, String channelName, String subscriptionsDataset, String resultsDataset,
-            FunctionSignature function, String duration) {
+            FunctionSignature function, String duration, List<List<List<String>>> dependencies) {
         this.channelId = new EntityId(BADConstants.CHANNEL_EXTENSION_NAME, dataverseName, channelName);
         this.function = function;
         this.duration = duration;
         this.resultsDatasetName = resultsDataset;
         this.subscriptionsDatasetName = subscriptionsDataset;
+        if (this.function.getNamespace() == null) {
+            this.function.setNamespace(dataverseName);
+        }
+        functionAsPath = Arrays.asList(this.function.getNamespace(), this.function.getName(),
+                Integer.toString(this.function.getArity()));
+        if (dependencies == null) {
+            this.dependencies = new ArrayList<>();
+            this.dependencies.add(new ArrayList<>());
+            this.dependencies.add(new ArrayList<>());
+            List<String> resultsList = Arrays.asList(dataverseName, resultsDatasetName);
+            List<String> subscriptionList = Arrays.asList(dataverseName, subscriptionsDatasetName);
+            this.dependencies.get(0).add(resultsList);
+            this.dependencies.get(0).add(subscriptionList);
+            this.dependencies.get(1).add(functionAsPath);
+        } else {
+            this.dependencies = dependencies;
+        }
     }
 
     public EntityId getChannelId() {
         return channelId;
+    }
+
+    public List<List<List<String>>> getDependencies() {
+        return dependencies;
     }
 
     public String getSubscriptionsDataset() {
@@ -60,6 +94,10 @@
         return duration;
     }
 
+    public List<String> getFunctionAsPath() {
+        return functionAsPath;
+    }
+
     public FunctionSignature getFunction() {
         return function;
     }
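
With dependencies == null (the path taken by CreateChannelStatement), the constructor above records the channel's own results and subscriptions datasets in slot 0 and its function in slot 1. A standalone sketch of the resulting value, with hypothetical names; functionAsPath is [DataverseName, FunctionName, Arity] with the arity stored as a string:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DefaultChannelDependenciesSketch {
    public static void main(String[] args) {
        String dataverse = "two";                                          // hypothetical
        String resultsDataset = "roomOccupantsChannelResults";             // hypothetical
        String subscriptionsDataset = "roomOccupantsChannelSubscriptions"; // hypothetical
        List<String> functionAsPath = Arrays.asList("two", "roomOccupants", "1");

        List<List<List<String>>> dependencies = new ArrayList<>();
        dependencies.add(new ArrayList<>()); // slot 0: dataset dependencies
        dependencies.add(new ArrayList<>()); // slot 1: function dependencies
        dependencies.get(0).add(Arrays.asList(dataverse, resultsDataset));
        dependencies.get(0).add(Arrays.asList(dataverse, subscriptionsDataset));
        dependencies.get(1).add(functionAsPath);

        // [[[two, roomOccupantsChannelResults], [two, roomOccupantsChannelSubscriptions]],
        //  [[two, roomOccupants, 1]]]
        System.out.println(dependencies);
    }
}
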
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
index d577260..14db134 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
@@ -18,15 +18,23 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
+import java.util.ArrayList;
+import java.util.List;
 
+import org.apache.asterix.builders.OrderedListBuilder;
 import org.apache.asterix.common.exceptions.MetadataException;
 import org.apache.asterix.common.functions.FunctionSignature;
 import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
 import org.apache.asterix.metadata.entitytupletranslators.AbstractTupleTranslator;
+import org.apache.asterix.om.base.AOrderedList;
 import org.apache.asterix.om.base.ARecord;
 import org.apache.asterix.om.base.AString;
+import org.apache.asterix.om.base.IACursor;
+import org.apache.asterix.om.types.AOrderedListType;
+import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 /**
@@ -42,11 +50,16 @@
     // Payload field containing serialized feed.
     public static final int CHANNEL_PAYLOAD_TUPLE_FIELD_INDEX = 2;
 
-    @SuppressWarnings("unchecked")
     private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(BADMetadataRecordTypes.CHANNEL_RECORDTYPE);
 
-    @SuppressWarnings("unchecked")
+    private transient OrderedListBuilder dependenciesListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyNameListBuilder = new OrderedListBuilder();
+    private transient AOrderedListType stringList = new AOrderedListType(BuiltinType.ASTRING, null);
+    private transient AOrderedListType ListofLists =
+            new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null);
+
     public ChannelTupleTranslator(boolean getTuple) {
         super(getTuple, BADMetadataIndexes.NUM_FIELDS_CHANNEL_IDX);
     }
@@ -74,30 +87,47 @@
         String resultsName =
                 ((AString) channelRecord.getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_RESULTS_NAME_FIELD_INDEX))
                         .getStringValue();
-        String fName =
-                ((AString) channelRecord.getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_FUNCTION_FIELD_INDEX))
-                        .getStringValue();
+
+        IACursor cursor = ((AOrderedList) channelRecord
+                .getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_FUNCTION_FIELD_INDEX)).getCursor();
+        List<String> functionSignature = new ArrayList<>();
+        while (cursor.next()) {
+            functionSignature.add(((AString) cursor.get()).getStringValue());
+        }
+
         String duration =
                 ((AString) channelRecord.getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_DURATION_FIELD_INDEX))
                         .getStringValue();
 
-        FunctionSignature signature = null;
+        IACursor dependenciesCursor = ((AOrderedList) channelRecord
+                .getValueByPos(BADMetadataRecordTypes.CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX)).getCursor();
+        List<List<List<String>>> dependencies = new ArrayList<>();
+        AOrderedList dependencyList;
+        AOrderedList qualifiedList;
+        int i = 0;
+        while (dependenciesCursor.next()) {
+            dependencies.add(new ArrayList<>());
+            dependencyList = (AOrderedList) dependenciesCursor.get();
+            IACursor qualifiedDependencyCursor = dependencyList.getCursor();
+            int j = 0;
+            while (qualifiedDependencyCursor.next()) {
+                qualifiedList = (AOrderedList) qualifiedDependencyCursor.get();
+                IACursor qualifiedNameCursor = qualifiedList.getCursor();
+                dependencies.get(i).add(new ArrayList<>());
+                while (qualifiedNameCursor.next()) {
+                    dependencies.get(i).get(j).add(((AString) qualifiedNameCursor.get()).getStringValue());
+                }
+                j++;
+            }
+            i++;
 
-        String[] qnameComponents = fName.split("\\.");
-        String functionDataverse;
-        String functionName;
-        if (qnameComponents.length == 2) {
-            functionDataverse = qnameComponents[0];
-            functionName = qnameComponents[1];
-        } else {
-            functionDataverse = dataverseName;
-            functionName = qnameComponents[0];
         }
 
-        String[] nameComponents = functionName.split("@");
-        signature = new FunctionSignature(functionDataverse, nameComponents[0], Integer.parseInt(nameComponents[1]));
+        FunctionSignature signature = new FunctionSignature(functionSignature.get(0), functionSignature.get(1),
+                Integer.parseInt(functionSignature.get(2)));
 
-        channel = new Channel(dataverseName, channelName, subscriptionsName, resultsName, signature, duration);
+        channel = new Channel(dataverseName, channelName, subscriptionsName, resultsName, signature, duration,
+                dependencies);
         return channel;
     }
 
@@ -141,9 +171,17 @@
         recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_RESULTS_NAME_FIELD_INDEX, fieldValue);
 
         // write field 4
+        OrderedListBuilder listBuilder = new OrderedListBuilder();
+        ArrayBackedValueStorage itemValue = new ArrayBackedValueStorage();
+        listBuilder.reset(stringList);
+        for (String pathPart : channel.getFunctionAsPath()) {
+            itemValue.reset();
+            aString.setValue(pathPart);
+            stringSerde.serialize(aString, itemValue.getDataOutput());
+            listBuilder.addItem(itemValue);
+        }
         fieldValue.reset();
-        aString.setValue(channel.getFunction().toString());
-        stringSerde.serialize(aString, fieldValue.getDataOutput());
+        listBuilder.write(fieldValue.getDataOutput(), true);
         recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_FUNCTION_FIELD_INDEX, fieldValue);
 
         // write field 5
@@ -152,6 +190,33 @@
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_DURATION_FIELD_INDEX, fieldValue);
 
+        // write field 6
+        dependenciesListBuilder.reset((AOrderedListType) BADMetadataRecordTypes.CHANNEL_RECORDTYPE
+                .getFieldTypes()[BADMetadataRecordTypes.CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX]);
+        List<List<List<String>>> dependenciesList = channel.getDependencies();
+        for (List<List<String>> dependencies : dependenciesList) {
+            dependencyListBuilder.reset(ListofLists);
+            for (List<String> dependency : dependencies) {
+                dependencyNameListBuilder.reset(stringList);
+                for (String subName : dependency) {
+                    itemValue.reset();
+                    aString.setValue(subName);
+                    stringSerde.serialize(aString, itemValue.getDataOutput());
+                    dependencyNameListBuilder.addItem(itemValue);
+                }
+                itemValue.reset();
+                dependencyNameListBuilder.write(itemValue.getDataOutput(), true);
+                dependencyListBuilder.addItem(itemValue);
+
+            }
+            itemValue.reset();
+            dependencyListBuilder.write(itemValue.getDataOutput(), true);
+            dependenciesListBuilder.addItem(itemValue);
+        }
+        fieldValue.reset();
+        dependenciesListBuilder.write(fieldValue.getDataOutput(), true);
+        recordBuilder.addField(BADMetadataRecordTypes.CHANNEL_ARECORD_DEPENDENCIES_FIELD_INDEX, fieldValue);
+
         // write record
         recordBuilder.write(tupleBuilder.getDataOutput(), true);
 
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
index 950612c..13f9e0d 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
@@ -73,7 +73,6 @@
     protected final List<IActiveEntityEventSubscriber> subscribers = new ArrayList<>();
     protected final ICcApplicationContext appCtx;
     protected final EntityId entityId;
-    protected final List<IDataset> datasets;
     protected final ActiveEvent statsUpdatedEvent;
     protected long statsTimestamp;
     protected String stats;
@@ -83,10 +82,9 @@
     protected int numRegistered;
 
     public DeployedJobSpecEventListener(ICcApplicationContext appCtx, EntityId entityId, PrecompiledType type,
-            List<IDataset> datasets, AlgebricksAbsolutePartitionConstraint locations, String runtimeName) {
+            AlgebricksAbsolutePartitionConstraint locations, String runtimeName) {
         this.appCtx = appCtx;
         this.entityId = entityId;
-        this.datasets = datasets;
         this.state = ActivityState.STOPPED;
         this.statsTimestamp = -1;
         this.statsRequestState = RequestState.INIT;
@@ -133,7 +131,7 @@
 
     @Override
     public boolean isEntityUsingDataset(IDataset dataset) {
-        return datasets.contains(dataset);
+        return false;
     }
 
     public JobId getJobId() {
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
index e3ed7fc..5712539 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
@@ -18,6 +18,7 @@
  */
 package org.apache.asterix.bad.metadata;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.asterix.active.EntityId;
@@ -39,9 +40,17 @@
     private final String returnType;
     private final String language;
     private final String duration;
+    /*
+    Dependencies are stored as a list of size two:
+    element 0 is the list of dataset dependencies,
+    each stored as [DataverseName, DatasetName];
+    element 1 is the list of function dependencies,
+    each stored as [DataverseName, FunctionName, Arity].
+     */
+    private final List<List<List<String>>> dependencies;
 
     public Procedure(String dataverseName, String functionName, int arity, List<String> params, String returnType,
-            String functionBody, String language, String duration) {
+            String functionBody, String language, String duration, List<List<List<String>>> dependencies) {
         this.procedureId = new EntityId(BADConstants.PROCEDURE_KEYWORD, dataverseName, functionName);
         this.params = params;
         this.body = functionBody;
@@ -49,6 +58,13 @@
         this.language = language;
         this.arity = arity;
         this.duration = duration;
+        if (dependencies == null) {
+            this.dependencies = new ArrayList<>();
+            this.dependencies.add(new ArrayList<>());
+            this.dependencies.add(new ArrayList<>());
+        } else {
+            this.dependencies = dependencies;
+        }
     }
 
     public EntityId getEntityId() {
@@ -79,6 +95,10 @@
         return duration;
     }
 
+    public List<List<List<String>>> getDependencies() {
+        return dependencies;
+    }
+
     @Override
     public boolean equals(Object other) {
         if (this == other) {
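
For readers following the new Dependencies format, the comment added to Procedure.java above (the same layout is used by Channel) can be made concrete with a small standalone sketch. The dataset and function names below are borrowed from this patch's test fixtures purely for illustration; the class itself is not part of the patch.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    // Builds a dependency value of the shape documented in Procedure.java:
    // element 0 holds dataset dependencies, element 1 holds function dependencies.
    public class DependencyListSketch {
        public static void main(String[] args) {
            List<List<String>> datasetDependencies = new ArrayList<>();
            datasetDependencies.add(Arrays.asList("channels", "UserLocations"));

            List<List<String>> functionDependencies = new ArrayList<>();
            functionDependencies.add(Arrays.asList("channels", "really_contains", "2"));

            List<List<List<String>>> dependencies = new ArrayList<>();
            dependencies.add(datasetDependencies);   // each entry: [DataverseName, DatasetName]
            dependencies.add(functionDependencies);  // each entry: [DataverseName, FunctionName, Arity]

            // Prints: [[[channels, UserLocations]], [[channels, really_contains, 2]]]
            System.out.println(dependencies);
        }
    }
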
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
index 1aa633f..0a6acb9 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
@@ -34,6 +34,7 @@
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.base.IACursor;
 import org.apache.asterix.om.types.AOrderedListType;
+import org.apache.asterix.om.types.BuiltinType;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
@@ -54,9 +55,15 @@
     // Payload field containing serialized Procedure.
     public static final int PROCEDURE_PAYLOAD_TUPLE_FIELD_INDEX = 3;
 
-    @SuppressWarnings("unchecked")
     private ISerializerDeserializer<ARecord> recordSerDes = SerializerDeserializerProvider.INSTANCE
             .getSerializerDeserializer(BADMetadataRecordTypes.PROCEDURE_RECORDTYPE);
+
+    private transient OrderedListBuilder dependenciesListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyListBuilder = new OrderedListBuilder();
+    private transient OrderedListBuilder dependencyNameListBuilder = new OrderedListBuilder();
+    private transient AOrderedListType stringList = new AOrderedListType(BuiltinType.ASTRING, null);
+    private transient AOrderedListType ListofLists =
+            new AOrderedListType(new AOrderedListType(BuiltinType.ASTRING, null), null);
 
     protected ProcedureTupleTranslator(boolean getTuple) {
         super(getTuple, BADMetadataIndexes.NUM_FIELDS_PROCEDURE_IDX);
@@ -104,8 +111,32 @@
                 .getValueByPos(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_DURATION_FIELD_INDEX))
                         .getStringValue();
 
+        IACursor dependenciesCursor = ((AOrderedList) procedureRecord
+                .getValueByPos(BADMetadataRecordTypes.PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX)).getCursor();
+        List<List<List<String>>> dependencies = new ArrayList<>();
+        AOrderedList dependencyList;
+        AOrderedList qualifiedList;
+        int i = 0;
+        while (dependenciesCursor.next()) {
+            dependencies.add(new ArrayList<>());
+            dependencyList = (AOrderedList) dependenciesCursor.get();
+            IACursor qualifiedDependencyCursor = dependencyList.getCursor();
+            int j = 0;
+            while (qualifiedDependencyCursor.next()) {
+                qualifiedList = (AOrderedList) qualifiedDependencyCursor.get();
+                IACursor qualifiedNameCursor = qualifiedList.getCursor();
+                dependencies.get(i).add(new ArrayList<>());
+                while (qualifiedNameCursor.next()) {
+                    dependencies.get(i).get(j).add(((AString) qualifiedNameCursor.get()).getStringValue());
+                }
+                j++;
+            }
+            i++;
+
+        }
+
         return new Procedure(dataverseName, procedureName, Integer.parseInt(arity), params, returnType, definition,
-                language, duration);
+                language, duration, dependencies);
 
     }
 
@@ -185,6 +216,33 @@
         stringSerde.serialize(aString, fieldValue.getDataOutput());
         recordBuilder.addField(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_DURATION_FIELD_INDEX, fieldValue);
 
+        // write field 8
+        dependenciesListBuilder.reset((AOrderedListType) BADMetadataRecordTypes.PROCEDURE_RECORDTYPE
+                .getFieldTypes()[BADMetadataRecordTypes.PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX]);
+        List<List<List<String>>> dependenciesList = procedure.getDependencies();
+        for (List<List<String>> dependencies : dependenciesList) {
+            dependencyListBuilder.reset(ListofLists);
+            for (List<String> dependency : dependencies) {
+                dependencyNameListBuilder.reset(stringList);
+                for (String subName : dependency) {
+                    itemValue.reset();
+                    aString.setValue(subName);
+                    stringSerde.serialize(aString, itemValue.getDataOutput());
+                    dependencyNameListBuilder.addItem(itemValue);
+                }
+                itemValue.reset();
+                dependencyNameListBuilder.write(itemValue.getDataOutput(), true);
+                dependencyListBuilder.addItem(itemValue);
+
+            }
+            itemValue.reset();
+            dependencyListBuilder.write(itemValue.getDataOutput(), true);
+            dependenciesListBuilder.addItem(itemValue);
+        }
+        fieldValue.reset();
+        dependenciesListBuilder.write(fieldValue.getDataOutput(), true);
+        recordBuilder.addField(BADMetadataRecordTypes.PROCEDURE_ARECORD_DEPENDENCIES_FIELD_INDEX, fieldValue);
+
         // write record
         recordBuilder.write(tupleBuilder.getDataOutput(), true);
         tupleBuilder.addFieldEndOffset();
diff --git a/asterix-bad/src/main/resources/lang-extension/lang.txt b/asterix-bad/src/main/resources/lang-extension/lang.txt
index 7c5931c..02aba78 100644
--- a/asterix-bad/src/main/resources/lang-extension/lang.txt
+++ b/asterix-bad/src/main/resources/lang-extension/lang.txt
@@ -129,9 +129,14 @@
   Token endPos;
   Statement functionBodyExpr;
   Expression period = null;
+  String currentDataverse = defaultDataverse;
+  createNewScope();
 }
 {
      "procedure" fctName = FunctionName()
+     {
+        defaultDataverse = fctName.dataverse;
+     }
      paramList = ParameterList()
     <LEFTBRACE>
   {
@@ -149,6 +154,7 @@
       functionBody = extractFragment(beginPos.beginLine, beginPos.beginColumn, endPos.beginLine, endPos.beginColumn);
       signature = new FunctionSignature(fctName.dataverse, fctName.function, paramList.size());
       removeCurrentScope();
+      defaultDataverse = currentDataverse;
     }
   ("period" period = FunctionCallExpr())?
   {
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
new file mode 100644
index 0000000..a5d3775
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Function
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+use channels;
+drop function NearbyTweetsContainingText@2;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
new file mode 100644
index 0000000..d1047b0
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Function Dataverse
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+drop dataverse channels;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
new file mode 100644
index 0000000..432f3c5
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Results
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+drop dataset two.nearbyTweetChannelResults;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
new file mode 100644
index 0000000..f6dc2bf
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Channel Subscriptions
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type TweetMessageTypeuuid as closed {
+  tweetid: uuid,
+  sender_location: point,
+  send_time: datetime,
+  referred_topics: {{ string }},
+  message_text: string,
+  countA: int32,
+  countB: int32
+};
+
+
+create dataset TweetMessageuuids(TweetMessageTypeuuid)
+primary key tweetid autogenerated;
+
+create function NearbyTweetsContainingText(place, text) {
+  (select m.message_text
+  from TweetMessageuuids m
+  where contains(m.message_text,text)
+  and spatial_intersect(m.sender_location, place))
+};
+
+create dataverse two;
+use two;
+
+create repetitive channel nearbyTweetChannel using channels.NearbyTweetsContainingText@2 period duration("PT10M");
+
+drop dataset two.nearbyTweetChannelSubscriptions;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
index fe1db99..638f7b5 100644
--- a/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
+++ b/asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
@@ -23,6 +23,7 @@
 * Author       : Steven Jacobs
 */
 
+drop dataverse two if exists;
 drop dataverse channels if exists;
 create dataverse channels;
 use channels;
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
new file mode 100644
index 0000000..71b2ed4
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Create Procedure Check Metadata
+* Expected Res : Success
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create dataverse two;
+use two;
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+
+create procedure selectSome(r, otherRoom) {
+select roomNumber from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and channels.really_contains(roomNumber,"l")
+order by id
+};
+
+create procedure deleteSome(r, otherRoom) {
+delete from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and channels.really_contains(roomNumber,"l")
+};
+
+create procedure addMe() {
+  insert into channels.UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
+
+create procedure localSelectSome(r, otherRoom) {
+select roomNumber from UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and really_contains(roomNumber,"l")
+order by id
+};
+
+create procedure localDeleteSome(r, otherRoom) {
+delete from UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and really_contains(roomNumber,"l")
+};
+
+create procedure localAddMe() {
+  insert into UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
new file mode 100644
index 0000000..023c343
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+select value x
+from Metadata.`Procedure` x
+order by x.ProcedureName;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
index 7dbf136..905211f 100644
--- a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
@@ -23,6 +23,7 @@
 * Author       : Steven Jacobs
 */
 
+drop dataverse two if exists;
 drop dataverse channels if exists;
 create dataverse channels;
 use channels;
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
new file mode 100644
index 0000000..1b88b19
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure deleteAll() {
+delete from channels.UserLocations
+};
+
+use channels;
+drop dataset UserLocations;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
new file mode 100644
index 0000000..2322154
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Delete Procedure drop function
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+insert into UserLocations(
+  [{"id":0, "roomNumber":4815162342},
+  {"id":1, "roomNumber":"lost"},
+  {"id":2, "roomNumber":108},
+  {"id":3, "roomNumber":"jacob"}]
+);
+
+create dataverse two;
+use two;
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create procedure deleteSome(r, otherRoom) {
+delete from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and really_contains(roomNumber,"l")
+};
+
+drop function really_contains@2;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
new file mode 100644
index 0000000..ab66a22
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset Index
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create index rooms on UserLocations(roomNumber);
+
+create dataverse two;
+use two;
+
+create procedure deleteAll() {
+delete from channels.UserLocations
+};
+
+use channels;
+drop index UserLocations.rooms;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
new file mode 100644
index 0000000..260afab
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure addMe() {
+  insert into channels.UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
+
+use channels;
+drop dataset UserLocations;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
new file mode 100644
index 0000000..ad47b4e
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset Dataverse
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure addMe() {
+  insert into channels.UserLocations([
+    {"timeStamp":current_datetime(), "roomNumber":222}]
+  )
+};
+
+drop dataverse channels;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
new file mode 100644
index 0000000..acb9f04
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Drop Procedure Dataset
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+
+create type myLocation as {
+  timeStamp: datetime,
+  roomNumber: int
+};
+
+
+create dataset UserLocations(myLocation)
+primary key timeStamp;
+
+create dataverse two;
+use two;
+
+create procedure findMe() {
+select roomNumber from channels.UserLocations
+order by timeStamp
+};
+
+use channels;
+drop dataset UserLocations;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
new file mode 100644
index 0000000..0e648cd
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Query Procedure drop function
+* Expected Res : Error
+* Date         : Jan 2018
+* Author       : Steven Jacobs
+*/
+
+drop dataverse two if exists;
+drop dataverse channels if exists;
+create dataverse channels;
+use channels;
+create type myLocation as {
+  id: int
+};
+create dataset UserLocations(myLocation)
+primary key id;
+insert into UserLocations(
+  [{"id":0, "roomNumber":4815162342},
+  {"id":1, "roomNumber":"lost"},
+  {"id":2, "roomNumber":108},
+  {"id":3, "roomNumber":"jacob"}]
+);
+
+create function really_contains(word,letter){
+contains(word,letter)
+};
+
+create dataverse two;
+use two;
+
+create procedure selectSome(r, otherRoom) {
+select roomNumber from channels.UserLocations
+where roomNumber = r
+or roomNumber = otherRoom
+and channels.really_contains(roomNumber,"l")
+order by id
+};
+
+
+use channels;
+drop function really_contains@2;
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp b/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
index 76f7f38..370d516 100644
--- a/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
+++ b/asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
@@ -23,6 +23,7 @@
 * Author       : Steven Jacobs
 */
 
+drop dataverse two if exists;
 drop dataverse channels if exists;
 create dataverse channels;
 use channels;
diff --git a/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm b/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
index e009733..bee9157 100644
--- a/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
+++ b/asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
@@ -1 +1 @@
-{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel", "SubscriptionsDatasetName": "nearbyTweetChannelSubscriptions", "ResultsDatasetName": "nearbyTweetChannelResults", "Function": "channels.NearbyTweetsContainingText@2", "Duration": "PT10M" }
\ No newline at end of file
+{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel", "SubscriptionsDatasetName": "nearbyTweetChannelSubscriptions", "ResultsDatasetName": "nearbyTweetChannelResults", "Function": [ "channels", "NearbyTweetsContainingText", "2" ], "Duration": "PT10M", "Dependencies": [ [ [ "channels", "nearbyTweetChannelResults" ], [ "channels", "nearbyTweetChannelSubscriptions" ] ], [ [ "channels", "NearbyTweetsContainingText", "2" ] ] ] }
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm b/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
index 1da5787..1c492ac 100644
--- a/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
+++ b/asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
@@ -1,2 +1,2 @@
-{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel1", "SubscriptionsDatasetName": "nearbyTweetChannel1Subscriptions", "ResultsDatasetName": "nearbyTweetChannel1Results", "Function": "channels.NearbyTweetsContainingText@2", "Duration": "PT10M" }
-{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel3", "SubscriptionsDatasetName": "nearbyTweetChannel3Subscriptions", "ResultsDatasetName": "nearbyTweetChannel3Results", "Function": "channels.NearbyTweetsContainingText@2", "Duration": "PT10M" }
\ No newline at end of file
+{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel1", "SubscriptionsDatasetName": "nearbyTweetChannel1Subscriptions", "ResultsDatasetName": "nearbyTweetChannel1Results", "Function": [ "channels", "NearbyTweetsContainingText", "2" ], "Duration": "PT10M", "Dependencies": [ [ [ "channels", "nearbyTweetChannel1Results" ], [ "channels", "nearbyTweetChannel1Subscriptions" ] ], [ [ "channels", "NearbyTweetsContainingText", "2" ] ] ] }
+{ "DataverseName": "channels", "ChannelName": "nearbyTweetChannel3", "SubscriptionsDatasetName": "nearbyTweetChannel3Subscriptions", "ResultsDatasetName": "nearbyTweetChannel3Results", "Function": [ "channels", "NearbyTweetsContainingText", "2" ], "Duration": "PT10M", "Dependencies": [ [ [ "channels", "nearbyTweetChannel3Results" ], [ "channels", "nearbyTweetChannel3Subscriptions" ] ], [ [ "channels", "NearbyTweetsContainingText", "2" ] ] ] }
\ No newline at end of file
diff --git a/asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm b/asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
new file mode 100644
index 0000000..4308c83
--- /dev/null
+++ b/asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
@@ -0,0 +1,6 @@
+{ "DataverseName": "two", "ProcedureName": "addMe", "Arity": "0", "Params": [  ], "ReturnType": "VOID", "Definition": "insert into channels.UserLocations([\n    {\"timeStamp\":current_datetime(), \"roomNumber\":222}]\n  )", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "channels", "UserLocations" ] ], [  ] ] }
+{ "DataverseName": "two", "ProcedureName": "deleteSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "delete from channels.UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand channels.really_contains(roomNumber,\"l\")", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "channels", "UserLocations" ] ], [ [ "channels", "really_contains", "2" ] ] ] }
+{ "DataverseName": "two", "ProcedureName": "localAddMe", "Arity": "0", "Params": [  ], "ReturnType": "VOID", "Definition": "insert into UserLocations([\n    {\"timeStamp\":current_datetime(), \"roomNumber\":222}]\n  )", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "two", "UserLocations" ] ], [  ] ] }
+{ "DataverseName": "two", "ProcedureName": "localDeleteSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "delete from UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand really_contains(roomNumber,\"l\")", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "two", "UserLocations" ] ], [ [ "two", "really_contains", "2" ] ] ] }
+{ "DataverseName": "two", "ProcedureName": "localSelectSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "select roomNumber from UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand really_contains(roomNumber,\"l\")\norder by id", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "two", "UserLocations" ] ], [ [ "two", "really_contains", "2" ] ] ] }
+{ "DataverseName": "two", "ProcedureName": "selectSome", "Arity": "2", "Params": [ "$r", "$otherRoom" ], "ReturnType": "VOID", "Definition": "select roomNumber from channels.UserLocations\nwhere roomNumber = r\nor roomNumber = otherRoom\nand channels.really_contains(roomNumber,\"l\")\norder by id", "Language": "AQL", "Duration": "", "Dependencies": [ [ [ "channels", "UserLocations" ] ], [ [ "channels", "really_contains", "2" ] ] ] }
\ No newline at end of file
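
The expected metadata records above also show the on-disk shape of the new Dependencies field: it is always a two-element list, where element 0 lists the dataset dependencies as [DataverseName, DatasetName] pairs and element 1 lists the function dependencies as [DataverseName, FunctionName, Arity] triples. The following sketch walks a value of that shape; it is illustrative only (plain Java lists rather than the AOrderedList cursors used in the tuple translators), and the names are copied from the records above.

    import java.util.Arrays;
    import java.util.List;

    public class DependenciesWalkSketch {
        public static void main(String[] args) {
            // Same shape as the Dependencies field of two.deleteSome above.
            List<List<List<String>>> dependencies = Arrays.asList(
                    Arrays.asList(Arrays.asList("channels", "UserLocations")),
                    Arrays.asList(Arrays.asList("channels", "really_contains", "2")));

            for (List<String> dataset : dependencies.get(0)) {
                System.out.println("dataset dependency:  " + dataset.get(0) + "." + dataset.get(1));
            }
            for (List<String> function : dependencies.get(1)) {
                System.out.println("function dependency: "
                        + function.get(0) + "." + function.get(1) + "@" + function.get(2));
            }
        }
    }
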
diff --git a/asterix-bad/src/test/resources/runtimets/testsuite.xml b/asterix-bad/src/test/resources/runtimets/testsuite.xml
index 1b2844b..3c72a14 100644
--- a/asterix-bad/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-bad/src/test/resources/runtimets/testsuite.xml
@@ -22,6 +22,11 @@
              QueryFileExtension=".sqlpp">
   <test-group name="channel">
     <test-case FilePath="procedure">
+      <compilation-unit name="create_procedure_check_metadata">
+        <output-dir compare="Text">create_procedure_check_metadata</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
       <compilation-unit name="delete_procedure">
         <output-dir compare="Text">delete_procedure</output-dir>
       </compilation-unit>
@@ -52,13 +57,50 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="procedure">
-      <compilation-unit name="repetitive_insert_procedure">
-        <output-dir compare="Text">repetitive_insert_procedure</output-dir>
+      <compilation-unit name="insert_procedure_drop_dataset">
+        <output-dir compare="Text">insert_procedure_drop_dataset</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.addMe(Procedure) depends on it!</expected-error>
       </compilation-unit>
     </test-case>
-    <test-case FilePath="channel">
-      <compilation-unit name="room_occupants">
-        <output-dir compare="Text">room_occupants</output-dir>
+    <test-case FilePath="procedure">
+      <compilation-unit name="insert_procedure_drop_dataverse">
+        <output-dir compare="Text">insert_procedure_drop_dataverse</output-dir>
+        <expected-error>Cannot drop dataverse channels. two.addMe(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="delete_procedure_drop_dataset">
+        <output-dir compare="Text">delete_procedure_drop_dataset</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.deleteAll(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="delete_procedure_drop_function">
+        <output-dir compare="Text">delete_procedure_drop_function</output-dir>
+        <expected-error>Cannot drop function two.really_contains@2. two.deleteSome(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="delete_procedure_drop_index">
+        <output-dir compare="Text">delete_procedure_drop_index</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.deleteAll(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="query_procedure_drop_dataset">
+        <output-dir compare="Text">query_procedure_drop_dataset</output-dir>
+        <expected-error>Cannot alter dataset channels.UserLocations. two.findMe(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="query_procedure_drop_function">
+        <output-dir compare="Text">query_procedure_drop_function</output-dir>
+        <expected-error>Cannot drop function channels.really_contains@2. two.selectSome(Procedure) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="procedure">
+      <compilation-unit name="repetitive_insert_procedure">
+        <output-dir compare="Text">repetitive_insert_procedure</output-dir>
       </compilation-unit>
     </test-case>
     <test-case FilePath="channel">
@@ -87,6 +129,35 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="channel">
+      <compilation-unit name="drop_function">
+        <output-dir compare="Text">drop_function</output-dir>
+        <expected-error>Cannot drop function channels.NearbyTweetsContainingText@2. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="drop_results">
+        <output-dir compare="Text">drop_results</output-dir>
+        <expected-error>Cannot alter dataset two.nearbyTweetChannelResults. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="drop_subscriptions">
+        <output-dir compare="Text">drop_subscriptions</output-dir>
+        <expected-error>Cannot alter dataset two.nearbyTweetChannelSubscriptions. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="drop_function_dataverse">
+        <output-dir compare="Text">drop_function_dataverse</output-dir>
+        <expected-error>Cannot drop dataverse channels. two.nearbyTweetChannel(Channel) depends on it!</expected-error>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
+      <compilation-unit name="room_occupants">
+        <output-dir compare="Text">room_occupants</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="channel">
       <compilation-unit name="disasters_with_friends">
         <output-dir compare="Text">disasters_with_friends</output-dir>
       </compilation-unit>

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/2302
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
Gerrit-PatchSet: 3
Gerrit-Project: asterixdb-bad
Gerrit-Branch: master
Gerrit-Owner: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Jenkins <je...@fulliautomatix.ics.uci.edu>
Gerrit-Reviewer: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Xikui Wang <xk...@gmail.com>

Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Steven Jacobs (Code Review)" <do...@asterixdb.incubator.apache.org>.
Hello Jenkins,

I'd like you to reexamine a change.  Please visit

    https://asterix-gerrit.ics.uci.edu/2302

to look at the new patch set (#2).

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................

Enable dependencies in the metadata for BAD entities

Allow Channels and Procedures to store dependencies on
Datasets and Functions

Prevent dropping of these dependencies

Add Error tests

Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
---
M asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
M asterix-bad/src/main/resources/lang-extension/lang.txt
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
A asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/testsuite.xml
35 files changed, 1,319 insertions(+), 103 deletions(-)


  git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb-bad refs/changes/02/2302/2
-- 
To view, visit https://asterix-gerrit.ics.uci.edu/2302
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: newpatchset
Gerrit-Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
Gerrit-PatchSet: 2
Gerrit-Project: asterixdb-bad
Gerrit-Branch: master
Gerrit-Owner: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Jenkins <je...@fulliautomatix.ics.uci.edu>
Gerrit-Reviewer: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Xikui Wang <xk...@gmail.com>

Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Xikui Wang (Code Review)" <do...@asterixdb.incubator.apache.org>.
Xikui Wang has posted comments on this change.

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................


Patch Set 1:

(1 comment)

One minor comment on the triple-nested dependency list... If you need a +2 now, I can do that to accelerate the process.

https://asterix-gerrit.ics.uci.edu/#/c/2302/1/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
File asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java:

Line 107:         this.dependencies.add(new ArrayList<>());
Could you add a one- or two-line comment to explain the hierarchy here? In the drop-dataverse case, it seems the dependencies are structured as <dataverse, dataset, function info>, and the function info is always length 3? I'm a little confused... Or is there a document somewhere that I can refer to? Same for the other dependencies...
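
For context, a minimal sketch of how such a triple-nested dependency list could be laid out, assuming the outer list uses index 0 for dataset dependencies and index 1 for function dependencies; the class name DependencyListSketch and the dataverse, dataset, and function names below are purely illustrative and are not taken from the patch itself:

    // Illustrative sketch only -- the actual field layout in
    // CreateProcedureStatement may differ from this assumption.
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class DependencyListSketch {
        public static void main(String[] args) {
            // Outer list: one entry per dependency kind.
            // Assumed convention: index 0 = dataset dependencies,
            //                     index 1 = function dependencies.
            List<List<List<String>>> dependencies = new ArrayList<>();
            dependencies.add(new ArrayList<>()); // dataset dependencies
            dependencies.add(new ArrayList<>()); // function dependencies

            // Each dataset dependency: [dataverse, datasetName]
            dependencies.get(0).add(Arrays.asList("channels", "UserLocations"));

            // Each function dependency: [dataverse, functionName, arity]
            dependencies.get(1).add(Arrays.asList("channels", "NearbyUsers", "2"));

            System.out.println(dependencies);
        }
    }

Under that assumption, each innermost list is either a length-2 entry (a dataset) or a length-3 entry (a function, with its arity as the third element), which is what makes the structure look triple-nested.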


-- 
To view, visit https://asterix-gerrit.ics.uci.edu/2302
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: comment
Gerrit-Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
Gerrit-PatchSet: 1
Gerrit-Project: asterixdb-bad
Gerrit-Branch: master
Gerrit-Owner: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Jenkins <je...@fulliautomatix.ics.uci.edu>
Gerrit-Reviewer: Xikui Wang <xk...@gmail.com>
Gerrit-HasComments: Yes

Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Jenkins (Code Review)" <do...@asterixdb.incubator.apache.org>.
Jenkins has posted comments on this change.

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................


Patch Set 1:

Build Started https://asterix-jenkins.ics.uci.edu/job/asterixbad-gerrit/232/

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/2302
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: comment
Gerrit-Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
Gerrit-PatchSet: 1
Gerrit-Project: asterixdb-bad
Gerrit-Branch: master
Gerrit-Owner: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Jenkins <je...@fulliautomatix.ics.uci.edu>
Gerrit-HasComments: No

Change in asterixdb-bad[master]: Enable dependencies in the metadata for BAD entities

Posted by "Steven Jacobs (Code Review)" <do...@asterixdb.incubator.apache.org>.
Hello Jenkins,

I'd like you to reexamine a change.  Please visit

    https://asterix-gerrit.ics.uci.edu/2302

to look at the new patch set (#3).

Change subject: Enable dependencies in the metadata for BAD entities
......................................................................

Enable dependencies in the metadata for BAD entities

Allow Channels and Procedures to store dependencies on
Datasets and Functions

Prevent dropping of these dependencies

Add Error tests

Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
---
M asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADLangExtension.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ChannelDropStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
M asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllChannelsSearchKey.java
A asterix-bad/src/main/java/org/apache/asterix/bad/metadata/AllProceduresSearchKey.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Channel.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ChannelTupleTranslator.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
M asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
M asterix-bad/src/main/resources/lang-extension/lang.txt
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function/drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_function_dataverse/drop_function_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_results/drop_results.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/channel/drop_subscriptions/drop_subscriptions.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/channel/room_occupants/room_occupants.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/create_procedure_check_metadata/create_procedure_check_metadata.2.query.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure/delete_procedure.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_dataset/delete_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_function/delete_procedure_drop_function.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/delete_procedure_drop_index/delete_procedure_drop_index.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataset/insert_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/insert_procedure_drop_dataverse/insert_procedure_drop_dataverse.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_dataset/query_procedure_drop_dataset.1.ddl.sqlpp
A asterix-bad/src/test/resources/runtimets/queries/procedure/query_procedure_drop_function/query_procedure_drop_function.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/queries/procedure/repetitive_insert_procedure/repetitive_insert_procedure.1.ddl.sqlpp
M asterix-bad/src/test/resources/runtimets/results/channel/create_channel_check_metadata/create_channel_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/results/channel/drop_channel_check_metadata/drop_channel_check_metadata.1.adm
A asterix-bad/src/test/resources/runtimets/results/procedure/create_procedure_check_metadata/create_procedure_check_metadata.1.adm
M asterix-bad/src/test/resources/runtimets/testsuite.xml
35 files changed, 1,317 insertions(+), 103 deletions(-)


  git pull ssh://asterix-gerrit.ics.uci.edu:29418/asterixdb-bad refs/changes/02/2302/3
-- 
To view, visit https://asterix-gerrit.ics.uci.edu/2302
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: newpatchset
Gerrit-Change-Id: Ic6ac2daad03844a042aded8e17bb231a06f59cbe
Gerrit-PatchSet: 3
Gerrit-Project: asterixdb-bad
Gerrit-Branch: master
Gerrit-Owner: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Jenkins <je...@fulliautomatix.ics.uci.edu>
Gerrit-Reviewer: Steven Jacobs <sj...@ucr.edu>
Gerrit-Reviewer: Xikui Wang <xk...@gmail.com>