Posted to commits@asterixdb.apache.org by mb...@apache.org on 2020/10/02 01:07:46 UTC

[asterixdb] 10/11: Merge branch gerrit/mad-hatter

This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit da3bb888c23b278ca1e9ac4af88d127af637c034
Merge: 731fa70 d2aefdc
Author: Michael Blow <mi...@couchbase.com>
AuthorDate: Thu Oct 1 11:48:19 2020 -0400

    Merge branch gerrit/mad-hatter
    
    Change-Id: I24af1b0da23f056277b10c6d9064813c564ab628

 .../org/apache/asterix/active/ActiveManager.java   |  6 +++
 .../active/IActiveEntityEventSubscriber.java       |  2 +-
 .../active/IActiveEntityEventsListener.java        |  2 +-
 .../active/message/ActiveManagerMessage.java       |  2 +-
 .../active/message/ActiveStatsRequestMessage.java  | 10 +++++
 .../api/http/server/RebalanceApiServlet.java       | 25 ++++++-----
 .../app/active/ActiveEntityEventsListener.java     | 25 ++++++++---
 .../asterix/app/translator/QueryTranslator.java    | 20 ++++++++-
 .../org/apache/asterix/utils/RebalanceUtil.java    |  5 +--
 .../org/apache/asterix/common/TestDataUtil.java    |  2 +-
 .../asterix/test/active/ActiveStatsTest.java       | 19 ++++++--
 .../apache/asterix/test/common/TestExecutor.java   | 38 ++++++++++------
 .../test/runtime/ResultStreamingFailureTest.java   |  2 +-
 .../src/test/resources/runtimets/only_sqlpp.xml    | 10 +++++
 .../test.000.ddl.sqlpp}                            | 13 +++---
 .../test.001.ddl.sqlpp}                            |  3 +-
 .../test.002.ddl.sqlpp}                            |  3 +-
 .../test.003.ddl.sqlpp}                            |  3 +-
 .../test.004.ddl.sqlpp}                            |  3 +-
 .../test.005.ddl.sqlpp}                            |  3 +-
 .../test.006.ddl.sqlpp}                            |  3 +-
 .../test.007.ddl.sqlpp}                            |  3 +-
 .../test.008.ddl.sqlpp}                            |  3 +-
 .../test.009.ddl.sqlpp}                            |  3 +-
 .../test.010.ddl.sqlpp}                            |  3 +-
 .../test.011.ddl.sqlpp}                            |  3 +-
 .../test.999.ddl.sqlpp}                            |  3 +-
 .../invalid-dataverse.001.ddl.sqlpp                |  4 +-
 .../invalid-dataverse.002.ddl.sqlpp                |  6 ++-
 .../invalid-dataverse.003.ddl.sqlpp                |  7 +--
 .../invalid-dataverse.004.ddl.sqlpp                |  8 ++--
 .../invalid-dataverse.005.ddl.sqlpp                |  8 ++--
 .../invalid-dataverse.006.ddl.sqlpp                |  8 ++--
 .../invalid-dataverse.007.ddl.sqlpp                |  2 +
 .../invalid-dataverse.008.ddl.sqlpp                |  2 +
 .../empty-dataset/empty-dataset.01.ddl.sqlpp}      |  8 +++-
 .../empty-dataset/empty-dataset.02.update.sqlpp}   |  3 +-
 .../empty-dataset/empty-dataset.03.query.sqlpp}    |  4 +-
 .../empty-dataset/empty-dataset.04.query.sqlpp}    |  4 +-
 .../empty-dataset/empty-dataset.05.query.sqlpp}    |  4 +-
 .../empty-dataset/empty-dataset.06.query.sqlpp}    |  4 +-
 .../push-limit-to-primary-scan-select.1.ddl.sqlpp  | 24 +++++++++-
 ...sh-limit-to-primary-scan-select.10.query.sqlpp} | 40 +++++++++--------
 ...sh-limit-to-primary-scan-select.11.query.sqlpp} | 25 +++++++++--
 ...ush-limit-to-primary-scan-select.2.update.sqlpp |  6 +--
 .../push-limit-to-primary-scan.1.ddl.sqlpp         | 21 +++++++++
 .../push-limit-to-primary-scan.2.update.sqlpp      |  3 +-
 .../push-limit-to-primary-scan.6.query.sqlpp}      | 27 +++++++++--
 ...pp => push-limit-to-primary-scan.7.query.sqlpp} | 24 +++++++++-
 .../identical_location.3.post.http                 |  2 +-
 .../empty-dataset/empty-dataset.03.adm             |  1 +
 .../empty-dataset/empty-dataset.04.adm             |  0
 .../empty-dataset/empty-dataset.05.adm             |  0
 .../empty-dataset/empty-dataset.06.adm             |  0
 .../push-limit-to-primary-scan-select.10.adm       |  1 +
 .../push-limit-to-primary-scan-select.11.adm       | 52 ++++++++++++++++++++++
 .../push-limit-to-primary-scan.6.adm               |  1 +
 .../push-limit-to-primary-scan.7.adm               | 52 ++++++++++++++++++++++
 .../runtimets/testsuite_external_dataset.xml       |  2 +
 .../test/resources/runtimets/testsuite_sqlpp.xml   | 28 +++++++++---
 .../asterix/common/exceptions/ErrorCode.java       |  1 +
 .../asterix/common/exceptions/ExceptionUtils.java  | 15 +++++++
 .../src/main/resources/asx_errormsg/en.properties  |  1 +
 .../external/feed/watch/AbstractSubscriber.java    | 29 +++++++++---
 .../feed/watch/WaitForStateSubscriber.java         |  2 +-
 .../visitors/IsomorphismOperatorVisitor.java       | 11 +++--
 .../hyracks/dataflow/std/join/NestedLoopJoin.java  | 32 +++++++++++--
 67 files changed, 554 insertions(+), 135 deletions(-)

diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
index e98e3f6,43c25f2..a7b91c6
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RebalanceApiServlet.java
@@@ -104,10 -104,14 +104,14 @@@ public class RebalanceApiServlet extend
      protected void post(IServletRequest request, IServletResponse response) {
          try {
              // Gets dataverse, dataset, and target nodes for rebalance.
 -            String dataverseName = request.getParameter("dataverseName");
 +            DataverseName dataverseName = ServletUtil.getDataverseName(request, "dataverseName");
              String datasetName = request.getParameter("datasetName");
              String nodes = request.getParameter("nodes");
- 
+             boolean forceRebalance = true;
+             String force = request.getParameter("force");
+             if (force != null) {
+                 forceRebalance = Boolean.parseBoolean(force);
+             }
              // Parses and check target nodes.
              if (nodes == null) {
                  sendResponse(response, HttpResponseStatus.BAD_REQUEST, "nodes are not given");
@@@ -155,19 -160,19 +160,19 @@@
      }
  
      // Schedules a rebalance task.
 -    private synchronized CountDownLatch scheduleRebalance(String dataverseName, String datasetName,
 +    private synchronized CountDownLatch scheduleRebalance(DataverseName dataverseName, String datasetName,
-             String[] targetNodes, IServletResponse response) {
+             String[] targetNodes, IServletResponse response, boolean force) {
          CountDownLatch terminated = new CountDownLatch(1);
-         Future<Void> task =
-                 executor.submit(() -> doRebalance(dataverseName, datasetName, targetNodes, response, terminated));
+         Future<Void> task = executor
+                 .submit(() -> doRebalance(dataverseName, datasetName, targetNodes, response, terminated, force));
          rebalanceTasks.add(task);
          rebalanceFutureTerminated.add(terminated);
          return terminated;
      }
  
      // Performs the actual rebalance.
 -    private Void doRebalance(String dataverseName, String datasetName, String[] targetNodes, IServletResponse response,
 -            CountDownLatch terminated, boolean force) {
 +    private Void doRebalance(DataverseName dataverseName, String datasetName, String[] targetNodes,
-             IServletResponse response, CountDownLatch terminated) {
++            IServletResponse response, CountDownLatch terminated, boolean force) {
          try {
              // Sets the content type.
              HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, StandardCharsets.UTF_8);
@@@ -243,20 -248,20 +248,20 @@@
      }
  
      // Rebalances a given dataset.
-     private void rebalanceDataset(DataverseName dataverseName, String datasetName, String[] targetNodes)
 -    private void rebalanceDataset(String dataverseName, String datasetName, String[] targetNodes, boolean force)
++    private void rebalanceDataset(DataverseName dataverseName, String datasetName, String[] targetNodes, boolean force)
              throws Exception {
          IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
 -        MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
 +        MetadataProvider metadataProvider = MetadataProvider.create(appCtx, null);
          try {
              ActiveNotificationHandler activeNotificationHandler =
                      (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
              activeNotificationHandler.suspend(metadataProvider);
              try {
                  IMetadataLockManager lockManager = appCtx.getMetadataLockManager();
 -                lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(),
 -                        dataverseName + '.' + datasetName);
 +                lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(), dataverseName,
 +                        datasetName);
                  RebalanceUtil.rebalance(dataverseName, datasetName, new LinkedHashSet<>(Arrays.asList(targetNodes)),
-                         metadataProvider, hcc, NoOpDatasetRebalanceCallback.INSTANCE);
+                         metadataProvider, hcc, NoOpDatasetRebalanceCallback.INSTANCE, force);
              } finally {
                  activeNotificationHandler.resume(metadataProvider);
              }
diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index f912c76,d49996a..bccd2a9
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@@ -213,7 -197,9 +213,8 @@@ import org.apache.hyracks.algebricks.ru
  import org.apache.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
  import org.apache.hyracks.api.client.IClusterInfoCollector;
  import org.apache.hyracks.api.client.IHyracksClientConnection;
 -import org.apache.hyracks.api.dataflow.value.ITypeTraits;
  import org.apache.hyracks.api.exceptions.HyracksDataException;
+ import org.apache.hyracks.api.exceptions.IWarningCollector;
  import org.apache.hyracks.api.exceptions.SourceLocation;
  import org.apache.hyracks.api.exceptions.Warning;
  import org.apache.hyracks.api.io.FileSplit;
@@@ -281,9 -265,24 +282,13 @@@ public class QueryTranslator extends Ab
          return sessionOutput;
      }
  
 -    protected List<FunctionDecl> getDeclaredFunctions(List<Statement> statements) {
 -        List<FunctionDecl> functionDecls = new ArrayList<>();
 -        for (Statement st : statements) {
 -            if (st.getKind() == Statement.Kind.FUNCTION_DECL) {
 -                functionDecls.add((FunctionDecl) st);
 -            }
 -        }
 -        return functionDecls;
 -    }
 -
+     public IWarningCollector getWarningCollector() {
+         return warningCollector;
+     }
+ 
      @Override
      public void compileAndExecute(IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
 -        validateStatements(statements, requestParameters.isMultiStatement(),
 -                requestParameters.getStatementCategoryRestrictionMask());
 +        validateStatements(requestParameters);
          trackRequest(requestParameters);
          int resultSetIdCounter = 0;
          FileSplit outputFile = null;
@@@ -1646,8 -1585,12 +1651,12 @@@
              Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx.getValue(), dataverseName);
              if (dv == null) {
                  if (ifExists) {
+                     if (warningCollector.shouldWarn()) {
+                         warningCollector
+                                 .warn(WarningUtil.forAsterix(sourceLoc, ErrorCode.UNKNOWN_DATAVERSE, dataverseName));
+                     }
                      MetadataManager.INSTANCE.commitTransaction(mdTxnCtx.getValue());
 -                    return;
 +                    return false;
                  } else {
                      throw new CompilationException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dataverseName);
                  }
diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
index 84f9a94,25076c2..d32bfb2
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
@@@ -92,9 -91,9 +92,9 @@@ public class RebalanceUtil 
       *            the reusable hyracks connection.
       * @throws Exception
       */
 -    public static void rebalance(String dataverseName, String datasetName, Set<String> targetNcNames,
 +    public static void rebalance(DataverseName dataverseName, String datasetName, Set<String> targetNcNames,
              MetadataProvider metadataProvider, IHyracksClientConnection hcc,
-             IDatasetRebalanceCallback datasetRebalanceCallback) throws Exception {
+             IDatasetRebalanceCallback datasetRebalanceCallback, boolean forceRebalance) throws Exception {
          Dataset sourceDataset;
          Dataset targetDataset;
          // Executes the first Metadata transaction.
diff --cc asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
index dfe696f,f15b88f..06380fe
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
@@@ -165,10 -164,10 +165,10 @@@ public class TestDataUtil 
              activeNotificationHandler.suspend(metadataProvider);
              try {
                  IMetadataLockManager lockManager = ccAppCtx.getMetadataLockManager();
 -                lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(),
 -                        dataverseName + '.' + datasetName);
 +                lockManager.acquireDatasetExclusiveModificationLock(metadataProvider.getLocks(), dataverseName,
 +                        datasetName);
                  RebalanceUtil.rebalance(dataverseName, datasetName, new LinkedHashSet<>(Arrays.asList(targetNodes)),
-                         metadataProvider, ccAppCtx.getHcc(), NoOpDatasetRebalanceCallback.INSTANCE);
+                         metadataProvider, ccAppCtx.getHcc(), NoOpDatasetRebalanceCallback.INSTANCE, false);
              } finally {
                  activeNotificationHandler.resume(metadataProvider);
              }
diff --cc asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index 2399064,e586878..cd0dee2
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@@ -1614,10 -1567,11 +1613,11 @@@ public class TestExecutor 
          return executeUpdateOrDdl(statement, outputFormat, getQueryServiceUri(AQL));
      }
  
-     private InputStream executeUpdateOrDdl(String statement, OutputFormat outputFormat, URI serviceUri)
+     private ExtractedResult executeUpdateOrDdl(String statement, OutputFormat outputFormat, URI serviceUri)
              throws Exception {
-         InputStream resultStream = executeQueryService(statement, serviceUri, outputFormat, UTF_8);
-         return ResultExtractor.extract(resultStream, UTF_8, outputFormat).getResult();
+         try (InputStream resultStream = executeQueryService(statement, serviceUri, outputFormat, UTF_8)) {
 -            return ResultExtractor.extract(resultStream, UTF_8);
++            return ResultExtractor.extract(resultStream, UTF_8, outputFormat);
+         }
      }
  
      protected static boolean isExpected(Exception e, CompilationUnit cUnit) {
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/only_sqlpp.xml
index 334dd52,334dd52..f521dd9
--- a/asterixdb/asterix-app/src/test/resources/runtimets/only_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/only_sqlpp.xml
@@@ -19,5 -19,5 +19,15 @@@
   !-->
  <test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries_sqlpp" QueryFileExtension=".sqlpp">
    <test-group name="failed">
++    <test-case FilePath="limit">
++      <compilation-unit name="push-limit-to-primary-scan">
++        <output-dir compare="Text">push-limit-to-primary-scan</output-dir>
++      </compilation-unit>
++    </test-case>
++    <test-case FilePath="limit">
++      <compilation-unit name="push-limit-to-primary-scan-select">
++        <output-dir compare="Text">push-limit-to-primary-scan-select</output-dir>
++      </compilation-unit>
++    </test-case>
    </test-group>
  </test-suite>
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/rebalance/identical_location/identical_location.3.post.http
index e179282,68d4a0f..d5427da
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/rebalance/identical_location/identical_location.3.post.http
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/rebalance/identical_location/identical_location.3.post.http
@@@ -17,4 -17,4 +17,4 @@@
   * under the License.
   */
  
- /admin/rebalance?dataverseName=tpch&datasetName=LineItem&nodes=asterix_nc2%2Casterix_nc1
 -/admin/rebalance?dataverseName=tpch&datasetName=LineItem&nodes=%22asterix_nc2%2Casterix_nc1%22&force=false
++/admin/rebalance?dataverseName=tpch&datasetName=LineItem&nodes=asterix_nc2%2Casterix_nc1&force=false
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/results/limit/push-limit-to-primary-scan-select/push-limit-to-primary-scan-select.11.adm
index 0000000,a040253..85cf5c5
mode 000000,100644..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/limit/push-limit-to-primary-scan-select/push-limit-to-primary-scan-select.11.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/limit/push-limit-to-primary-scan-select/push-limit-to-primary-scan-select.11.adm
@@@ -1,0 -1,52 +1,52 @@@
 -distribute result [$$128]
++distribute result [$$202]
+ -- DISTRIBUTE_RESULT  |LOCAL|
+   exchange
+   -- ONE_TO_ONE_EXCHANGE  |LOCAL|
 -    aggregate [$$128] <- [agg-sql-sum($$129)]
++    aggregate [$$202] <- [agg-sql-sum($$235)]
+     -- AGGREGATE  |LOCAL|
 -      aggregate [$$129] <- [agg-sql-count(1)]
++      aggregate [$$235] <- [agg-sql-count(1)]
+       -- AGGREGATE  |LOCAL|
+         exchange
+         -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+           union
+           -- UNION_ALL  |UNPARTITIONED|
+             exchange
+             -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+               limit 1000
+               -- STREAM_LIMIT  |UNPARTITIONED|
+                 project ([])
+                 -- STREAM_PROJECT  |PARTITIONED|
+                   exchange
 -                  -- SORT_MERGE_EXCHANGE [$$126(ASC) ]  |PARTITIONED|
++                  -- SORT_MERGE_EXCHANGE [$$134(ASC) ]  |PARTITIONED|
+                     limit 1000
+                     -- STREAM_LIMIT  |PARTITIONED|
 -                      project ([$$126])
++                      project ([$$134])
+                       -- STREAM_PROJECT  |PARTITIONED|
+                         exchange
+                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
 -                          data-scan []<-[$$126, $$onek1] <- test.onek1 condition (and(ge($$onek1.getField(2), 1), le($$onek1.getField(2), 10))) limit 1000
++                          data-scan []<-[$$134, $$onek1] <- test.onek1 condition (and(ge($$onek1.getField(2), 1), le($$onek1.getField(2), 10))) limit 1000
+                           -- DATASOURCE_SCAN  |PARTITIONED|
+                             exchange
+                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                               empty-tuple-source
+                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+             exchange
+             -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+               limit 1000
+               -- STREAM_LIMIT  |UNPARTITIONED|
+                 project ([])
+                 -- STREAM_PROJECT  |PARTITIONED|
+                   exchange
 -                  -- SORT_MERGE_EXCHANGE [$$127(ASC) ]  |PARTITIONED|
++                  -- SORT_MERGE_EXCHANGE [$$135(ASC) ]  |PARTITIONED|
+                     limit 1000
+                     -- STREAM_LIMIT  |PARTITIONED|
 -                      project ([$$127])
++                      project ([$$135])
+                       -- STREAM_PROJECT  |PARTITIONED|
+                         exchange
+                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
 -                          data-scan []<-[$$127, $$onek1] <- test.onek1 condition (and(ge($$onek1.getField(2), -10), le($$onek1.getField(2), -1))) limit 1000
++                          data-scan []<-[$$135, $$onek1] <- test.onek1 condition (and(ge($$onek1.getField(2), -10), le($$onek1.getField(2), -1))) limit 1000
+                           -- DATASOURCE_SCAN  |PARTITIONED|
+                             exchange
+                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                               empty-tuple-source
+                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/results/limit/push-limit-to-primary-scan/push-limit-to-primary-scan.7.adm
index 0000000,f6de6e3..f830e9b
mode 000000,100644..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/limit/push-limit-to-primary-scan/push-limit-to-primary-scan.7.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/limit/push-limit-to-primary-scan/push-limit-to-primary-scan.7.adm
@@@ -1,0 -1,52 +1,52 @@@
 -distribute result [$$114]
++distribute result [$$180]
+ -- DISTRIBUTE_RESULT  |LOCAL|
+   exchange
+   -- ONE_TO_ONE_EXCHANGE  |LOCAL|
 -    aggregate [$$114] <- [agg-sql-sum($$115)]
++    aggregate [$$180] <- [agg-sql-sum($$209)]
+     -- AGGREGATE  |LOCAL|
 -      aggregate [$$115] <- [agg-sql-count(1)]
++      aggregate [$$209] <- [agg-sql-count(1)]
+       -- AGGREGATE  |LOCAL|
+         exchange
+         -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+           union
+           -- UNION_ALL  |UNPARTITIONED|
+             exchange
+             -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+               limit 100
+               -- STREAM_LIMIT  |UNPARTITIONED|
+                 project ([])
+                 -- STREAM_PROJECT  |PARTITIONED|
+                   exchange
 -                  -- SORT_MERGE_EXCHANGE [$$112(ASC) ]  |PARTITIONED|
++                  -- SORT_MERGE_EXCHANGE [$$120(ASC) ]  |PARTITIONED|
+                     limit 100
+                     -- STREAM_LIMIT  |PARTITIONED|
 -                      project ([$$112])
++                      project ([$$120])
+                       -- STREAM_PROJECT  |PARTITIONED|
+                         exchange
+                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
 -                          data-scan []<-[$$112, $$onek1] <- test.onek1 limit 100
++                          data-scan []<-[$$120, $$onek1] <- test.onek1 limit 100
+                           -- DATASOURCE_SCAN  |PARTITIONED|
+                             exchange
+                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                               empty-tuple-source
+                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+             exchange
+             -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+               limit 10
+               -- STREAM_LIMIT  |UNPARTITIONED|
+                 project ([])
+                 -- STREAM_PROJECT  |PARTITIONED|
+                   exchange
 -                  -- SORT_MERGE_EXCHANGE [$$113(ASC) ]  |PARTITIONED|
++                  -- SORT_MERGE_EXCHANGE [$$121(ASC) ]  |PARTITIONED|
+                     limit 10
+                     -- STREAM_LIMIT  |PARTITIONED|
 -                      project ([$$113])
++                      project ([$$121])
+                       -- STREAM_PROJECT  |PARTITIONED|
+                         exchange
+                         -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
 -                          data-scan []<-[$$113, $$onek1] <- test.onek1 limit 10
++                          data-scan []<-[$$121, $$onek1] <- test.onek1 limit 10
+                           -- DATASOURCE_SCAN  |PARTITIONED|
+                             exchange
+                             -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                               empty-tuple-source
+                               -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 883a9e3,ca9e269..246f2a3
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@@ -4015,11 -3915,19 +4015,24 @@@
          <output-dir compare="Text">dataset-and-index-same-dataverse</output-dir>
        </compilation-unit>
      </test-case>
+     <test-case FilePath="ddl" check-warnings="true">
+       <compilation-unit name="drop_dataset_invalid_dataverse">
+         <output-dir compare="Text">drop_dataset_invalid_dataverse</output-dir>
+         <expected-error>ASX1063: Cannot find dataverse with name fakeDataverse (in line 22, at column 1)</expected-error>
+         <expected-error>ASX1063: Cannot find dataverse with name fakeDataverse (in line 22, at column 1)</expected-error>
+         <expected-error>ASX1063: Cannot find dataverse with name fakeDataverse (in line 22, at column 1)</expected-error>
+         <expected-error>ASX1063: Cannot find dataverse with name fakeDataverse (in line 22, at column 1)</expected-error>
+         <expected-error>ASX1050: Cannot find dataset with name fakeDataset1 in dataverse realDataverse (in line 22, at column 1)</expected-error>
+         <expected-warn>ASX1063: Cannot find dataverse with name fakeDataverse (in line 22, at column 1)</expected-warn>
+         <expected-warn>ASX1063: Cannot find dataverse with name fakeDataverse (in line 22, at column 1)</expected-warn>
+         <expected-warn>ASX1063: Cannot find dataverse with name fakeDataverse (in line 22, at column 1)</expected-warn>
+       </compilation-unit>
+     </test-case>
 +    <test-case FilePath="ddl">
 +      <compilation-unit name="create_dataset_with_filter_on_meta">
 +        <output-dir compare="Text">create_dataset_with_filter_on_meta</output-dir>
 +      </compilation-unit>
 +    </test-case>
    </test-group>
    <test-group name="dml">
      <test-case FilePath="dml">
@@@ -12729,10 -11816,10 +12742,15 @@@
        </compilation-unit>
      </test-case>
      <test-case FilePath="leftouterjoin">
+       <compilation-unit name="empty-dataset">
+         <output-dir compare="Text">empty-dataset</output-dir>
+       </compilation-unit>
+     </test-case>
++    <test-case FilePath="leftouterjoin">
 +      <compilation-unit name="query-ASTERIXDB-769">
 +        <output-dir compare="Text">query-ASTERIXDB-769</output-dir>
 +      </compilation-unit>
 +    </test-case>
    </test-group>
    <test-group name="index-leftouterjoin">
      <test-case FilePath="index-leftouterjoin">