Posted to commits@beam.apache.org by ch...@apache.org on 2019/03/05 18:12:30 UTC

[beam] branch master updated: Run BQ Storage tests in Dataflow in post-commit.

This is an automated email from the ASF dual-hosted git repository.

chamikara pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new d4932ea  Run BQ Storage tests in Dataflow in post-commit.
     new cff1323  Merge pull request #7967: [BEAM-6755] Run BQ Storage tests in Dataflow in post-commit.
d4932ea is described below

commit d4932ea6757af7779c9fa29dd2d12ad615c54fca
Author: Kenneth Jung <km...@google.com>
AuthorDate: Thu Feb 28 12:53:05 2019 -0800

    Run BQ Storage tests in Dataflow in post-commit.
    
    This change modifies the test configuration for the BigQuery Storage API
    table source to run a 1GB table read test using the Dataflow runner as
    part of Beam post-commit.
---
 runners/google-cloud-dataflow-java/build.gradle    |  2 --
 .../io/gcp/bigquery/BigQueryIOStorageReadIT.java   | 22 ++--------------------
 2 files changed, 2 insertions(+), 22 deletions(-)
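
For context, a minimal, self-contained sketch of the kind of pipeline BigQueryIOStorageReadIT drives is shown below: a BigQuery Storage API (Method.DIRECT_READ) read followed by a global row count. The class name, table reference, and pipeline options are illustrative placeholders, not code from this commit; running it on Dataflow, as the post-commit task does, would require the usual DataflowRunner options.

    import com.google.api.services.bigquery.model.TableRow;
    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
    import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead.Method;
    import org.apache.beam.sdk.options.PipelineOptions;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Count;
    import org.apache.beam.sdk.values.PCollection;

    // Illustrative sketch only; the names below are placeholders, not part of this commit.
    public class BigQueryStorageReadSketch {
      public static void main(String[] args) {
        // For a Dataflow run, pass e.g. --runner=DataflowRunner --project=... --tempLocation=gs://...
        PipelineOptions options = PipelineOptionsFactory.fromArgs(args).create();
        Pipeline pipeline = Pipeline.create(options);

        // Read the table through the BigQuery Storage API (DIRECT_READ) instead of an export job.
        PCollection<TableRow> rows =
            pipeline.apply(
                "ReadViaStorageApi",
                BigQueryIO.readTableRows()
                    .from("my-project:big_query_storage.storage_read_1G") // placeholder table
                    .withMethod(Method.DIRECT_READ));

        // Count the rows, similar in spirit to the record-count check in the integration test.
        rows.apply("CountRows", Count.globally());

        pipeline.run().waitUntilFinish();
      }
    }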

diff --git a/runners/google-cloud-dataflow-java/build.gradle b/runners/google-cloud-dataflow-java/build.gradle
index 66b32a9..3218e46 100644
--- a/runners/google-cloud-dataflow-java/build.gradle
+++ b/runners/google-cloud-dataflow-java/build.gradle
@@ -300,7 +300,6 @@ task googleCloudPlatformLegacyWorkerIntegrationTest(type: Test) {
 
   include '**/*IT.class'
   exclude '**/BigQueryIOReadIT.class'
-  exclude '**/BigQueryIOStorageReadIT.class'
   exclude '**/BigQueryIOStorageReadTableRowIT.class'
   exclude '**/PubsubReadIT.class'
   exclude '**/*KmsKeyIT.class'
@@ -343,7 +342,6 @@ task googleCloudPlatformFnApiWorkerIntegrationTest(type: Test) {
 
     include '**/*IT.class'
     exclude '**/BigQueryIOReadIT.class'
-    exclude '**/BigQueryIOStorageReadIT.class'
     exclude '**/BigQueryIOStorageReadTableRowIT.class'
     exclude '**/PubsubReadIT.class'
     exclude '**/SpannerReadIT.class'
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadIT.java
index 24e7bcd..d565cb5 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadIT.java
@@ -52,8 +52,8 @@ public class BigQueryIOStorageReadIT {
           "1G", 11110839L,
           "1T", 11110839000L);
 
-  private static final String DATASET_ID = "big_query_import_export";
-  private static final String TABLE_PREFIX = "parallel_read_";
+  private static final String DATASET_ID = "big_query_storage";
+  private static final String TABLE_PREFIX = "storage_read_";
 
   private BigQueryIOStorageReadOptions options;
 
@@ -94,26 +94,8 @@ public class BigQueryIOStorageReadIT {
   }
 
   @Test
-  public void testBigQueryStorageReadEmpty() throws Exception {
-    setUpTestEnvironment("empty");
-    runBigQueryIOStorageReadPipeline();
-  }
-
-  @Test
-  public void testBigQueryStorageRead1M() throws Exception {
-    setUpTestEnvironment("1M");
-    runBigQueryIOStorageReadPipeline();
-  }
-
-  @Test
   public void testBigQueryStorageRead1G() throws Exception {
     setUpTestEnvironment("1G");
     runBigQueryIOStorageReadPipeline();
   }
-
-  @Test
-  public void testBigqueryStorageRead1T() throws Exception {
-    setUpTestEnvironment("1T");
-    runBigQueryIOStorageReadPipeline();
-  }
 }