Posted to commits@beam.apache.org by dh...@apache.org on 2016/09/02 00:02:22 UTC

[2/2] incubator-beam git commit: Cloud Datastore naming clean-up

Cloud Datastore naming clean-up


Project: http://git-wip-us.apache.org/repos/asf/incubator-beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-beam/commit/d0a4a0d6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-beam/tree/d0a4a0d6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-beam/diff/d0a4a0d6

Branch: refs/heads/master
Commit: d0a4a0d6bdea846b3947e539592a4c63af1c66eb
Parents: e17d331
Author: Vikas Kedigehalli <vi...@google.com>
Authored: Thu Sep 1 13:31:35 2016 -0700
Committer: Dan Halperin <dh...@google.com>
Committed: Thu Sep 1 17:02:16 2016 -0700

----------------------------------------------------------------------
 .../beam/examples/complete/AutoComplete.java    | 12 ++---
 .../examples/cookbook/DatastoreWordCount.java   | 18 +++----
 .../beam/sdk/io/gcp/datastore/DatastoreIO.java  |  6 +--
 .../beam/sdk/io/gcp/datastore/DatastoreV1.java  | 52 ++++++++++----------
 .../beam/sdk/io/gcp/datastore/package-info.java |  2 +-
 .../sdk/io/gcp/datastore/DatastoreV1Test.java   |  6 +--
 6 files changed, 48 insertions(+), 48 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/d0a4a0d6/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
index 56c7855..2182e6d 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/complete/AutoComplete.java
@@ -96,7 +96,7 @@ import org.joda.time.Duration;
  *   --streaming
  * }</pre>
  *
- * <p>This will update the datastore every 10 seconds based on the last
+ * <p>This will update the Cloud Datastore every 10 seconds based on the last
  * 30 minutes of data received.
  */
 public class AutoComplete {
@@ -380,7 +380,7 @@ public class AutoComplete {
 
   /**
    * Takes as input a the top candidates per prefix, and emits an entity
-   * suitable for writing to Datastore.
+   * suitable for writing to Cloud Datastore.
    *
    * <p>Note: We use ancestor keys for strong consistency. See the Cloud Datastore documentation on
    * <a href="https://cloud.google.com/datastore/docs/concepts/structuring_for_strong_consistency">
@@ -431,7 +431,7 @@ public class AutoComplete {
     Boolean getRecursive();
     void setRecursive(Boolean value);
 
-    @Description("Datastore entity kind")
+    @Description("Cloud Datastore entity kind")
     @Default.String("autocomplete-demo")
     String getKind();
     void setKind(String value);
@@ -441,17 +441,17 @@ public class AutoComplete {
     Boolean getOutputToBigQuery();
     void setOutputToBigQuery(Boolean value);
 
-    @Description("Whether output to Datastore")
+    @Description("Whether output to Cloud Datastore")
     @Default.Boolean(false)
     Boolean getOutputToDatastore();
     void setOutputToDatastore(Boolean value);
 
-    @Description("Datastore ancestor key")
+    @Description("Cloud Datastore ancestor key")
     @Default.String("root")
     String getDatastoreAncestorKey();
     void setDatastoreAncestorKey(String value);
 
-    @Description("Datastore output project ID, defaults to project ID")
+    @Description("Cloud Datastore output project ID, defaults to project ID")
     String getOutputProject();
     void setOutputProject(String value);
   }
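
For readers trying the renamed options above, here is a minimal sketch, not part of this commit, of launching the AutoComplete example with Cloud Datastore output enabled. The flag names simply mirror the option getters shown in the diff, the output project ID is a placeholder, and the example's other flags (input file, runner selection, and so on) are omitted for brevity.

import org.apache.beam.examples.complete.AutoComplete;

public class RunAutoCompleteToDatastore {
  public static void main(String[] args) throws Exception {
    // Flag names mirror the option getters above; the output project ID is a placeholder,
    // and other flags the example requires (e.g. its input file) are omitted here.
    String[] exampleArgs = {
        "--streaming",
        "--outputToDatastore=true",
        "--kind=autocomplete-demo",          // Cloud Datastore entity kind
        "--datastoreAncestorKey=root",       // ancestor key used for strong consistency
        "--outputProject=my-output-project"  // Cloud Datastore output project ID
    };
    AutoComplete.main(exampleArgs);
  }
}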

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/d0a4a0d6/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
----------------------------------------------------------------------
diff --git a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
index eb2165f..434e9fb 100644
--- a/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
+++ b/examples/java/src/main/java/org/apache/beam/examples/cookbook/DatastoreWordCount.java
@@ -47,11 +47,11 @@ import org.apache.beam.sdk.transforms.ParDo;
 /**
  * A WordCount example using DatastoreIO.
  *
- * <p>This example shows how to use DatastoreIO to read from Datastore and
+ * <p>This example shows how to use DatastoreIO to read from Cloud Datastore and
  * write the results to Cloud Storage.  Note that this example will write
- * data to Datastore, which may incur charge for Datastore operations.
+ * data to Cloud Datastore, which may incur charge for Cloud Datastore operations.
  *
- * <p>To run this example, users need to use gcloud to get credential for Datastore:
+ * <p>To run this example, users need to use gcloud to get credential for Cloud Datastore:
  * <pre>{@code
  * $ gcloud auth login
  * }</pre>
@@ -150,7 +150,7 @@ public class DatastoreWordCount {
    * <p>Inherits standard configuration options.
    */
   public static interface Options extends PipelineOptions {
-    @Description("Path of the file to read from and store to Datastore")
+    @Description("Path of the file to read from and store to Cloud Datastore")
     @Default.String("gs://apache-beam-samples/shakespeare/kinglear.txt")
     String getInput();
     void setInput(String value);
@@ -160,17 +160,17 @@ public class DatastoreWordCount {
     String getOutput();
     void setOutput(String value);
 
-    @Description("Project ID to read from datastore")
+    @Description("Project ID to read from Cloud Datastore")
     @Validation.Required
     String getProject();
     void setProject(String value);
 
-    @Description("Datastore Entity kind")
+    @Description("Cloud Datastore Entity kind")
     @Default.String("shakespeare-demo")
     String getKind();
     void setKind(String value);
 
-    @Description("Datastore Namespace")
+    @Description("Cloud Datastore Namespace")
     String getNamespace();
     void setNamespace(@Nullable String value);
 
@@ -186,7 +186,7 @@ public class DatastoreWordCount {
 
   /**
    * An example that creates a pipeline to populate DatastoreIO from a
-   * text input.  Forces use of DirectRunner for local execution mode.
+   * text input. Forces use of DirectRunner for local execution mode.
    */
   public static void writeDataToDatastore(Options options) {
       Pipeline p = Pipeline.create(options);
@@ -217,7 +217,7 @@ public class DatastoreWordCount {
   }
 
   /**
-   * An example that creates a pipeline to do DatastoreIO.Read from Datastore.
+   * An example that creates a pipeline to do DatastoreIO.Read from Cloud Datastore.
    */
   public static void readDataFromDatastore(Options options) {
     Query query = makeAncestorKindQuery(options);
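
To make the writeDataToDatastore(...) path described above concrete, here is a minimal sketch under a few assumptions: it is not taken from this commit, LineToEntityFn is a simplified stand-in for the example's own entity-building DoFn, and the project ID and kind are placeholders.

import static com.google.datastore.v1.client.DatastoreHelper.makeKey;
import static com.google.datastore.v1.client.DatastoreHelper.makeValue;

import com.google.datastore.v1.Entity;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;

public class WriteLinesToDatastoreSketch {

  /** Simplified stand-in for the example's entity-building DoFn. */
  static class LineToEntityFn extends DoFn<String, Entity> {
    @ProcessElement
    public void processElement(ProcessContext c) {
      Entity.Builder entity = Entity.newBuilder();
      // Key each entity by its text under a placeholder kind; real code would pick a sturdier key.
      entity.setKey(makeKey("shakespeare-demo", c.element()).build());
      entity.getMutableProperties().put("content", makeValue(c.element()).build());
      c.output(entity.build());
    }
  }

  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
    p.apply(Create.of("to be or not to be"))                              // stand-in for the text input
        .apply(ParDo.of(new LineToEntityFn()))
        .apply(DatastoreIO.v1().write().withProjectId("my-project-id"));  // placeholder project
    p.run();
  }
}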

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/d0a4a0d6/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreIO.java
index 5abf015..c50c23a 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreIO.java
@@ -22,7 +22,7 @@ import org.apache.beam.sdk.annotations.Experimental;
 /**
  * <p>{@link DatastoreIO} provides an API for reading from and writing to
  * <a href="https://developers.google.com/datastore/">Google Cloud Datastore</a> over different
- * versions of the Datastore Client libraries.
+ * versions of the Cloud Datastore Client libraries.
  *
  * <p>To use the v1 version see {@link DatastoreV1}.
  */
@@ -32,8 +32,8 @@ public class DatastoreIO {
   private DatastoreIO() {}
 
   /**
-   * Returns a {@link DatastoreV1} that provides an API for accessing Datastore through v1 version
-   * of Datastore Client library.
+   * Returns a {@link DatastoreV1} that provides an API for accessing Cloud Datastore through v1
+   * version of Datastore Client library.
    */
   public static DatastoreV1 v1() {
     return new DatastoreV1();

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/d0a4a0d6/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java
index e24bc80..6bd03b5 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1.java
@@ -91,13 +91,13 @@ import org.slf4j.LoggerFactory;
  * {@link Entity} objects.
  *
  * <p>This API currently requires an authentication workaround. To use {@link DatastoreV1}, users
- * must use the {@code gcloud} command line tool to get credentials for Datastore:
+ * must use the {@code gcloud} command line tool to get credentials for Cloud Datastore:
  * <pre>
  * $ gcloud auth login
  * </pre>
  *
- * <p>To read a {@link PCollection} from a query to Datastore, use {@link DatastoreV1#read} and
- * its methods {@link DatastoreV1.Read#withProjectId} and {@link DatastoreV1.Read#withQuery} to
+ * <p>To read a {@link PCollection} from a query to Cloud Datastore, use {@link DatastoreV1#read}
+ * and its methods {@link DatastoreV1.Read#withProjectId} and {@link DatastoreV1.Read#withQuery} to
  * specify the project to query and the query to read from. You can optionally provide a namespace
  * to query within using {@link DatastoreV1.Read#withNamespace}. You could also optionally specify
  * how many splits you want for the query using {@link DatastoreV1.Read#withNumQuerySplits}.
@@ -122,7 +122,7 @@ import org.slf4j.LoggerFactory;
  * {@link com.google.datastore.v1.Query.Builder#setLimit(Int32Value)}, then
  * all returned results will be read by a single Dataflow worker in order to ensure correct data.
  *
- * <p>To write a {@link PCollection} to a Datastore, use {@link DatastoreV1#write},
+ * <p>To write a {@link PCollection} to a Cloud Datastore, use {@link DatastoreV1#write},
  * specifying the Cloud Datastore project to write to:
  *
  * <pre> {@code
@@ -131,7 +131,7 @@ import org.slf4j.LoggerFactory;
  * p.run();
  * } </pre>
  *
- * <p>To delete a {@link PCollection} of {@link Entity Entities} from Datastore, use
+ * <p>To delete a {@link PCollection} of {@link Entity Entities} from Cloud Datastore, use
  * {@link DatastoreV1#deleteEntity()}, specifying the Cloud Datastore project to write to:
  *
  * <pre> {@code
@@ -140,8 +140,8 @@ import org.slf4j.LoggerFactory;
  * p.run();
  * } </pre>
  *
- * <p>To delete entities associated with a {@link PCollection} of {@link Key Keys} from Datastore,
- * use {@link DatastoreV1#deleteKey}, specifying the Cloud Datastore project to write to:
+ * <p>To delete entities associated with a {@link PCollection} of {@link Key Keys} from Cloud
+ * Datastore, use {@link DatastoreV1#deleteKey}, specifying the Cloud Datastore project to write to:
  *
  * <pre> {@code
  * PCollection<Entity> entities = ...;
@@ -169,7 +169,7 @@ import org.slf4j.LoggerFactory;
  * more details.
  *
  * <p>Please see <a href="https://cloud.google.com/datastore/docs/activate">Cloud Datastore Sign Up
- * </a>for security and permission related information specific to Datastore.
+ * </a>for security and permission related information specific to Cloud Datastore.
  *
  * @see org.apache.beam.sdk.runners.PipelineRunner
  */
@@ -180,7 +180,7 @@ public class DatastoreV1 {
   DatastoreV1() {}
 
   /**
-   * Datastore has a limit of 500 mutations per batch operation, so we flush
+   * Cloud Datastore has a limit of 500 mutations per batch operation, so we flush
    * changes to Datastore every 500 entities.
    */
   @VisibleForTesting
@@ -197,7 +197,7 @@ public class DatastoreV1 {
   }
 
   /**
-   * A {@link PTransform} that reads the result rows of a Datastore query as {@code Entity}
+   * A {@link PTransform} that reads the result rows of a Cloud Datastore query as {@code Entity}
    * objects.
    *
    * @see DatastoreIO
@@ -234,7 +234,7 @@ public class DatastoreV1 {
 
     /**
      * Computes the number of splits to be performed on the given query by querying the estimated
-     * size from Datastore.
+     * size from Cloud Datastore.
      */
     static int getEstimatedNumSplits(Datastore datastore, Query query, @Nullable String namespace) {
       int numSplits;
@@ -252,7 +252,7 @@ public class DatastoreV1 {
     }
 
     /**
-     * Datastore system tables with statistics are periodically updated. This method fetches
+     * Cloud Datastore system tables with statistics are periodically updated. This method fetches
      * the latest timestamp (in microseconds) of statistics update using the {@code __Stat_Total__}
      * table.
      */
@@ -281,7 +281,7 @@ public class DatastoreV1 {
     /**
      * Get the estimated size of the data returned by the given query.
      *
-     * <p>Datastore provides no way to get a good estimate of how large the result of a query
+     * <p>Cloud Datastore provides no way to get a good estimate of how large the result of a query
      * entity kind being queried, using the __Stat_Kind__ system table, assuming exactly 1 kind
      * is specified in the query.
      *
@@ -357,7 +357,7 @@ public class DatastoreV1 {
     }
 
     /**
-     * Returns a new {@link DatastoreV1.Read} that reads from the Datastore for the specified
+     * Returns a new {@link DatastoreV1.Read} that reads from the Cloud Datastore for the specified
      * project.
      */
     public DatastoreV1.Read withProjectId(String projectId) {
@@ -400,7 +400,7 @@ public class DatastoreV1 {
      *   <li>If the {@code query} has a user limit set, then {@code numQuerySplits} will be
      *   ignored and no split will be performed.
      *   <li>Under certain cases Cloud Datastore is unable to split query to the requested number of
-     *   splits. In such cases we just use whatever the Datastore returns.
+     *   splits. In such cases we just use whatever the Cloud Datastore returns.
      * </ul>
      */
     public DatastoreV1.Read withNumQuerySplits(int numQuerySplits) {
@@ -492,7 +492,7 @@ public class DatastoreV1 {
     }
 
     /**
-     * A class for v1 Datastore related options.
+     * A class for v1 Cloud Datastore related options.
      */
     @VisibleForTesting
     static class V1Options implements Serializable {
@@ -608,7 +608,7 @@ public class DatastoreV1 {
     }
 
     /**
-     * A {@link DoFn} that reads entities from Datastore for each query.
+     * A {@link DoFn} that reads entities from Cloud Datastore for each query.
      */
     @VisibleForTesting
     static class ReadFn extends DoFn<Query, Entity> {
@@ -908,8 +908,8 @@ public class DatastoreV1 {
      *
      * <p>If a commit fails, it will be retried (up to {@link DatastoreWriterFn#MAX_RETRIES}
      * times). All mutations in the batch will be committed again, even if the commit was partially
-     * successful. If the retry limit is exceeded, the last exception from the Datastore will be
-     * thrown.
+     * successful. If the retry limit is exceeded, the last exception from the Cloud Datastore will
+     * be thrown.
      *
      * @throws DatastoreException if the commit fails or IOException or InterruptedException if
      * backing off between retries fails.
@@ -953,7 +953,7 @@ public class DatastoreV1 {
   }
 
   /**
-   * Returns true if a Datastore key is complete. A key is complete if its last element
+   * Returns true if a Cloud Datastore key is complete. A key is complete if its last element
    * has either an id or a name.
    */
   static boolean isValidKey(Key key) {
@@ -974,7 +974,7 @@ public class DatastoreV1 {
     public Mutation apply(Entity entity) {
       // Verify that the entity to write has a complete key.
       checkArgument(isValidKey(entity.getKey()),
-          "Entities to be written to the Datastore must have complete keys:\n%s", entity);
+          "Entities to be written to the Cloud Datastore must have complete keys:\n%s", entity);
 
       return makeUpsert(entity).build();
     }
@@ -995,7 +995,7 @@ public class DatastoreV1 {
     public Mutation apply(Entity entity) {
       // Verify that the entity to delete has a complete key.
       checkArgument(isValidKey(entity.getKey()),
-          "Entities to be deleted from the Datastore must have complete keys:\n%s", entity);
+          "Entities to be deleted from the Cloud Datastore must have complete keys:\n%s", entity);
 
       return makeDelete(entity.getKey()).build();
     }
@@ -1016,7 +1016,7 @@ public class DatastoreV1 {
     public Mutation apply(Key key) {
       // Verify that the entity to delete has a complete key.
       checkArgument(isValidKey(key),
-          "Keys to be deleted from the Datastore must be complete:\n%s", key);
+          "Keys to be deleted from the Cloud Datastore must be complete:\n%s", key);
 
       return makeDelete(key).build();
     }
@@ -1029,7 +1029,7 @@ public class DatastoreV1 {
   }
 
   /**
-   * A wrapper factory class for Datastore singleton classes {@link DatastoreFactory} and
+   * A wrapper factory class for Cloud Datastore singleton classes {@link DatastoreFactory} and
    * {@link QuerySplitter}
    *
    * <p>{@link DatastoreFactory} and {@link QuerySplitter} are not java serializable, hence
@@ -1038,7 +1038,7 @@ public class DatastoreV1 {
   @VisibleForTesting
   static class V1DatastoreFactory implements Serializable {
 
-    /** Builds a Datastore client for the given pipeline options and project. */
+    /** Builds a Cloud Datastore client for the given pipeline options and project. */
     public Datastore getDatastore(PipelineOptions pipelineOptions, String projectId) {
       DatastoreOptions.Builder builder =
           new DatastoreOptions.Builder()
@@ -1055,7 +1055,7 @@ public class DatastoreV1 {
       return DatastoreFactory.get().create(builder.build());
     }
 
-    /** Builds a Datastore {@link QuerySplitter}. */
+    /** Builds a Cloud Datastore {@link QuerySplitter}. */
     public QuerySplitter getQuerySplitter() {
       return DatastoreHelper.getQuerySplitter();
     }
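
As a companion to the read-side javadoc above, here is a minimal sketch assembled from the method names it mentions (read, withProjectId, withQuery, withNamespace, withNumQuerySplits) rather than copied from the source; the project ID, kind, namespace, and split count are placeholders.

import com.google.datastore.v1.Entity;
import com.google.datastore.v1.Query;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.values.PCollection;

public class ReadFromDatastoreSketch {
  public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

    // Query every entity of a placeholder kind.
    Query.Builder queryBuilder = Query.newBuilder();
    queryBuilder.addKindBuilder().setName("my-kind");

    PCollection<Entity> entities = p.apply(
        DatastoreIO.v1().read()
            .withProjectId("my-project-id")    // placeholder project
            .withQuery(queryBuilder.build())
            .withNamespace("my-namespace")     // optional, see Read#withNamespace
            .withNumQuerySplits(12));          // optional hint; fewer splits may be returned

    // Downstream transforms over 'entities' would follow here.
    p.run();
  }
}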

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/d0a4a0d6/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/package-info.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/package-info.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/package-info.java
index 1ca0266..4e5fbc3 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/package-info.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/datastore/package-info.java
@@ -19,6 +19,6 @@
 /**
  * <p>Provides an API for reading from and writing to
  * <a href="https://developers.google.com/datastore/">Google Cloud Datastore</a> over different
- * versions of the Datastore Client libraries.
+ * versions of the Cloud Datastore Client libraries.
  */
 package org.apache.beam.sdk.io.gcp.datastore;

http://git-wip-us.apache.org/repos/asf/incubator-beam/blob/d0a4a0d6/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java
index d96c320..dd1904a 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java
@@ -430,7 +430,7 @@ public class DatastoreV1Test {
     UpsertFn upsertFn = new UpsertFn();
 
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Entities to be written to the Datastore must have complete keys");
+    thrown.expectMessage("Entities to be written to the Cloud Datastore must have complete keys");
 
     upsertFn.apply(entity);
   }
@@ -458,7 +458,7 @@ public class DatastoreV1Test {
     DeleteEntityFn deleteEntityFn = new DeleteEntityFn();
 
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Entities to be deleted from the Datastore must have complete keys");
+    thrown.expectMessage("Entities to be deleted from the Cloud Datastore must have complete keys");
 
     deleteEntityFn.apply(entity);
   }
@@ -485,7 +485,7 @@ public class DatastoreV1Test {
     DeleteKeyFn deleteKeyFn = new DeleteKeyFn();
 
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage("Keys to be deleted from the Datastore must be complete");
+    thrown.expectMessage("Keys to be deleted from the Cloud Datastore must be complete");
 
     deleteKeyFn.apply(key);
   }
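
The "complete keys" messages asserted above come from the isValidKey(...) rule documented earlier (a key is complete if its last path element has either an id or a name). A small sketch of that rule, with placeholder kind and name values:

import static com.google.datastore.v1.client.DatastoreHelper.makeKey;

import com.google.datastore.v1.Key;

public class KeyCompletenessSketch {
  public static void main(String[] args) {
    // Last path element has a name, so the key is complete and accepted by the upsert/delete fns.
    Key complete = makeKey("my-kind", "my-name").build();

    // Last path element has only a kind (no id or name), so the key is incomplete and rejected.
    Key incomplete = makeKey("my-kind").build();

    System.out.println(complete.getPath(0).getName());    // "my-name"
    System.out.println(incomplete.getPath(0).getName());  // "" (unset)
  }
}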