You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@beam.apache.org by jb...@apache.org on 2017/04/12 19:53:23 UTC
[1/2] beam git commit: [BEAM-911] Mark IO APIs as @Experimental
Repository: beam
Updated Branches:
refs/heads/master a405a2c46 -> 1788cef96
[BEAM-911] Mark IO APIs as @Experimental
Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/024b6e05
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/024b6e05
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/024b6e05
Branch: refs/heads/master
Commit: 024b6e05a99b12142a2b10c0612288c6177eff96
Parents: a405a2c
Author: Ismaël Mejía <ie...@apache.org>
Authored: Wed Apr 12 17:12:05 2017 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Wed Apr 12 21:35:13 2017 +0200
----------------------------------------------------------------------
.../org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java | 2 ++
.../beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java | 3 ++-
.../src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java | 2 ++
.../src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java | 2 ++
.../io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java | 2 ++
.../io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java | 2 ++
.../kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java | 2 ++
.../src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java | 2 ++
.../main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java | 2 ++
.../src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java | 2 ++
.../io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java | 2 ++
11 files changed, 22 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
index 54692b9..baf0cc2 100644
--- a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
+++ b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
@@ -41,6 +41,7 @@ import java.util.Map;
import java.util.NoSuchElementException;
import javax.annotation.Nullable;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.io.BoundedSource;
@@ -114,6 +115,7 @@ import org.elasticsearch.client.RestClientBuilder;
* <p>Optionally, you can provide {@code withBatchSize()} and {@code withBatchSizeBytes()}
* to specify the size of the write batch in number of documents or in bytes.
*/
+@Experimental
public class ElasticsearchIO {
public static Read read() {
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java b/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
index 61dc1bf..d776ea0 100644
--- a/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
+++ b/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
@@ -42,6 +42,7 @@ import java.util.concurrent.atomic.AtomicLong;
import javax.annotation.Nullable;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.CannotProvideCoderException;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderException;
@@ -170,7 +171,7 @@ import org.slf4j.LoggerFactory;
* }
* </pre>
*/
-
+@Experimental
public class HadoopInputFormatIO {
private static final Logger LOG = LoggerFactory.getLogger(HadoopInputFormatIO.class);
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java
index 10ff788..15d61cb 100644
--- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java
+++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java
@@ -33,6 +33,7 @@ import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroKeyOutputFormat;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
@@ -92,6 +93,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
* @param <V> the type of values to be written to the sink via {@link FileOutputFormat}.
*/
@AutoValue
+@Experimental
public abstract class HDFSFileSink<T, K, V> extends Sink<T> {
private static final JobID jobId = new JobID(
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java
index e317c6e..b55944b 100644
--- a/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java
+++ b/sdks/java/io/hdfs/src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java
@@ -39,6 +39,7 @@ import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderException;
@@ -104,6 +105,7 @@ import org.slf4j.LoggerFactory;
* @param <V> the type of values to be read from the source via {@link FileInputFormat}.
*/
@AutoValue
+@Experimental
public abstract class HDFSFileSource<T, K, V> extends BoundedSource<T> {
private static final long serialVersionUID = 0L;
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
index dc31130..8fdbeb0 100644
--- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
+++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
@@ -31,6 +31,7 @@ import java.util.Random;
import javax.annotation.Nullable;
import javax.sql.DataSource;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
@@ -135,6 +136,7 @@ import org.apache.commons.dbcp2.BasicDataSource;
* Consider using <a href="https://en.wikipedia.org/wiki/Merge_(SQL)">MERGE ("upsert")
* statements</a> supported by your database instead.
*/
+@Experimental
public class JdbcIO {
/**
* Read data from a JDBC datasource.
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
index a935b56..89016ac 100644
--- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
+++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
@@ -37,6 +37,7 @@ import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TextMessage;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.SerializableCoder;
@@ -97,6 +98,7 @@ import org.slf4j.LoggerFactory;
*
* }</pre>
*/
+@Experimental
public class JmsIO {
private static final Logger LOG = LoggerFactory.getLogger(JmsIO.class);
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
index 80b40be..69d82bc 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
@@ -52,6 +52,7 @@ import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nullable;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.AtomicCoder;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.coders.ByteArrayCoder;
@@ -211,6 +212,7 @@ import org.slf4j.LoggerFactory;
* Note that {@link KafkaRecord#getTimestamp()} reflects timestamp provided by Kafka if any,
* otherwise it is set to processing time.
*/
+@Experimental
public class KafkaIO {
/**
* Creates an uninitialized {@link Read} {@link PTransform}. Before use, basic Kafka
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java b/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
index 91939d4..45a7b2d 100644
--- a/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
+++ b/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
@@ -27,6 +27,7 @@ import com.amazonaws.regions.Regions;
import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClient;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.transforms.PTransform;
import org.joda.time.Instant;
@@ -99,6 +100,7 @@ import org.joda.time.Instant;
* }</pre>
*
*/
+@Experimental
public final class KinesisIO {
/**
* A {@link PTransform} that reads from a Kinesis stream.
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
index d924c14..919fda3 100644
--- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
+++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
@@ -43,6 +43,7 @@ import java.util.NoSuchElementException;
import javax.annotation.Nullable;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
@@ -119,6 +120,7 @@ import org.joda.time.Instant;
* to the file separated with line feeds.
* </p>
*/
+@Experimental
public class MongoDbGridFSIO {
/**
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
index bd262e7..09b8505 100644
--- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
+++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
@@ -30,6 +30,7 @@ import com.mongodb.client.MongoDatabase;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.BoundedSource;
@@ -91,6 +92,7 @@ import org.slf4j.LoggerFactory;
*
* }</pre>
*/
+@Experimental
public class MongoDbIO {
private static final Logger LOG = LoggerFactory.getLogger(MongoDbIO.class);
http://git-wip-us.apache.org/repos/asf/beam/blob/024b6e05/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java b/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
index 46f2dcc..820b265 100644
--- a/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
+++ b/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
@@ -31,6 +31,7 @@ import java.util.UUID;
import javax.annotation.Nullable;
+import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.coders.ByteArrayCoder;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.SerializableCoder;
@@ -99,6 +100,7 @@ import org.slf4j.LoggerFactory;
*
* }</pre>
*/
+@Experimental
public class MqttIO {
private static final Logger LOG = LoggerFactory.getLogger(MqttIO.class);
[2/2] beam git commit: [BEAM-911] This closes #2510
Posted by jb...@apache.org.
[BEAM-911] This closes #2510
Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/1788cef9
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/1788cef9
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/1788cef9
Branch: refs/heads/master
Commit: 1788cef96916ebfb588848922a5cbc2584586e9b
Parents: a405a2c 024b6e0
Author: Jean-Baptiste Onofré <jb...@apache.org>
Authored: Wed Apr 12 21:53:15 2017 +0200
Committer: Jean-Baptiste Onofré <jb...@apache.org>
Committed: Wed Apr 12 21:53:15 2017 +0200
----------------------------------------------------------------------
.../org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java | 2 ++
.../beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java | 3 ++-
.../src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSink.java | 2 ++
.../src/main/java/org/apache/beam/sdk/io/hdfs/HDFSFileSource.java | 2 ++
.../io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java | 2 ++
.../io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java | 2 ++
.../kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java | 2 ++
.../src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java | 2 ++
.../main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java | 2 ++
.../src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java | 2 ++
.../io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java | 2 ++
11 files changed, 22 insertions(+), 1 deletion(-)
----------------------------------------------------------------------