beam-commits mailing list archives

From jbono...@apache.org
Subject [1/2] beam git commit: Add Experimental annotation to AMQP and refine Kind for the Experimental IOs
Date Wed, 28 Jun 2017 15:08:03 GMT
Repository: beam
Updated Branches:
  refs/heads/master 6573e2ea5 -> 23a34f5df


Add Experimental annotation to AMQP and refine Kind for the Experimental IOs


Project: http://git-wip-us.apache.org/repos/asf/beam/repo
Commit: http://git-wip-us.apache.org/repos/asf/beam/commit/2cb2161c
Tree: http://git-wip-us.apache.org/repos/asf/beam/tree/2cb2161c
Diff: http://git-wip-us.apache.org/repos/asf/beam/diff/2cb2161c

Branch: refs/heads/master
Commit: 2cb2161cec824a5ca5e719a92243029e712347c1
Parents: 6573e2e
Author: Ismaël Mejía <iemejia@gmail.com>
Authored: Wed Jun 28 10:31:25 2017 +0200
Committer: Ismaël Mejía <iemejia@gmail.com>
Committed: Wed Jun 28 10:31:25 2017 +0200

----------------------------------------------------------------------
 .../amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java | 2 ++
 .../java/org/apache/beam/sdk/io/cassandra/CassandraIO.java     | 2 +-
 .../org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java  | 2 +-
 .../java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java   | 6 +++---
 .../java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java     | 2 +-
 .../beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java    | 2 +-
 .../src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java    | 2 +-
 .../main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java  | 2 +-
 .../jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java | 2 +-
 .../io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java | 2 +-
 .../src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java    | 2 +-
 .../main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java    | 2 +-
 .../java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java   | 2 +-
 .../main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java    | 2 +-
 .../mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java | 2 +-
 15 files changed, 18 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
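In short, this change does two things: AmqpIO gains the @Experimental annotation it previously lacked, and the other experimental IO connectors are refined from the bare @Experimental form to @Experimental(Experimental.Kind.SOURCE_SINK), which records the reason the API is marked experimental. Below is a minimal sketch of an IO class annotated in the new style; the class name ExampleIO is illustrative only and is not part of this commit.

import org.apache.beam.sdk.annotations.Experimental;

// Sketch only: ExampleIO is a hypothetical connector, not one of the classes touched here.
// The Kind.SOURCE_SINK argument documents that the experimental surface is a source/sink
// API whose signatures may change between Beam releases.
@Experimental(Experimental.Kind.SOURCE_SINK)
public class ExampleIO {

  private ExampleIO() {
    // The IOs in this diff follow the same pattern: a non-instantiable container class
    // exposing static read()/write() factory methods.
  }
}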


http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java b/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java
index b9a0be9..1f307b2 100644
--- a/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java
+++ b/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java
@@ -31,6 +31,7 @@ import java.util.NoSuchElementException;
 
 import javax.annotation.Nullable;
 
+import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.SerializableCoder;
 import org.apache.beam.sdk.io.UnboundedSource;
@@ -94,6 +95,7 @@ import org.joda.time.Instant;
  *
  * }</pre>
  */
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class AmqpIO {
 
   public static Read read() {

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java b/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java
index b6f4ef6..32905b7 100644
--- a/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java
+++ b/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java
@@ -82,7 +82,7 @@ import org.slf4j.LoggerFactory;
  *        .withEntity(Person.class));
  * }</pre>
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class CassandraIO {
 
   private static final Logger LOG = LoggerFactory.getLogger(CassandraIO.class);

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
index fa67fe1..4d76887 100644
--- a/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
+++ b/sdks/java/io/elasticsearch/src/main/java/org/apache/beam/sdk/io/elasticsearch/ElasticsearchIO.java
@@ -113,7 +113,7 @@ import org.elasticsearch.client.RestClientBuilder;
  * <p>Optionally, you can provide {@code withBatchSize()} and {@code withBatchSizeBytes()}
  * to specify the size of the write batch in number of documents or in bytes.
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class ElasticsearchIO {
 
   public static Read read() {

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
index 62679bb..0a90dde 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
@@ -175,7 +175,7 @@ import org.slf4j.LoggerFactory;
  * pipeline. Please refer to the documentation of corresponding
  * {@link PipelineRunner PipelineRunners} for more details.
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class BigtableIO {
   private static final Logger LOG = LoggerFactory.getLogger(BigtableIO.class);
 
@@ -211,7 +211,7 @@ public class BigtableIO {
    *
    * @see BigtableIO
    */
-  @Experimental
+  @Experimental(Experimental.Kind.SOURCE_SINK)
   @AutoValue
   public abstract static class Read extends PTransform<PBegin, PCollection<Row>> {
 
@@ -415,7 +415,7 @@ public class BigtableIO {
    *
    * @see BigtableIO
    */
-  @Experimental
+  @Experimental(Experimental.Kind.SOURCE_SINK)
   @AutoValue
   public abstract static class Write
       extends PTransform<PCollection<KV<ByteString, Iterable<Mutation>>>, PDone> {

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
index acf9285..a247d4c 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIO.java
@@ -167,7 +167,7 @@ public class SpannerIO {
   * configured with a {@link Read#withInstanceId} and {@link Read#withDatabaseId} that identify the
    * Cloud Spanner database.
    */
-  @Experimental
+  @Experimental(Experimental.Kind.SOURCE_SINK)
   public static Read read() {
     return new AutoValue_SpannerIO_Read.Builder()
         .setSpannerConfig(SpannerConfig.create())

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java b/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
index efd47fd..0b4c23f 100644
--- a/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
+++ b/sdks/java/io/hadoop/input-format/src/main/java/org/apache/beam/sdk/io/hadoop/inputformat/HadoopInputFormatIO.java
@@ -166,7 +166,7 @@ import org.slf4j.LoggerFactory;
  * }
  * </pre>
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class HadoopInputFormatIO {
   private static final Logger LOG = LoggerFactory.getLogger(HadoopInputFormatIO.class);
 

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java
index c9afe89..90ede4c 100644
--- a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java
+++ b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java
@@ -140,7 +140,7 @@ import org.slf4j.LoggerFactory;
  * it can evolve or be different in some aspects, but the idea is that users can easily migrate
  * from one to the other</p>.
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class HBaseIO {
     private static final Logger LOG = LoggerFactory.getLogger(HBaseIO.class);
 

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java b/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java
index 1549dab..4199b80 100644
--- a/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java
+++ b/sdks/java/io/hcatalog/src/main/java/org/apache/beam/sdk/io/hcatalog/HCatalogIO.java
@@ -106,7 +106,7 @@ import org.slf4j.LoggerFactory;
   *       .withBatchSize(1024L)) //optional, assumes a default batch size of 1024 if none specified
  * }</pre>
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class HCatalogIO {
 
   private static final Logger LOG = LoggerFactory.getLogger(HCatalogIO.class);

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
index 8092da6..bf73dbe 100644
--- a/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
+++ b/sdks/java/io/jdbc/src/main/java/org/apache/beam/sdk/io/jdbc/JdbcIO.java
@@ -133,7 +133,7 @@ import org.apache.commons.dbcp2.BasicDataSource;
  * Consider using <a href="https://en.wikipedia.org/wiki/Merge_(SQL)">MERGE ("upsert")
  * statements</a> supported by your database instead.
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class JdbcIO {
   /**
    * Read data from a JDBC datasource.

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
index c5e5150..f8cba5e 100644
--- a/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
+++ b/sdks/java/io/jms/src/main/java/org/apache/beam/sdk/io/jms/JmsIO.java
@@ -98,7 +98,7 @@ import org.slf4j.LoggerFactory;
  *
  * }</pre>
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class JmsIO {
 
   private static final Logger LOG = LoggerFactory.getLogger(JmsIO.class);

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
index 4d2a358..702bdd3 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaIO.java
@@ -235,7 +235,7 @@ import org.slf4j.LoggerFactory;
  * Note that {@link KafkaRecord#getTimestamp()} reflects timestamp provided by Kafka if any,
  * otherwise it is set to processing time.
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class KafkaIO {
 
   /**

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java b/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
index c97316d..b85eb63 100644
--- a/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
+++ b/sdks/java/io/kinesis/src/main/java/org/apache/beam/sdk/io/kinesis/KinesisIO.java
@@ -100,7 +100,7 @@ import org.joda.time.Instant;
  * }</pre>
  *
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public final class KinesisIO {
     /** Returns a new {@link Read} transform for reading from Kinesis. */
     public static Read read() {

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
index b63775d..5b5412c 100644
--- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
+++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbGridFSIO.java
@@ -117,7 +117,7 @@ import org.joda.time.Instant;
  * to the file separated with line feeds.
  * </p>
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class MongoDbGridFSIO {
 
   /**

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
index 04d9975..3b14182 100644
--- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
+++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
@@ -94,7 +94,7 @@ import org.slf4j.LoggerFactory;
  *
  * }</pre>
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class MongoDbIO {
 
   private static final Logger LOG = LoggerFactory.getLogger(MongoDbIO.class);

http://git-wip-us.apache.org/repos/asf/beam/blob/2cb2161c/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
----------------------------------------------------------------------
diff --git a/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java b/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
index 228a85d..add5cb5 100644
--- a/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
+++ b/sdks/java/io/mqtt/src/main/java/org/apache/beam/sdk/io/mqtt/MqttIO.java
@@ -97,7 +97,7 @@ import org.slf4j.LoggerFactory;
  *
  * }</pre>
  */
-@Experimental
+@Experimental(Experimental.Kind.SOURCE_SINK)
 public class MqttIO {
 
   private static final Logger LOG = LoggerFactory.getLogger(MqttIO.class);

