beam-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "ASF GitHub Bot (Jira)" <j...@apache.org>
Subject [jira] [Work logged] (BEAM-2546) Add InfluxDbIO
Date Sun, 12 Jul 2020 04:51:00 GMT

     [ https://issues.apache.org/jira/browse/BEAM-2546?focusedWorklogId=457700&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-457700 ]

ASF GitHub Bot logged work on BEAM-2546:
----------------------------------------

                Author: ASF GitHub Bot
            Created on: 12/Jul/20 04:50
            Start Date: 12/Jul/20 04:50
    Worklog Time Spent: 10m 
      Work Description: bipinupd commented on a change in pull request #11459:
URL: https://github.com/apache/beam/pull/11459#discussion_r453267113



##########
File path: sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDBIO.java
##########
@@ -0,0 +1,829 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.io.influxdb;
+
+import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
+import static org.influxdb.BatchOptions.DEFAULT_BATCH_INTERVAL_DURATION;
+import static org.influxdb.BatchOptions.DEFAULT_BUFFER_LIMIT;
+
+import com.google.auto.value.AutoValue;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.List;
+import javax.annotation.Nullable;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSocketFactory;
+import javax.net.ssl.TrustManager;
+import javax.net.ssl.X509TrustManager;
+import okhttp3.OkHttpClient;
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.coders.Coder;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
+import org.apache.beam.sdk.io.BoundedSource;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.options.ValueProvider;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.transforms.SerializableFunction;
+import org.apache.beam.sdk.transforms.display.DisplayData;
+import org.apache.beam.sdk.transforms.display.HasDisplayData;
+import org.apache.beam.sdk.values.PBegin;
+import org.apache.beam.sdk.values.PCollection;
+import org.apache.beam.sdk.values.PDone;
+import org.influxdb.BatchOptions;
+import org.influxdb.InfluxDB;
+import org.influxdb.InfluxDBFactory;
+import org.influxdb.dto.Query;
+import org.influxdb.dto.QueryResult;
+import org.influxdb.dto.QueryResult.Result;
+import org.influxdb.dto.QueryResult.Series;
+import org.joda.time.DateTime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * IO to read and write to InfluxDB.
+ *
+ * <h3>Reading from InfluxDB</h3>
+ *
+ * <p>InfluxDBIO {@link #read()} returns a bounded collection of {@code String} as a {@code
+ * PCollection<String>}.
+ *
+ * <p>you have to provide a {@link DataSourceConfiguration} using<br>
+ * {@link DataSourceConfiguration#create(String, String, String)}(url, userName and password).
+ * Optionally, {@link DataSourceConfiguration#withUsername(String)} and {@link
+ * DataSourceConfiguration#withPassword(String)} allows you to define userName and password.
+ *
+ * <p>For example:
+ *
+ * <pre>{@code
+ * PCollection<String> collection = pipeline.apply(InfluxDBIO.read()
+ *   .withDataSourceConfiguration(InfluxDBIO.DataSourceConfiguration.create(
+ *          "https://localhost:8086","userName","password"))
+ *   .withDatabase("metrics")
+ *   .withRetentionPolicy("autogen")
+ *   .withSslInvalidHostNameAllowed(true)
+ *   .withSslEnabled(true));
+ * }</pre>
+ *
+ * <p>Read with query example:
+ *
+ * <pre>{@code
+ * PCollection<String> collection = pipeline.apply(InfluxDBIO.read()
+ *   .withDataSourceConfiguration(InfluxDBIO.DataSourceConfiguration.create(
+ *          "https://localhost:8086","userName","password"))
+ *   .withDatabase("metrics")
+ *   .withQuery("SELECT * FROM CPU")
+ *   .withRetentionPolicy("autogen")
+ *   .withSslInvalidHostNameAllowed(true)
+ *   .withSslEnabled(true));
+ * }</pre>
+ *
+ * <h3>Writing to InfluxDB </h3>
+ *
+ * <p>InfluxDB sink supports writing records into a database. It writes a {@link PCollection} to the
+ * database by converting each T. The T should implement getLineProtocol() from {@link
+ * LineProtocolConvertable}.
+ *
+ * <p>Like the {@link #read()}, to configure the {@link #write()}, you have to provide a {@link
+ * DataSourceConfiguration}.
+ *
+ * <pre>{@code
+ * pipeline
+ *   .apply(...)
+ *   .apply(InfluxDb.write()
+ *      .withDataSourceConfiguration(InfluxDBIO.DataSourceConfiguration.create(
+ *            "https://localhost:8086","userName","password"))
+ *   .withRetentionPolicy("autogen")
+ *   .withDatabase("metrics")
+ *   .withSslInvalidHostNameAllowed(true)
+ *   .withSslEnabled(true));
+ *    );
+ * }</pre>
+ *
+ * *
+ */
+@Experimental(Experimental.Kind.SOURCE_SINK)
+public class InfluxDBIO {
+  private static final Logger LOG = LoggerFactory.getLogger(InfluxDBIO.class);
+
+  public static Write write() {
+    return new AutoValue_InfluxDBIO_Write.Builder()
+        .setSslEnabled(false)
+        .setSslInvalidHostNameAllowed(false)
+        .setFlushDuration(DEFAULT_BATCH_INTERVAL_DURATION)
+        .setNoOfElementsToBatch(DEFAULT_BUFFER_LIMIT)
+        .build();
+  }
+
+  public static Read read() {
+    return new AutoValue_InfluxDBIO_Read.Builder()
+        .setSslEnabled(false)
+        .setSslInvalidHostNameAllowed(false)
+        .setStartDateTime(DateTime.parse("1677-09-21T00:12:43.145224194Z"))
+        .setEndDateTime(DateTime.parse("2262-04-11T23:47:16.854775806Z"))
+        .setRetentionPolicy("autogen")
+        .build();
+  }
+
+  @AutoValue
+  public abstract static class Read extends PTransform<PBegin, PCollection<String>> {
+
+    abstract boolean sslInvalidHostNameAllowed();
+
+    abstract String retentionPolicy();
+
+    @Nullable
+    abstract String database();
+
+    @Nullable
+    abstract String query();
+
+    abstract boolean sslEnabled();
+
+    @Nullable
+    abstract DataSourceConfiguration dataSourceConfiguration();
+
+    @Nullable
+    abstract List<String> metrics();
+
+    abstract DateTime startDateTime();
+
+    abstract DateTime endDateTime();
+
+    @Nullable
+    abstract DateTime fromDateTime();
+
+    @Nullable
+    abstract DateTime toDateTime();
+
+    abstract Builder builder();
+
+    @AutoValue.Builder
+    abstract static class Builder {
+      abstract Builder setDataSourceConfiguration(DataSourceConfiguration configuration);
+
+      abstract Builder setDatabase(String database);
+
+      abstract Builder setSslInvalidHostNameAllowed(boolean value);
+
+      abstract Builder setRetentionPolicy(String retentionPolicy);
+
+      abstract Builder setQuery(String query);
+
+      abstract Builder setToDateTime(DateTime toDateTime);
+
+      abstract Builder setFromDateTime(DateTime fromDateTime);
+
+      abstract Builder setStartDateTime(DateTime startDateTime);
+
+      abstract Builder setEndDateTime(DateTime endDateTime);
+
+      abstract Builder setSslEnabled(boolean sslEnabled);
+
+      abstract Builder setMetrics(List<String> metrics);
+
+      abstract Read build();
+    }
+
+    /** Reads from the InfluxDB instance indicated by the given configuration. */
+    public Read withDataSourceConfiguration(DataSourceConfiguration configuration) {
+      checkArgument(configuration != null, "configuration can not be null");
+      return builder().setDataSourceConfiguration(configuration).build();
+    }
+
+    /** Reads from the specified database. */
+    public Read withDatabase(String database) {
+      return builder()
+          .setDatabase(database)
+          .setDataSourceConfiguration(dataSourceConfiguration())
+          .build();
+    }
+
+    public Read withToDateTime(DateTime toDateTime) {
+      return builder().setToDateTime(toDateTime).build();
+    }
+
+    public Read withFromDateTime(DateTime fromDateTime) {
+      return builder().setFromDateTime(fromDateTime).build();
+    }
+
+    public Read withStartDateTime(DateTime startDateTime) {
+      return builder().setStartDateTime(startDateTime).build();
+    }
+
+    public Read withEndDateTime(DateTime endDateTime) {
+      return builder().setEndDateTime(endDateTime).build();
+    }
+
+    public Read withMetrics(List<String> metrics) {
+      return builder().setMetrics(metrics).build();
+    }
+
+    public Read withMetrics(String... metrics) {
+      return withMetrics(Arrays.asList(metrics));
+    }
+
+    public Read withSslEnabled(boolean sslEnabled) {
+      return builder().setSslEnabled(sslEnabled).build();
+    }
+
+    public Read withSslInvalidHostNameAllowed(boolean value) {
+      return builder().setSslInvalidHostNameAllowed(value).build();
+    }
+
+    public Read withRetentionPolicy(String retentionPolicy) {
+      return builder().setRetentionPolicy(retentionPolicy).build();
+    }
+
+    public Read withQuery(String query) {
+      return builder().setQuery(query).build();
+    }
+
+    @Override
+    public PCollection<String> expand(PBegin input) {
+      checkArgument(dataSourceConfiguration() != null, "configuration is required");
+      checkArgument(query() != null || database() != null, "database or query is required");
+      if (database() != null) {
+        checkArgument(
+            checkDatabase(
+                database(), dataSourceConfiguration(), sslInvalidHostNameAllowed(), sslEnabled()),
+            "Database %s does not exist",
+            database());
+      }
+      return input.apply(org.apache.beam.sdk.io.Read.from(new InfluxDBSource(this)));
+    }
+
+    @Override
+    public void populateDisplayData(DisplayData.Builder builder) {
+      super.populateDisplayData(builder);
+      builder.addIfNotNull(
+          DisplayData.item("dataSourceConfiguration", dataSourceConfiguration().toString()));
+      builder.addIfNotNull(DisplayData.item("database", database()));
+      builder.addIfNotNull(DisplayData.item("retentionPolicy", retentionPolicy()));
+      builder.addIfNotNull(DisplayData.item("sslEnabled", sslEnabled()));
+      builder.addIfNotNull(DisplayData.item("query", query()));
+      builder.addIfNotNull(
+          DisplayData.item("sslInvalidHostNameAllowed", sslInvalidHostNameAllowed()));
+    }
+  }
+
+  static class InfluxDBSource extends BoundedSource<String> {
+    private final Read spec;
+
+    InfluxDBSource(Read read) {
+      this.spec = read;
+    }
+
+    @Override
+    public long getEstimatedSizeBytes(PipelineOptions pipelineOptions) {
+      String noOfBlocks = "NUMBER OF BLOCKS";
+      String sizeOfBlocks = "SIZE OF BLOCKS";
+      LinkedHashSet<Long> noOfBlocksValue = new LinkedHashSet<>();
+      LinkedHashSet<Long> sizeOfBlocksValue = new LinkedHashSet<>();
+      try (InfluxDB connection =
+          getConnection(
+              spec.dataSourceConfiguration(),
+              spec.sslInvalidHostNameAllowed(),
+              spec.sslEnabled())) {
+        String query = spec.query();
+        if (query == null) {
+          query =
+              String.format(
+                  "SELECT * FROM %s.%s", spec.retentionPolicy(), String.join(",", spec.metrics()));
+        }
+        QueryResult result = connection.query(new Query(query, spec.database()));
+        List<Result> results = result.getResults();
+        for (Result res : results) {
+          for (Series series : res.getSeries()) {
+            for (List<Object> data : series.getValues()) {
+              String s = data.get(0).toString();
+              if (s.startsWith(noOfBlocks)) {
+                noOfBlocksValue.add(Long.parseLong(s.split(":", -1)[1].trim()));
+              }
+              if (s.startsWith(sizeOfBlocks)) {
+                sizeOfBlocksValue.add(Long.parseLong(s.split(":", -1)[1].trim()));
+              }
+            }
+          }
+        }
+      }
+
+      Iterator<Long> noOfBlocksValueItr = noOfBlocksValue.iterator();
+      Iterator<Long> sizeOfBlocksValueItr = sizeOfBlocksValue.iterator();
+      long size = 0;
+      while (noOfBlocksValueItr.hasNext() && sizeOfBlocksValueItr.hasNext()) {
+        size = size + (noOfBlocksValueItr.next() * sizeOfBlocksValueItr.next());
+      }
+      return size;
+    }
+
+    @Override
+    public List<? extends BoundedSource<String>> split(
+        long desiredElementsInABundle, PipelineOptions options) {
+      List<ShardInformation> shardInfo =
+          getDBShardedInformation(
+              spec.database(),
+              spec.dataSourceConfiguration(),
+              spec.sslInvalidHostNameAllowed(),
+              spec.sslEnabled());
+      List<BoundedSource<String>> sources = new ArrayList<>();
+      if (spec.query() == null) {
+        for (ShardInformation sInfo : shardInfo) {
+          if (sInfo.getStartTime().compareTo(spec.startDateTime()) > 0) {
+            sources.add(
+                new InfluxDBSource(
+                    spec.withMetrics(spec.metrics())
+                        .withRetentionPolicy(sInfo.getRetentionPolicy())
+                        .withToDateTime(sInfo.getStartTime())
+                        .withFromDateTime(sInfo.getEndTime())));
+          }
+        }
+      } else {
+        sources.add(this);
+      }
+      return sources;
+    }
+
+    @Override
+    public BoundedReader<String> createReader(PipelineOptions pipelineOptions) {
+      return new BoundedInfluxDbReader(this);
+    }
+
+    @Override
+    public void validate() {
+      spec.validate(null /* input */);
+    }
+
+    @Override
+    public void populateDisplayData(DisplayData.Builder builder) {
+      spec.populateDisplayData(builder);
+    }
+
+    @Override
+    public Coder<String> getOutputCoder() {
+      return StringUtf8Coder.of();
+    }
+  }
+
+  private static String getQueryToRun(Read spec) {
+    if (spec.query() == null) {
+      if (spec.toDateTime() != null && spec.fromDateTime() != null) {
+        return String.format(
+            "SELECT * FROM %s.%s WHERE time >= '%s' and time <= '%s'",
+            spec.retentionPolicy(),
+            String.join(",", spec.metrics()),
+            spec.toDateTime(),
+            spec.fromDateTime());
+      } else {
+        return String.format(
+            "SELECT * FROM %s.%s", spec.retentionPolicy(), String.join(",", spec.metrics()));
+      }
+    }
+    return spec.query();
+  }
+
+  private static class BoundedInfluxDbReader extends BoundedSource.BoundedReader<String> {
+    private final InfluxDBIO.InfluxDBSource source;
+    private Iterator<Result> resultIterator;
+    private Iterator<Series> seriesIterator;
+    private Iterator<List<Object>> valuesIterator;
+    private List current;
+
+    public BoundedInfluxDbReader(InfluxDBIO.InfluxDBSource source) {
+      this.source = source;
+    }
+
+    @Override
+    public boolean start() {
+      InfluxDBIO.Read spec = source.spec;
+      try (InfluxDB influxDB =
+          getConnection(
+              spec.dataSourceConfiguration(),
+              spec.sslInvalidHostNameAllowed(),
+              spec.sslEnabled())) {
+        if (spec.database() != null) {
+          influxDB.setDatabase(spec.database());
+        }
+        if (spec.retentionPolicy() != null) {
+          influxDB.setRetentionPolicy(spec.retentionPolicy());
+        }
+        String query = getQueryToRun(spec);
+        QueryResult queryResult = influxDB.query(new Query(query, spec.database()));
+        resultIterator = queryResult.getResults().iterator();
+        if (resultIterator.hasNext()) {
+          seriesIterator = resultIterator.next().getSeries().iterator();
+        }
+        if (seriesIterator.hasNext()) {
+          valuesIterator = seriesIterator.next().getValues().iterator();
+        }
+      }
+      return advance();
+    }
+
+    @Override
+    public boolean advance() {
+      if (valuesIterator.hasNext()) {
+        current = valuesIterator.next();
+        return true;
+      } else if (seriesIterator.hasNext()) {
+        valuesIterator = seriesIterator.next().getValues().iterator();
+        current = valuesIterator.next();
+        return true;
+      } else if (resultIterator.hasNext()) {
+        seriesIterator = resultIterator.next().getSeries().iterator();
+        valuesIterator = seriesIterator.next().getValues().iterator();
+        current = valuesIterator.next();
+        return true;
+      } else {
+        return false;
+      }
+    }
+
+    @Override
+    public BoundedSource getCurrentSource() {
+      return source;
+    }
+
+    @Override
+    public String getCurrent() {
+      return current.toString();
+    }
+
+    @Override
+    public void close() {}
+  }
+
+  @AutoValue
+  public abstract static class Write extends PTransform<PCollection<String>, PDone> {
+
+    @Override
+    public PDone expand(PCollection<String> input) {
+      checkArgument(dataSourceConfiguration() != null, "withConfiguration() is required");
+      checkArgument(database() != null && !database().isEmpty(), "withDatabase() is required");
+      checkArgument(
+          checkDatabase(
+              database(), dataSourceConfiguration(), sslInvalidHostNameAllowed(), sslEnabled()),
+          "Database %s does not exist",
+          database());
+      input.apply(ParDo.of(new InfluxWriterFn(this)));
+      return PDone.in(input.getPipeline());
+    }
+
+    @Override
+    public void populateDisplayData(DisplayData.Builder builder) {
+      super.populateDisplayData(builder);
+      builder.addIfNotNull(
+          DisplayData.item("dataSourceConfiguration", dataSourceConfiguration().toString()));
+      builder.addIfNotNull(DisplayData.item("database", database()));
+      builder.addIfNotNull(DisplayData.item("retentionPolicy", retentionPolicy()));
+      builder.addIfNotNull(DisplayData.item("sslEnabled", sslEnabled()));
+      builder.addIfNotNull(
+          DisplayData.item("sslInvalidHostNameAllowed", sslInvalidHostNameAllowed()));
+      builder.addIfNotNull(DisplayData.item("noOfElementsToBatch", noOfElementsToBatch()));
+      builder.addIfNotNull(DisplayData.item("flushDuration", flushDuration()));
+    }
+
+    @Nullable
+    abstract String database();
+
+    @Nullable
+    abstract String retentionPolicy();
+
+    abstract boolean sslInvalidHostNameAllowed();
+
+    abstract boolean sslEnabled();
+
+    abstract int noOfElementsToBatch();
+
+    abstract int flushDuration();
+
+    @Nullable
+    abstract DataSourceConfiguration dataSourceConfiguration();
+
+    abstract Builder builder();
+
+    @AutoValue.Builder
+    abstract static class Builder {
+      abstract Builder setDataSourceConfiguration(DataSourceConfiguration configuration);
+
+      abstract Builder setDatabase(String database);
+
+      abstract Builder setSslInvalidHostNameAllowed(boolean value);
+
+      abstract Builder setNoOfElementsToBatch(int noOfElementsToBatch);
+
+      abstract Builder setFlushDuration(int flushDuration);
+
+      abstract Builder setSslEnabled(boolean sslEnabled);
+
+      abstract Builder setRetentionPolicy(String retentionPolicy);
+
+      abstract Write build();
+    }
+
+    public Write withConfiguration(DataSourceConfiguration configuration) {
+      checkArgument(configuration != null, "configuration can not be null");
+      return builder().setDataSourceConfiguration(configuration).build();
+    }
+
+    public Write withDatabase(String database) {
+      return builder().setDatabase(database).build();
+    }
+
+    public Write withSslEnabled(boolean sslEnabled) {
+      return builder().setSslEnabled(sslEnabled).build();
+    }
+
+    public Write withSslInvalidHostNameAllowed(boolean value) {
+      return builder().setSslInvalidHostNameAllowed(value).build();
+    }
+
+    public Write withNoOfElementsToBatch(int noOfElementsToBatch) {
+      return builder().setNoOfElementsToBatch(noOfElementsToBatch).build();
+    }
+
+    public Write withFlushDuration(int flushDuration) {

Review comment:
       If not applied, it uses the default value https://github.com/bipinupd/beam/blob/BEAM-2546/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDBIO.java#L133




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 457700)
    Time Spent: 19h 50m  (was: 19h 40m)

> Add InfluxDbIO
> --------------
>
>                 Key: BEAM-2546
>                 URL: https://issues.apache.org/jira/browse/BEAM-2546
>             Project: Beam
>          Issue Type: New Feature
>          Components: io-ideas
>            Reporter: Jean-Baptiste Onofré
>            Priority: P2
>          Time Spent: 19h 50m
>  Remaining Estimate: 0h
>




--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Mime
View raw message