camel-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hekon...@apache.org
Subject camel git commit: Added endpoint documentation.
Date Wed, 16 Dec 2015 20:56:49 GMT
Repository: camel
Updated Branches:
  refs/heads/master 3356d1d84 -> b7ae8147b


Added endpoint documentation.


Project: http://git-wip-us.apache.org/repos/asf/camel/repo
Commit: http://git-wip-us.apache.org/repos/asf/camel/commit/b7ae8147
Tree: http://git-wip-us.apache.org/repos/asf/camel/tree/b7ae8147
Diff: http://git-wip-us.apache.org/repos/asf/camel/diff/b7ae8147

Branch: refs/heads/master
Commit: b7ae8147bc4b477bb380b5e4c4fd7ab58855a194
Parents: 3356d1d
Author: Henryk Konsek <hekonsek@gmail.com>
Authored: Wed Dec 16 21:56:42 2015 +0100
Committer: Henryk Konsek <hekonsek@gmail.com>
Committed: Wed Dec 16 21:56:42 2015 +0100

----------------------------------------------------------------------
 .../org/apache/camel/component/spark/SparkEndpoint.java   | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/camel/blob/b7ae8147/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
----------------------------------------------------------------------
diff --git a/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
b/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
index 40c86e1..1d758c4 100644
--- a/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
+++ b/components/camel-spark/src/main/java/org/apache/camel/component/spark/SparkEndpoint.java
@@ -21,14 +21,14 @@ import org.apache.camel.Processor;
 import org.apache.camel.Producer;
 import org.apache.camel.impl.DefaultEndpoint;
 import org.apache.camel.spi.UriEndpoint;
+import org.apache.camel.spi.UriParam;
 import org.apache.spark.api.java.AbstractJavaRDDLike;
 import org.apache.spark.sql.DataFrame;
 import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.spi.LoggerFactoryBinder;
 
 import static org.slf4j.LoggerFactory.getLogger;
 
+// @UriEndpoint(scheme = "spark", producerOnly = true, title = "Apache Spark", syntax = "spark:jobType", label = "bigdata,iot")
 public class SparkEndpoint extends DefaultEndpoint {
 
     // Logger
@@ -37,18 +37,24 @@ public class SparkEndpoint extends DefaultEndpoint {
 
     // Endpoint collaborators
 
+    @UriParam(name = "rdd", description = "RDD to compute against.")
     private AbstractJavaRDDLike rdd;
 
+    @UriParam(name = "rddCallback", description = "Function performing action against an RDD.")
     private RddCallback rddCallback;
 
+    @UriParam(name = "dataFrame", description = "DataFrame to compute against.")
     private DataFrame dataFrame;
 
+    @UriParam(name = "dataFrameCallback", description = "Function performing action against a DataFrame.")
     private DataFrameCallback dataFrameCallback;
 
     // Endpoint configuration
 
+    @UriParam(name = "endpointType", description = "Type of the endpoint (rdd, dataframe, hive).")
     private final EndpointType endpointType;
 
+    @UriParam(name = "collect", description = "Indicates if results should be collected or counted.")
     private boolean collect = true;
 
     // Constructors


Mime
View raw message