spark-commits mailing list archives

From ma...@apache.org
Subject [6/9] git commit: Merge branch 'apache-master' into transform
Date Sat, 26 Oct 2013 00:26:24 GMT
Merge branch 'apache-master' into transform


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/0400aba1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/0400aba1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/0400aba1

Branch: refs/heads/master
Commit: 0400aba1c0fac298c024dee4fb4be23328f07e35
Parents: bacfe5e 1dc776b
Author: Tathagata Das <tathagata.das1565@gmail.com>
Authored: Thu Oct 24 11:05:00 2013 -0700
Committer: Tathagata Das <tathagata.das1565@gmail.com>
Committed: Thu Oct 24 11:05:00 2013 -0700

----------------------------------------------------------------------
 .../apache/spark/api/java/JavaDoubleRDD.scala   |  13 +++
 .../org/apache/spark/api/java/JavaPairRDD.scala |  13 +++
 .../org/apache/spark/api/java/JavaRDD.scala     |   8 ++
 .../apache/spark/scheduler/DAGScheduler.scala   |   8 ++
 .../spark/scheduler/DAGSchedulerEvent.scala     |   3 +
 .../apache/spark/scheduler/SparkListener.scala  |   9 ++
 .../spark/scheduler/SparkListenerBus.scala      |   2 +
 .../org/apache/spark/scheduler/TaskInfo.scala   |  20 ++++
 .../scheduler/cluster/ClusterScheduler.scala    |   4 +
 .../cluster/ClusterTaskSetManager.scala         |   6 +
 .../scheduler/cluster/TaskResultGetter.scala    |   1 +
 .../spark/ui/jobs/JobProgressListener.scala     |   8 +-
 .../spark/scheduler/SparkListenerSuite.scala    |  77 +++++++++++--
 .../streaming/examples/MQTTWordCount.scala      | 107 ++++++++++++++++++
 pom.xml                                         |  11 ++
 project/SparkBuild.scala                        |   8 +-
 streaming/pom.xml                               |   5 +
 .../spark/streaming/StreamingContext.scala      |  15 +++
 .../streaming/dstream/MQTTInputDStream.scala    | 109 +++++++++++++++++++
 19 files changed, 417 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0400aba1/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/0400aba1/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
----------------------------------------------------------------------
diff --cc streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index 70bc250,09c2f7f..70bf902
--- a/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@@ -462,8 -462,23 +462,23 @@@ class StreamingContext private 
      inputStream
    }
  
+ /**
+    * Create an input stream that receives messages pushed by an MQTT publisher.
+    * @param brokerUrl URL of the remote MQTT publisher
+    * @param topic Topic name to subscribe to
+    * @param storageLevel RDD storage level. Defaults to StorageLevel.MEMORY_ONLY_SER_2.
+    */
+ 
+   def mqttStream(
+     brokerUrl: String,
+     topic: String,
+     storageLevel: StorageLevel = StorageLevel.MEMORY_ONLY_SER_2): DStream[String] = {
+     val inputStream = new MQTTInputDStream[String](this, brokerUrl, topic, storageLevel)
+     registerInputStream(inputStream)
+     inputStream
+   }
    /**
 -   * Create a unified DStream from multiple DStreams of the same type and same interval
 +   * Create a unified DStream from multiple DStreams of the same type and same slide duration.
     */
    def union[T: ClassManifest](streams: Seq[DStream[T]]): DStream[T] = {
      new UnionDStream[T](streams.toArray)
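
The hunk above adds an mqttStream method to StreamingContext that registers an MQTTInputDStream and returns a DStream[String]. Below is a minimal sketch of how an application might call it, in the spirit of the MQTTWordCount example pulled in by this merge; the broker URL, topic name, master string, object name, and the word-count logic are illustrative assumptions, not taken from the commit.

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.StreamingContext._

object MQTTStreamSketch {
  def main(args: Array[String]) {
    // Hypothetical broker and topic; point these at your own MQTT broker.
    val brokerUrl = "tcp://localhost:1883"
    val topic = "sparktest"

    // Local StreamingContext with a 1-second batch interval.
    val ssc = new StreamingContext("local[2]", "MQTTStreamSketch", Seconds(1))

    // New API from this merge: subscribe to an MQTT topic and receive each
    // message as a String.
    val lines = ssc.mqttStream(brokerUrl, topic, StorageLevel.MEMORY_ONLY_SER_2)

    // Per-batch word count over the received messages.
    val counts = lines.flatMap(_.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
    counts.print()

    ssc.start()
  }
}

Since the default storage level is MEMORY_ONLY_SER_2 (serialized, replicated to two nodes), passing it explicitly as above is optional.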

