From: sergi@apache.org
To: commits@ignite.incubator.apache.org
Date: Thu, 11 Jun 2015 07:18:27 -0000
Subject: [12/50] incubator-ignite git commit: #IGNITE-389 - More functions on API.

#IGNITE-389 - More functions on API.
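The headline change here is the rename of IgniteRDD.saveTuples to savePairs, applied across the examples and tests below. As a minimal usage sketch of the renamed API, assuming a local SparkContext and the "partitioned" cache used by the examples in this commit (the master URL and app name are illustrative only, not part of the change):

    import org.apache.ignite.configuration.IgniteConfiguration
    import org.apache.ignite.spark.IgniteContext
    import org.apache.spark.{SparkConf, SparkContext}

    object SavePairsSketch extends App {
        val sc = new SparkContext(new SparkConf().setAppName("save-pairs-sketch").setMaster("local[2]"))

        // The context takes a no-arg configuration closure (see IgniteContext.scala below).
        val ic = new IgniteContext[Int, Int](sc, () => new IgniteConfiguration())

        // Before this commit: saveTuples; after: savePairs. Semantics are unchanged.
        ic.fromCache("partitioned").savePairs(sc.parallelize(1 to 1000, 4).map(i => (i, i)))
    }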
Project: http://git-wip-us.apache.org/repos/asf/incubator-ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ignite/commit/c527a044
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ignite/tree/c527a044
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ignite/diff/c527a044

Branch: refs/heads/ignite-484-1
Commit: c527a04471bd4d263a346ee27a5dbef6c98a5894
Parents: 8503dec
Author: Alexey Goncharuk
Authored: Fri May 29 16:29:33 2015 -0700
Committer: Alexey Goncharuk
Committed: Fri May 29 16:29:33 2015 -0700

----------------------------------------------------------------------
 .../org/apache/ignite/spark/IgniteContext.scala     |  6 +++---
 .../scala/org/apache/ignite/spark/IgniteRDD.scala   | 16 ++++++++--------
 .../ignite/spark/examples/ColocationTest.scala      |  2 +-
 .../ignite/spark/examples/IgniteStoreExample.scala  |  2 +-
 .../ignite/spark/impl/IgniteQueryIterator.scala     |  2 +-
 .../org/apache/ignite/spark/impl/IgniteSqlRDD.scala |  2 +-
 .../org/apache/ignite/spark/IgniteRddSpec.scala     | 10 +++++-----
 7 files changed, 20 insertions(+), 20 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c527a044/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteContext.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteContext.scala b/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteContext.scala
index 5b649db..6259665 100644
--- a/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteContext.scala
+++ b/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteContext.scala
@@ -33,7 +33,7 @@ import org.apache.spark.SparkContext
  */
 class IgniteContext[K, V](
     @scala.transient val sparkContext: SparkContext,
-    cfgF: () => IgniteConfiguration
+    cfgF: () ⇒ IgniteConfiguration
 ) extends Serializable {
     def this(
         sc: SparkContext,
@@ -57,14 +57,14 @@ class IgniteContext[K, V](
             Ignition.ignite(igniteCfg.getGridName)
         }
         catch {
-            case e: Exception =>
+            case e: Exception ⇒
                 try {
                     igniteCfg.setClientMode(true)

                     Ignition.start(igniteCfg)
                 }
                 catch {
-                    case e: Exception => Ignition.ignite(igniteCfg.getGridName)
+                    case e: Exception ⇒ Ignition.ignite(igniteCfg.getGridName)
                 }
         }
     }
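The ignite() method shown in context above first tries Ignition.ignite(gridName) to reuse a running grid and, failing that, flips the configuration to client mode and starts a new node. A sketch of a configuration closure feeding that logic; the grid name and discovery setup here are assumptions for illustration, not part of this commit:

    import org.apache.ignite.configuration.IgniteConfiguration
    import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi
    import org.apache.ignite.spi.discovery.tcp.ipfinder.multicast.TcpDiscoveryMulticastIpFinder

    // cfgF must build a fresh IgniteConfiguration on every call, because the closure
    // is serialized with the context and evaluated independently on each Spark worker.
    def makeCfg(): IgniteConfiguration = {
        val cfg = new IgniteConfiguration()
        cfg.setGridName("spark-grid") // named grid, so Ignition.ignite(name) can locate it

        val disco = new TcpDiscoverySpi()
        disco.setIpFinder(new TcpDiscoveryMulticastIpFinder())
        cfg.setDiscoverySpi(disco)

        cfg
    }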
http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c527a044/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteRDD.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteRDD.scala b/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteRDD.scala
index 358fcd4..f286b58 100644
--- a/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteRDD.scala
+++ b/modules/spark/src/main/scala/org/apache/ignite/spark/IgniteRDD.scala
@@ -59,7 +59,7 @@ class IgniteRDD[K, V] (
         val it: java.util.Iterator[Cache.Entry[K, V]] = cache.query(qry).iterator()

-        new IgniteQueryIterator[Cache.Entry[K, V], (K, V)](it, entry => {
+        new IgniteQueryIterator[Cache.Entry[K, V], (K, V)](it, entry ⇒ {
             (entry.getKey, entry.getValue)
         })
     }
@@ -95,7 +95,7 @@ class IgniteRDD[K, V] (
         qry.setArgs(args.map(_.asInstanceOf[Object]):_*)

-        new IgniteSqlRDD[(K, V), Cache.Entry[K, V], K, V](ic, cacheName, cacheCfg, qry, entry => (entry.getKey, entry.getValue))
+        new IgniteSqlRDD[(K, V), Cache.Entry[K, V], K, V](ic, cacheName, cacheCfg, qry, entry ⇒ (entry.getKey, entry.getValue))
     }

     def sql(sql: String, args: Any*): RDD[Seq[Any]] = {
@@ -103,11 +103,11 @@ class IgniteRDD[K, V] (
         qry.setArgs(args.map(_.asInstanceOf[Object]):_*)

-        new IgniteSqlRDD[Seq[Any], java.util.List[_], K, V](ic, cacheName, cacheCfg, qry, list => list)
+        new IgniteSqlRDD[Seq[Any], java.util.List[_], K, V](ic, cacheName, cacheCfg, qry, list ⇒ list)
     }

     def saveValues(rdd: RDD[V]) = {
-        rdd.foreachPartition(it => {
+        rdd.foreachPartition(it ⇒ {
             val ig = ic.ignite()

             ensureCache()
@@ -119,7 +119,7 @@ class IgniteRDD[K, V] (
             val streamer = ig.dataStreamer[Object, V](cacheName)

             try {
-                it.foreach(value => {
+                it.foreach(value ⇒ {
                     val key = affinityKeyFunc(value, node.orNull)

                     streamer.addData(key, value)
@@ -131,8 +131,8 @@ class IgniteRDD[K, V] (
         })
     }

-    def saveTuples(rdd: RDD[(K, V)]) = {
-        rdd.foreachPartition(it => {
+    def savePairs(rdd: RDD[(K, V)]) = {
+        rdd.foreachPartition(it ⇒ {
             val ig = ic.ignite()

             // Make sure to deploy the cache
@@ -145,7 +145,7 @@ class IgniteRDD[K, V] (
             val streamer = ig.dataStreamer[K, V](cacheName)

             try {
-                it.foreach(tup => {
+                it.foreach(tup ⇒ {
                     streamer.addData(tup._1, tup._2)
                 })
             }
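Alongside the rename, note the split above between savePairs (caller supplies keys) and saveValues (keys derived internally via affinityKeyFunc). A short sketch of the value-only path, assuming the ic and sc from the sketch at the top:

    // saveValues streams bare values through an Ignite data streamer; keys are
    // generated internally, so use it when only the payload matters.
    val nums = sc.parallelize(1 to 100, 2)
    ic.fromCache("partitioned").saveValues(nums)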
println("Local sum: " + (1 to 100000).sum) http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c527a044/modules/spark/src/main/scala/org/apache/ignite/spark/examples/IgniteStoreExample.scala ---------------------------------------------------------------------- diff --git a/modules/spark/src/main/scala/org/apache/ignite/spark/examples/IgniteStoreExample.scala b/modules/spark/src/main/scala/org/apache/ignite/spark/examples/IgniteStoreExample.scala index 24be795..ad6b7e6 100644 --- a/modules/spark/src/main/scala/org/apache/ignite/spark/examples/IgniteStoreExample.scala +++ b/modules/spark/src/main/scala/org/apache/ignite/spark/examples/IgniteStoreExample.scala @@ -36,6 +36,6 @@ object IgniteStoreExample { }) ignite.fromCache("partitioned").saveValues(lines) - ignite.fromCache("").saveTuples(lines.map(l ⇒ (l, l))) + ignite.fromCache("partitioned").savePairs(lines.map(l ⇒ (l, l))) } } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c527a044/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteQueryIterator.scala ---------------------------------------------------------------------- diff --git a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteQueryIterator.scala b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteQueryIterator.scala index b24ba50..4165fd3 100644 --- a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteQueryIterator.scala +++ b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteQueryIterator.scala @@ -19,7 +19,7 @@ package org.apache.ignite.spark.impl class IgniteQueryIterator[T, R] ( cur: java.util.Iterator[T], - conv: (T) => R + conv: (T) ⇒ R ) extends Iterator[R] { override def hasNext: Boolean = cur.hasNext http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c527a044/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteSqlRDD.scala ---------------------------------------------------------------------- diff --git a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteSqlRDD.scala b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteSqlRDD.scala index 7cf9f3a..762a6ed 100644 --- a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteSqlRDD.scala +++ b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/IgniteSqlRDD.scala @@ -29,7 +29,7 @@ class IgniteSqlRDD[R: ClassTag, T, K, V]( cacheName: String, cacheCfg: CacheConfiguration[K, V], qry: Query[T], - conv: (T) => R + conv: (T) ⇒ R ) extends IgniteAbstractRDD[R, K, V](ic, cacheName, cacheCfg) { override def compute(split: Partition, context: TaskContext): Iterator[R] = { new IgniteQueryIterator[T, R](ensureCache().query(qry).iterator(), conv) http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c527a044/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala ---------------------------------------------------------------------- diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala index 7af25de..68273da 100644 --- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala +++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala @@ -42,7 +42,7 @@ class IgniteRddSpec extends FunSpec with Matchers with BeforeAndAfterAll with Be () ⇒ configuration("client", client = true)) // Save pairs ("0", "val0"), ("1", "val1"), ... to Ignite cache. 
http://git-wip-us.apache.org/repos/asf/incubator-ignite/blob/c527a044/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala
index 7af25de..68273da 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteRddSpec.scala
@@ -42,7 +42,7 @@ class IgniteRddSpec extends FunSpec with Matchers with BeforeAndAfterAll with Be
                 () ⇒ configuration("client", client = true))

             // Save pairs ("0", "val0"), ("1", "val1"), ... to Ignite cache.
-            ic.fromCache(PARTITIONED_CACHE_NAME).saveTuples(sc.parallelize(0 to 10000, 2).map(i ⇒ (String.valueOf(i), "val" + i)))
+            ic.fromCache(PARTITIONED_CACHE_NAME).savePairs(sc.parallelize(0 to 10000, 2).map(i ⇒ (String.valueOf(i), "val" + i)))

             // Check cache contents.
             val ignite = Ignition.ignite("grid-0")
@@ -92,9 +92,9 @@ class IgniteRddSpec extends FunSpec with Matchers with BeforeAndAfterAll with Be
             val cache: IgniteRDD[String, Entity] = ic.fromCache(PARTITIONED_CACHE_NAME)

-            cache.saveTuples(sc.parallelize(0 to 1000, 2).map(i ⇒ (String.valueOf(i), new Entity(i, "name" + i, i * 100))))
+            cache.savePairs(sc.parallelize(0 to 1000, 2).map(i ⇒ (String.valueOf(i), new Entity(i, "name" + i, i * 100))))

-            val res = cache.objectSql("Entity", "name = ? and salary = ?", "name50", 5000).map(_._2).collect()
+            val res: Array[Entity] = cache.objectSql("Entity", "name = ? and salary = ?", "name50", 5000).map(_._2).collect()

             assert(res.length == 1, "Invalid result length")
             assert(50 == res(0).id, "Invalid result")
@@ -117,7 +117,7 @@ class IgniteRddSpec extends FunSpec with Matchers with BeforeAndAfterAll with Be
             val cache: IgniteRDD[String, Entity] = ic.fromCache(PARTITIONED_CACHE_NAME)

-            cache.saveTuples(sc.parallelize(0 to 1000, 2).map(i ⇒ (String.valueOf(i), new Entity(i, "name" + i, i * 100))))
+            cache.savePairs(sc.parallelize(0 to 1000, 2).map(i ⇒ (String.valueOf(i), new Entity(i, "name" + i, i * 100))))

             val res = cache.sql("select id, name, salary from Entity where name = ? and salary = ?", "name50", 5000).collect()
@@ -142,7 +142,7 @@ class IgniteRddSpec extends FunSpec with Matchers with BeforeAndAfterAll with Be
             val cache: IgniteRDD[String, Entity] = ic.fromCache(PARTITIONED_CACHE_NAME)

-            cache.saveTuples(sc.parallelize(0 to 1000, 2).map(i ⇒ (String.valueOf(i), new Entity(i, "name" + i, i * 100))))
+            cache.savePairs(sc.parallelize(0 to 1000, 2).map(i ⇒ (String.valueOf(i), new Entity(i, "name" + i, i * 100))))

             val res = cache.sql("select id, name, salary from Entity where name = ? and salary = ?", "name50", 5000).collect()
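Finally, the spec exercises both query flavors on the enlarged API. A condensed sketch of the two calls, assuming a running grid, the Entity(id, name, salary) test class from the spec, and a context ic typed as IgniteContext[String, Entity]:

    val cache: IgniteRDD[String, Entity] = ic.fromCache("partitioned")

    cache.savePairs(sc.parallelize(0 to 1000, 2).map(i => (String.valueOf(i), new Entity(i, "name" + i, i * 100))))

    // objectSql queries a mapped type and yields (key, value) pairs.
    val entities: Array[Entity] =
        cache.objectSql("Entity", "name = ? and salary = ?", "name50", 5000).map(_._2).collect()

    // sql runs a free-form SELECT and yields rows as Seq[Any].
    val rows: Array[Seq[Any]] =
        cache.sql("select id, name, salary from Entity where name = ? and salary = ?", "name50", 5000).collect()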