spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From shiva...@apache.org
Subject spark git commit: [SPARK-10106] [SPARKR] Add `ifelse` Column function to SparkR
Date Wed, 19 Aug 2015 19:39:47 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-1.5 f25c32475 -> ba369258d


[SPARK-10106] [SPARKR] Add `ifelse` Column function to SparkR

### JIRA
[[SPARK-10106] Add `ifelse` Column function to SparkR - ASF JIRA](https://issues.apache.org/jira/browse/SPARK-10106)

Author: Yu ISHIKAWA <yuu.ishikawa@gmail.com>

Closes #8303 from yu-iskw/SPARK-10106.

(cherry picked from commit d898c33f774b9a3db2fb6aa8f0cb2c2ac6004b58)
Signed-off-by: Shivaram Venkataraman <shivaram@cs.berkeley.edu>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ba369258
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ba369258
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ba369258

Branch: refs/heads/branch-1.5
Commit: ba369258d94ba09b0bfc15d17f6851aa72a4d6d7
Parents: f25c324
Author: Yu ISHIKAWA <yuu.ishikawa@gmail.com>
Authored: Wed Aug 19 12:39:37 2015 -0700
Committer: Shivaram Venkataraman <shivaram@cs.berkeley.edu>
Committed: Wed Aug 19 12:39:44 2015 -0700

----------------------------------------------------------------------
 R/pkg/NAMESPACE                  |  1 +
 R/pkg/R/functions.R              | 19 +++++++++++++++++++
 R/pkg/inst/tests/test_sparkSQL.R |  3 ++-
 3 files changed, 22 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ba369258/R/pkg/NAMESPACE
----------------------------------------------------------------------
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index 111a2dc..3e5c89d 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -135,6 +135,7 @@ exportMethods("abs",
               "hex",
               "hour",
               "hypot",
+              "ifelse",
               "initcap",
               "instr",
               "isNaN",

http://git-wip-us.apache.org/repos/asf/spark/blob/ba369258/R/pkg/R/functions.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 5dba088..b5879bd 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -594,3 +594,22 @@ setMethod("when", signature(condition = "Column", value = "ANY"),
               jc <- callJStatic("org.apache.spark.sql.functions", "when", condition, value)
               column(jc)
           })
+
+#' ifelse
+#'
+#' Evaluates a list of conditions and returns `yes` if the conditions are satisfied.
+#' Otherwise `no` is returned for unmatched conditions.
+#'
+#' @rdname column
+setMethod("ifelse",
+          signature(test = "Column", yes = "ANY", no = "ANY"),
+          function(test, yes, no) {
+              test <- test@jc
+              yes <- ifelse(class(yes) == "Column", yes@jc, yes)
+              no <- ifelse(class(no) == "Column", no@jc, no)
+              jc <- callJMethod(callJStatic("org.apache.spark.sql.functions",
+                                            "when",
+                                            test, yes),
+                                "otherwise", no)
+              column(jc)
+          })

http://git-wip-us.apache.org/repos/asf/spark/blob/ba369258/R/pkg/inst/tests/test_sparkSQL.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/test_sparkSQL.R b/R/pkg/inst/tests/test_sparkSQL.R
index 670017e..556b8c5 100644
--- a/R/pkg/inst/tests/test_sparkSQL.R
+++ b/R/pkg/inst/tests/test_sparkSQL.R
@@ -813,11 +813,12 @@ test_that("greatest() and least() on a DataFrame", {
   expect_equal(collect(select(df, least(df$a, df$b)))[, 1], c(1, 3))
 })
 
-test_that("when() and otherwise() on a DataFrame", {
+test_that("when(), otherwise() and ifelse() on a DataFrame", {
   l <- list(list(a = 1, b = 2), list(a = 3, b = 4))
   df <- createDataFrame(sqlContext, l)
   expect_equal(collect(select(df, when(df$a > 1 & df$b > 2, 1)))[, 1], c(NA, 1))
   expect_equal(collect(select(df, otherwise(when(df$a > 1, 1), 0)))[, 1], c(0, 1))
+  expect_equal(collect(select(df, ifelse(df$a > 1 & df$b > 2, 0, 1)))[, 1], c(1, 0))
 })
 
 test_that("group by", {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message