spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r...@apache.org
Subject spark git commit: [SPARK-16850][SQL] Improve type checking error message for greatest/least
Date Tue, 02 Aug 2016 17:22:26 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-2.0 a937c9ee4 -> f190bb83b


[SPARK-16850][SQL] Improve type checking error message for greatest/least

The greatest/least functions do not produce a friendly error message when their arguments have
mismatched data types. This patch improves the error message so that it no longer exposes the
internal Seq representation and instead uses more human-readable data type names.

Before:
```
org.apache.spark.sql.AnalysisException: cannot resolve 'greatest(CAST(1.0 AS DECIMAL(2,1)),
"1.0")' due to data type mismatch: The expressions should all have the same type, got GREATEST
(ArrayBuffer(DecimalType(2,1), StringType)).; line 1 pos 7
```

After:
```
org.apache.spark.sql.AnalysisException: cannot resolve 'greatest(CAST(1.0 AS DECIMAL(2,1)),
"1.0")' due to data type mismatch: The expressions should all have the same type, got GREATEST(decimal(2,1),
string).; line 1 pos 7
```

Manually verified the output and also added unit tests to ConditionalExpressionSuite.

Author: petermaxlee <petermaxlee@gmail.com>

Closes #14453 from petermaxlee/SPARK-16850.

(cherry picked from commit a1ff72e1cce6f22249ccc4905e8cef30075beb2f)
Signed-off-by: Reynold Xin <rxin@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f190bb83
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f190bb83
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f190bb83

Branch: refs/heads/branch-2.0
Commit: f190bb83beaafb65c8e6290e9ecaa61ac51e04bb
Parents: a937c9e
Author: petermaxlee <petermaxlee@gmail.com>
Authored: Tue Aug 2 19:32:35 2016 +0800
Committer: Reynold Xin <rxin@databricks.com>
Committed: Tue Aug 2 10:22:18 2016 -0700

----------------------------------------------------------------------
 .../catalyst/expressions/conditionalExpressions.scala  |  4 ++--
 .../expressions/ConditionalExpressionSuite.scala       | 13 +++++++++++++
 2 files changed, 15 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f190bb83/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
index e97e089..5f2585f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
@@ -299,7 +299,7 @@ case class Least(children: Seq[Expression]) extends Expression {
     } else if (children.map(_.dataType).distinct.count(_ != NullType) > 1) {
       TypeCheckResult.TypeCheckFailure(
         s"The expressions should all have the same type," +
-          s" got LEAST (${children.map(_.dataType)}).")
+          s" got LEAST(${children.map(_.dataType.simpleString).mkString(", ")}).")
     } else {
       TypeUtils.checkForOrderingExpr(dataType, "function " + prettyName)
     }
@@ -359,7 +359,7 @@ case class Greatest(children: Seq[Expression]) extends Expression {
     } else if (children.map(_.dataType).distinct.count(_ != NullType) > 1) {
       TypeCheckResult.TypeCheckFailure(
         s"The expressions should all have the same type," +
-          s" got GREATEST (${children.map(_.dataType)}).")
+          s" got GREATEST(${children.map(_.dataType.simpleString).mkString(", ")}).")
     } else {
       TypeUtils.checkForOrderingExpr(dataType, "function " + prettyName)
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/f190bb83/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
index 3c581ec..36185b8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
@@ -21,6 +21,7 @@ import java.sql.{Date, Timestamp}
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.TypeCheckFailure
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.types._
 
@@ -181,6 +182,12 @@ class ConditionalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
         Literal(Timestamp.valueOf("2015-07-01 10:00:00")))),
       Timestamp.valueOf("2015-07-01 08:00:00"), InternalRow.empty)
 
+    // Type checking error
+    assert(
+      Least(Seq(Literal(1), Literal("1"))).checkInputDataTypes() ==
+        TypeCheckFailure("The expressions should all have the same type, " +
+          "got LEAST(int, string)."))
+
     DataTypeTestUtils.ordered.foreach { dt =>
       checkConsistencyBetweenInterpretedAndCodegen(Least, dt, 2)
     }
@@ -227,6 +234,12 @@ class ConditionalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
         Literal(Timestamp.valueOf("2015-07-01 10:00:00")))),
       Timestamp.valueOf("2015-07-01 10:00:00"), InternalRow.empty)
 
+    // Type checking error
+    assert(
+      Greatest(Seq(Literal(1), Literal("1"))).checkInputDataTypes() ==
+        TypeCheckFailure("The expressions should all have the same type, " +
+          "got GREATEST(int, string)."))
+
     DataTypeTestUtils.ordered.foreach { dt =>
       checkConsistencyBetweenInterpretedAndCodegen(Greatest, dt, 2)
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message