spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From GitBox <...@apache.org>
Subject [GitHub] [spark] dongjoon-hyun commented on a change in pull request #26034: [SPARK-29364][SQL] Return an interval from date subtract according to SQL standard
Date Sat, 05 Oct 2019 22:00:52 GMT
dongjoon-hyun commented on a change in pull request #26034: [SPARK-29364][SQL] Return an interval
from date subtract according to SQL standard
URL: https://github.com/apache/spark/pull/26034#discussion_r331763162
 
 

 ##########
 File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
 ##########
 @@ -1672,33 +1673,59 @@ case class TruncTimestamp(
 }
 
 /**
- * Returns the number of days from startDate to endDate.
+ * Returns the number of days from startDate to endDate or an interval between the dates.
  */
+// scalastyle:off line.size.limit line.contains.tab
 @ExpressionDescription(
-  usage = "_FUNC_(endDate, startDate) - Returns the number of days from `startDate` to `endDate`.",
 +  usage = "_FUNC_(endDate, startDate) - Returns the number of days from `startDate` to `endDate`. " +
 +    "When `spark.sql.ansi.enabled` is set to `true` and `spark.sql.dialect` is `Spark`, it returns " +
 +    "an interval between `startDate` (inclusive) and `endDate` (exclusive).",
   examples = """
     Examples:
       > SELECT _FUNC_('2009-07-31', '2009-07-30');
        1
-
       > SELECT _FUNC_('2009-07-30', '2009-07-31');
        -1
+      > SET spark.sql.ansi.enabled=true;
+      spark.sql.ansi.enabled	true
+      > SET spark.sql.dialect=Spark;
+      spark.sql.dialect	Spark
+      > select _FUNC_(date'tomorrow', date'yesterday');
+      interval 2 days
   """,
   since = "1.5.0")
+// scalastyle:on line.size.limit line.contains.tab
 case class DateDiff(endDate: Expression, startDate: Expression)
   extends BinaryExpression with ImplicitCastInputTypes {
 
   override def left: Expression = endDate
   override def right: Expression = startDate
   override def inputTypes: Seq[AbstractDataType] = Seq(DateType, DateType)
-  override def dataType: DataType = IntegerType
+  private val returnInterval: Boolean = {
+    val isSparkDialect = SQLConf.get.getConf(DIALECT) == Dialect.SPARK.toString()
+    SQLConf.get.ansiEnabled && isSparkDialect
 
 Review comment:
   Maybe, one-liner `SQLConf.get.ansiEnabled && SQLConf.get.getConf(DIALECT) == Dialect.POSTGRESQL.toString` is enough. Please note that I suggested `Dialect.POSTGRESQL`.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message