spark-commits mailing list archives

From dav...@apache.org
Subject spark git commit: [SQL] address comments for to_date/trunc
Date Fri, 31 Jul 2015 18:07:39 GMT
Repository: spark
Updated Branches:
  refs/heads/master 27ae851ce -> 0024da915


[SQL] address comments for to_date/trunc

This PR addresses the comments in #7805

cc rxin

Author: Davies Liu <davies@databricks.com>

Closes #7817 from davies/trunc and squashes the following commits:

f729d5f [Davies Liu] rollback
cb7f7832 [Davies Liu] genCode() is protected
31e52ef [Davies Liu] fix style
ed1edc7 [Davies Liu] address comments for #7805


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0024da91
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0024da91
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0024da91

Branch: refs/heads/master
Commit: 0024da9157ba12ec84883a78441fa6835c1d0042
Parents: 27ae851
Author: Davies Liu <davies@databricks.com>
Authored: Fri Jul 31 11:07:34 2015 -0700
Committer: Davies Liu <davies.liu@gmail.com>
Committed: Fri Jul 31 11:07:34 2015 -0700

----------------------------------------------------------------------
 .../sql/catalyst/expressions/datetimeFunctions.scala | 15 ++++++++-------
 .../spark/sql/catalyst/util/DateTimeUtils.scala      |  3 ++-
 .../catalyst/expressions/ExpressionEvalHelper.scala  |  4 +---
 .../main/scala/org/apache/spark/sql/functions.scala  |  3 +++
 4 files changed, 14 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/0024da91/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
index 6e76133..07dea5b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
@@ -726,15 +726,16 @@ case class TruncDate(date: Expression, format: Expression)
   override def dataType: DataType = DateType
   override def prettyName: String = "trunc"
 
-  lazy val minItemConst = DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
+  private lazy val truncLevel: Int =
+    DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
 
   override def eval(input: InternalRow): Any = {
-    val minItem = if (format.foldable) {
-      minItemConst
+    val level = if (format.foldable) {
+      truncLevel
     } else {
       DateTimeUtils.parseTruncLevel(format.eval().asInstanceOf[UTF8String])
     }
-    if (minItem == -1) {
+    if (level == -1) {
       // unknown format
       null
     } else {
@@ -742,7 +743,7 @@ case class TruncDate(date: Expression, format: Expression)
       if (d == null) {
         null
       } else {
-        DateTimeUtils.truncDate(d.asInstanceOf[Int], minItem)
+        DateTimeUtils.truncDate(d.asInstanceOf[Int], level)
       }
     }
   }
@@ -751,7 +752,7 @@ case class TruncDate(date: Expression, format: Expression)
     val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
 
     if (format.foldable) {
-      if (minItemConst == -1) {
+      if (truncLevel == -1) {
         s"""
           boolean ${ev.isNull} = true;
           ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
@@ -763,7 +764,7 @@ case class TruncDate(date: Expression, format: Expression)
           boolean ${ev.isNull} = ${d.isNull};
           ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
           if (!${ev.isNull}) {
-            ${ev.primitive} = $dtu.truncDate(${d.primitive}, $minItemConst);
+            ${ev.primitive} = $dtu.truncDate(${d.primitive}, $truncLevel);
           }
         """
       }
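
As a standalone illustration of the pattern in the hunk above (the class and member names below are hypothetical, not part of the Spark sources): when the format expression is foldable, the trunc level is parsed once and cached in a private lazy val; otherwise it is re-parsed for every row, with -1 signalling an unknown format.

  // Hypothetical, self-contained sketch of the caching pattern; TruncSketch
  // and its members are illustrative only.
  class TruncSketch(formatIsFoldable: Boolean, currentFormat: () => String) {

    private def parse(fmt: String): Int = fmt.toLowerCase match {
      case "year" | "yyyy" | "yy" => 1    // truncate to year
      case "month" | "mon" | "mm" => 2    // truncate to month
      case _                      => -1   // unknown format -> null result
    }

    // Parsed once; safe only because a foldable format never changes.
    private lazy val truncLevel: Int = parse(currentFormat())

    def levelForRow(): Int =
      if (formatIsFoldable) truncLevel    // reuse the cached value
      else parse(currentFormat())         // non-constant format: parse per row
  }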

http://git-wip-us.apache.org/repos/asf/spark/blob/0024da91/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 5a7c25b..032ed8a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -794,7 +794,8 @@ object DateTimeUtils {
     } else if (level == TRUNC_TO_MONTH) {
       d - DateTimeUtils.getDayOfMonth(d) + 1
     } else {
-      throw new Exception(s"Invalid trunc level: $level")
+      // callers make sure that this branch is never reached
+      sys.error(s"Invalid trunc level: $level")
     }
   }
 

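A rough, standalone sketch of the month-truncation arithmetic in truncDate above, written against java.time rather than Spark's internal helpers (the object and method names are illustrative only):

  import java.time.LocalDate

  object TruncDateSketch {
    // Equivalent to `d - getDayOfMonth(d) + 1`: subtracting the day-of-month
    // and adding 1 moves a days-since-epoch value back to the 1st of its month.
    def truncToMonth(daysSinceEpoch: Int): Int = {
      val date = LocalDate.ofEpochDay(daysSinceEpoch.toLong)
      daysSinceEpoch - date.getDayOfMonth + 1
    }
  }
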
http://git-wip-us.apache.org/repos/asf/spark/blob/0024da91/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index 3c05e5c..a41185b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -18,11 +18,9 @@
 package org.apache.spark.sql.catalyst.expressions
 
 import org.scalactic.TripleEqualsSupport.Spread
-import org.scalatest.Matchers._
 
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.CatalystTypeConverters
+import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.optimizer.DefaultOptimizer
 import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}

http://git-wip-us.apache.org/repos/asf/spark/blob/0024da91/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index 46dc460..5d82a5e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -2192,6 +2192,9 @@ object functions {
   /**
    * Returns date truncated to the unit specified by the format.
    *
+   * @param format: 'year', 'yyyy', 'yy' to truncate by year,
+   *               or 'month', 'mon', 'mm' to truncate by month
+   *
    * @group datetime_funcs
    * @since 1.5.0
    */
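
For reference, a usage sketch of the function documented above, from the DataFrame API (the DataFrame df and its date column "d" are assumed here, not part of this commit):

  import org.apache.spark.sql.functions.{col, trunc}

  // 'year'/'yyyy'/'yy' keep only the year:   2015-07-31 -> 2015-01-01
  // 'month'/'mon'/'mm' keep year and month:  2015-07-31 -> 2015-07-01
  val truncated = df.select(trunc(col("d"), "year"), trunc(col("d"), "month"))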



