spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From GitBox <...@apache.org>
Subject [GitHub] [spark] cloud-fan commented on a change in pull request #27617: [SPARK-30865][SQL] Refactor DateTimeUtils
Date Wed, 26 Feb 2020 06:59:33 GMT
cloud-fan commented on a change in pull request #27617: [SPARK-30865][SQL] Refactor DateTimeUtils
URL: https://github.com/apache/spark/pull/27617#discussion_r384303953
 
 

 ##########
 File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 ##########
 @@ -39,84 +39,75 @@ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
  */
 object DateTimeUtils {
 
-  // we use Int and Long internally to represent [[DateType]] and [[TimestampType]]
-  type SQLDate = Int
-  type SQLTimestamp = Long
-
   // see http://stackoverflow.com/questions/466321/convert-unix-timestamp-to-julian
   // it's 2440587.5, rounding up to be compatible with Hive
   final val JULIAN_DAY_OF_EPOCH = 2440588
 
-  final val TimeZoneGMT = TimeZone.getTimeZone("GMT")
-  final val TimeZoneUTC = TimeZone.getTimeZone("UTC")
-
   val TIMEZONE_OPTION = "timeZone"
 
-  def defaultTimeZone(): TimeZone = TimeZone.getDefault()
-
   def getZoneId(timeZoneId: String): ZoneId = ZoneId.of(timeZoneId, ZoneId.SHORT_IDS)
   def getTimeZone(timeZoneId: String): TimeZone = {
     TimeZone.getTimeZone(getZoneId(timeZoneId))
   }
 
-  def microsToDays(timestamp: SQLTimestamp): SQLDate = {
-    microsToDays(timestamp, defaultTimeZone().toZoneId)
+  def microsToDays(timestamp: Long): Int = {
+    microsToDays(timestamp, ZoneId.systemDefault())
   }
 
-  def microsToDays(timestamp: SQLTimestamp, zoneId: ZoneId): SQLDate = {
+  def microsToDays(timestamp: Long, zoneId: ZoneId): Int = {
     val instant = microsToInstant(timestamp)
     localDateToDays(LocalDateTime.ofInstant(instant, zoneId).toLocalDate)
   }
 
-  def daysToMicros(days: SQLDate): SQLTimestamp = {
-    daysToMicros(days, defaultTimeZone().toZoneId)
+  def daysToMicros(days: Int): Long = {
+    daysToMicros(days, ZoneId.systemDefault())
   }
 
-  def daysToMicros(days: SQLDate, zoneId: ZoneId): SQLTimestamp = {
+  def daysToMicros(days: Int, zoneId: ZoneId): Long = {
     val instant = daysToLocalDate(days).atStartOfDay(zoneId).toInstant
     instantToMicros(instant)
   }
 
-  // Converts Timestamp to string according to Hive TimestampWritable convention.
-  def timestampToString(tf: TimestampFormatter, us: SQLTimestamp): String = {
-    tf.format(us)
+  // Converts the `micros` timestamp to string according to Hive TimestampWritable convention.
+  def timestampToString(tf: TimestampFormatter, micros: Long): String = {
+    tf.format(micros)
   }
 
   /**
    * Returns the number of days since epoch from java.sql.Date.
    */
-  def fromJavaDate(date: Date): SQLDate = {
+  def fromJavaDate(date: Date): Int = {
     microsToDays(millisToMicros(date.getTime))
   }
 
   /**
    * Returns a java.sql.Date from number of days since epoch.
    */
-  def toJavaDate(daysSinceEpoch: SQLDate): Date = {
+  def toJavaDate(daysSinceEpoch: Int): Date = {
 
 Review comment:
   `days` for consistency?

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message