spark-commits mailing list archives

From sro...@apache.org
Subject spark git commit: [SPARK-15445][SQL] Build fails for java 1.7 after adding java.math.BigInteger support
Date Sat, 21 May 2016 11:40:01 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-2.0 c0cc921a5 -> a93f04dd4


[SPARK-15445][SQL] Build fails for java 1.7 after adding java.math.BigInteger support

## What changes were proposed in this pull request?
Replace the Java 8-only `longValueExact()` call with `longValue()` plus a manual check that the value fits in the range of a long (see the sketch below).
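
For context, a minimal standalone sketch of the same bounds-check idea (illustrative only, not Spark's actual `Decimal` class; the object and method names here are hypothetical):

```scala
import java.lang.{Long => JLong}
import java.math.BigInteger

// Illustrative sketch: convert a BigInteger to Long without the
// Java 8-only BigInteger.longValueExact(), by checking the range first.
object BigIntRangeCheck {
  private val LongMaxBigInt = BigInteger.valueOf(JLong.MAX_VALUE)
  private val LongMinBigInt = BigInteger.valueOf(JLong.MIN_VALUE)

  def toLongChecked(v: BigInteger): Long = {
    require(
      v.compareTo(LongMaxBigInt) <= 0 && v.compareTo(LongMinBigInt) >= 0,
      s"BigInteger $v too large for a long")
    v.longValue() // safe: the value has been shown to fit in 64 bits
  }
}
```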

## How was this patch tested?
Existing tests

Author: Sandeep Singh <sandeep@techaddict.me>

Closes #13223 from techaddict/SPARK-15445.

(cherry picked from commit 666bf2e8357cb9a61eaa2270c414e635d9c331ce)
Signed-off-by: Sean Owen <sowen@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a93f04dd
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a93f04dd
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a93f04dd

Branch: refs/heads/branch-2.0
Commit: a93f04dd43669ddac3ab0b298036b1a5e8e263f1
Parents: c0cc921
Author: Sandeep Singh <sandeep@techaddict.me>
Authored: Sat May 21 06:39:47 2016 -0500
Committer: Sean Owen <sowen@cloudera.com>
Committed: Sat May 21 06:39:54 2016 -0500

----------------------------------------------------------------------
 .../org/apache/spark/sql/types/Decimal.scala    | 24 +++++++++++---------
 1 file changed, 13 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a93f04dd/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index b907f62..31604ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.types
 
+import java.lang.{Long => JLong}
 import java.math.{BigInteger, MathContext, RoundingMode}
 
 import org.apache.spark.annotation.DeveloperApi
@@ -132,17 +133,15 @@ final class Decimal extends Ordered[Decimal] with Serializable {
    * Set this Decimal to the given BigInteger value. Will have precision 38 and scale 0.
    */
   def set(bigintval: BigInteger): Decimal = {
-    try {
-      this.decimalVal = null
-      this.longVal = bigintval.longValueExact()
-      this._precision = DecimalType.MAX_PRECISION
-      this._scale = 0
-      this
-    }
-    catch {
-      case e: ArithmeticException =>
-        throw new IllegalArgumentException(s"BigInteger ${bigintval} too large for decimal")
-     }
+    // TODO: Remove this once we migrate to java8 and use longValueExact() instead.
+    require(
+      bigintval.compareTo(LONG_MAX_BIG_INT) <= 0 && bigintval.compareTo(LONG_MIN_BIG_INT) >= 0,
+      s"BigInteger $bigintval too large for decimal")
+    this.decimalVal = null
+    this.longVal = bigintval.longValue()
+    this._precision = DecimalType.MAX_PRECISION
+    this._scale = 0
+    this
   }
 
   /**
@@ -382,6 +381,9 @@ object Decimal {
   private[sql] val ZERO = Decimal(0)
   private[sql] val ONE = Decimal(1)
 
+  private val LONG_MAX_BIG_INT = BigInteger.valueOf(JLong.MAX_VALUE)
+  private val LONG_MIN_BIG_INT = BigInteger.valueOf(JLong.MIN_VALUE)
+
   def apply(value: Double): Decimal = new Decimal().set(value)
 
   def apply(value: Long): Decimal = new Decimal().set(value)
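
For illustration, a small usage sketch of the patched setter (assuming the `Decimal` API shown in the diff above; the exception comes from the `require` call):

```scala
import java.math.BigInteger
import org.apache.spark.sql.types.Decimal

// A value that fits in a Long passes the range check and is stored
// on the compact longVal path with precision 38 and scale 0.
val small = new Decimal().set(BigInteger.valueOf(123456789L))

// Long.MaxValue + 1 fails the range check, so require() throws
// IllegalArgumentException("requirement failed: BigInteger ... too large for decimal").
val tooBig = BigInteger.valueOf(Long.MaxValue).add(BigInteger.ONE)
// new Decimal().set(tooBig)  // would throw IllegalArgumentException
```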


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org

