spark-commits mailing list archives

From: lix...@apache.org
Subject: spark git commit: [SPARK-20555][SQL] Fix mapping of Oracle DECIMAL types to Spark types in read path
Date: Sat, 24 Jun 2017 05:09:55 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-2.1 bcaf06c49 -> f12883e32


[SPARK-20555][SQL] Fix mapping of Oracle DECIMAL types to Spark types in read path

This PR reverts some of the read-path code changes from https://github.com/apache/spark/pull/14377.
The original fix is https://github.com/apache/spark/pull/17830.

When merging this PR, please give credit to gaborfeher.

Added a test case to OracleIntegrationSuite.scala.

Author: Gabor Feher <gabor.feher@lynxanalytics.com>
Author: gatorsmile <gatorsmile@gmail.com>

Closes #18408 from gatorsmile/OracleType.
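
For context, the user-visible effect of this change: Oracle DECIMAL/NUMBER columns
read over JDBC now come back as DecimalType values instead of being coerced to
Boolean/Integer/Long/Float, which could silently corrupt values (e.g. reading 4 out
of a DECIMAL(1) column as a Boolean). A minimal sketch of a read after this fix; the
connection URL and table name are placeholders, not part of this commit:

    val props = new java.util.Properties()
    val df = spark.read.jdbc("jdbc:oracle:thin:@//dbhost:1521/svc", "numerics", props)
    // DECIMAL(1)    -> DecimalType(1, 0)   (previously BooleanType)
    // DECIMAL(3, 2) -> DecimalType(3, 2)   (previously IntegerType, dropping fractions)
    // DECIMAL(10)   -> DecimalType(10, 0)  (previously IntegerType, overflowing)
    df.printSchema()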


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f12883e3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f12883e3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f12883e3

Branch: refs/heads/branch-2.1
Commit: f12883e3232c50a01fc20e0520bb0f4099d2c79a
Parents: bcaf06c
Author: Gabor Feher <gabor.feher@lynxanalytics.com>
Authored: Fri Jun 23 21:53:38 2017 -0700
Committer: gatorsmile <gatorsmile@gmail.com>
Committed: Fri Jun 23 22:08:43 2017 -0700

----------------------------------------------------------------------
 .../spark/sql/jdbc/OracleIntegrationSuite.scala | 61 ++++++++++++++------
 .../apache/spark/sql/jdbc/OracleDialect.scala   |  4 --
 2 files changed, 42 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f12883e3/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
----------------------------------------------------------------------
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
index 8c880f3..e111e17 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/OracleIntegrationSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.jdbc
 
 import java.sql.{Connection, Date, Timestamp}
 import java.util.Properties
+import java.math.BigDecimal
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.SharedSQLContext
@@ -62,8 +63,29 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
   }
 
   override def dataPreparation(conn: Connection): Unit = {
+    conn.prepareStatement("CREATE TABLE numerics (b DECIMAL(1), f DECIMAL(3, 2), i DECIMAL(10))").executeUpdate();
+    conn.prepareStatement(
+      "INSERT INTO numerics VALUES (4, 1.23, 9999999999)").executeUpdate();
+    conn.commit();
   }
 
+
+  test("SPARK-16625 : Importing Oracle numeric types") { 
+    val df = sqlContext.read.jdbc(jdbcUrl, "numerics", new Properties);
+    val rows = df.collect()
+    assert(rows.size == 1)
+    val row = rows(0)
+    // The main point of the below assertions is not to make sure that these Oracle types are
+    // mapped to decimal types, but to make sure that the returned values are correct.
+    // A value > 1 from DECIMAL(1) is correct:
+    assert(row.getDecimal(0).compareTo(BigDecimal.valueOf(4)) == 0)
+    // A value with fractions from DECIMAL(3, 2) is correct:
+    assert(row.getDecimal(1).compareTo(BigDecimal.valueOf(1.23)) == 0)
+    // A value > Int.MaxValue from DECIMAL(10) is correct:
+    assert(row.getDecimal(2).compareTo(BigDecimal.valueOf(9999999999L)) == 0)
+  }
+
+
   test("SPARK-12941: String datatypes to be mapped to Varchar in Oracle") {
     // create a sample dataframe with string type
     val df1 = sparkContext.parallelize(Seq(("foo"))).toDF("x")
@@ -123,27 +145,28 @@ class OracleIntegrationSuite extends DockerJDBCIntegrationSuite with SharedSQLCo
     val dfRead = spark.read.jdbc(jdbcUrl, tableName, props)
     val rows = dfRead.collect()
     // verify the data types are mapped correctly
-    val types = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(types(0).equals("class java.lang.Boolean"))
-    assert(types(1).equals("class java.lang.Integer"))
-    assert(types(2).equals("class java.lang.Long"))
-    assert(types(3).equals("class java.lang.Float"))
-    assert(types(4).equals("class java.lang.Float"))
-    assert(types(5).equals("class java.lang.Integer"))
-    assert(types(6).equals("class java.lang.Integer"))
-    assert(types(7).equals("class java.lang.String"))
-    assert(types(8).equals("class [B"))
-    assert(types(9).equals("class java.sql.Date"))
-    assert(types(10).equals("class java.sql.Timestamp"))
+    val types = dfRead.schema.map(field => field.dataType)
+    assert(types(0).equals(DecimalType(1, 0)))
+    assert(types(1).equals(DecimalType(10, 0)))
+    assert(types(2).equals(DecimalType(19, 0)))
+    assert(types(3).equals(DecimalType(19, 4)))
+    assert(types(4).equals(DecimalType(19, 4)))
+    assert(types(5).equals(DecimalType(3, 0)))
+    assert(types(6).equals(DecimalType(5, 0)))
+    assert(types(7).equals(StringType))
+    assert(types(8).equals(BinaryType))
+    assert(types(9).equals(DateType))
+    assert(types(10).equals(TimestampType))
+
     // verify that the inserted values are correct
     val values = rows(0)
-    assert(values.getBoolean(0).equals(booleanVal))
-    assert(values.getInt(1).equals(integerVal))
-    assert(values.getLong(2).equals(longVal))
-    assert(values.getFloat(3).equals(floatVal))
-    assert(values.getFloat(4).equals(doubleVal.toFloat))
-    assert(values.getInt(5).equals(byteVal.toInt))
-    assert(values.getInt(6).equals(shortVal.toInt))
+    assert(values.getDecimal(0).compareTo(BigDecimal.valueOf(1)) == 0)
+    assert(values.getDecimal(1).compareTo(BigDecimal.valueOf(integerVal)) == 0)
+    assert(values.getDecimal(2).compareTo(BigDecimal.valueOf(longVal)) == 0)
+    assert(values.getDecimal(3).compareTo(BigDecimal.valueOf(floatVal)) == 0)
+    assert(values.getDecimal(4).compareTo(BigDecimal.valueOf(doubleVal)) == 0)
+    assert(values.getDecimal(5).compareTo(BigDecimal.valueOf(byteVal)) == 0)
+    assert(values.getDecimal(6).compareTo(BigDecimal.valueOf(shortVal)) == 0)
     assert(values.getString(7).equals(stringVal))
     assert(values.getAs[Array[Byte]](8).mkString.equals("678"))
     assert(values.getDate(9).equals(dateVal))

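A side note on the assertion style used above: the tests compare decimals with
compareTo rather than equals, because java.math.BigDecimal.equals also considers
scale, so numerically equal values with different scales are not equal. A standalone
illustration, independent of this commit:

    import java.math.BigDecimal

    val a = new BigDecimal("1.23")
    val b = new BigDecimal("1.230")
    assert(!a.equals(b))        // same value, but scales differ (2 vs 3)
    assert(a.compareTo(b) == 0) // compareTo ignores scale and compares the value
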
http://git-wip-us.apache.org/repos/asf/spark/blob/f12883e3/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
index f541996..20e634c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/OracleDialect.scala
@@ -43,10 +43,6 @@ private case object OracleDialect extends JdbcDialect {
         // Not sure if there is a more robust way to identify the field as a float (or other
         // numeric types that do not specify a scale).
         case _ if scale == -127L => Option(DecimalType(DecimalType.MAX_PRECISION, 10))
-        case 1 => Option(BooleanType)
-        case 3 | 5 | 10 => Option(IntegerType)
-        case 19 if scale == 0L => Option(LongType)
-        case 19 if scale == 4L => Option(FloatType)
         case _ => None
       }
     } else {
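
The four removed cases are the substance of the fix: with them gone, NUMBER columns
with an explicit scale fall through to "case _ => None", and Spark's generic JDBC
layer maps them to a DecimalType with the precision and scale reported by the driver.
A rough sketch of how dialect resolution behaves after this change (the JDBC URL is a
placeholder):

    import java.sql.Types
    import org.apache.spark.sql.jdbc.JdbcDialects
    import org.apache.spark.sql.types.MetadataBuilder

    // JdbcDialects.get selects OracleDialect based on the URL prefix.
    val dialect = JdbcDialects.get("jdbc:oracle:thin:@//dbhost:1521/svc")
    val md = new MetadataBuilder().putLong("scale", 0)

    // NUMBER(1): previously Some(BooleanType); now None, so the generic JDBC
    // mapping reads the column as DecimalType(1, 0).
    assert(dialect.getCatalystType(Types.NUMERIC, "NUMBER", 1, md).isEmpty)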

