phoenix-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tdsi...@apache.org
Subject [15/42] phoenix git commit: PHOENIX-3540 Fix Time data type in Phoenix Spark integration
Date Fri, 23 Dec 2016 01:27:49 GMT
PHOENIX-3540 Fix Time data type in Phoenix Spark integration


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/5706f514
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/5706f514
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/5706f514

Branch: refs/heads/encodecolumns2
Commit: 5706f514416079e9afda7c59474492a74c704473
Parents: f11d501
Author: Ankit Singhal <ankitsinghal59@gmail.com>
Authored: Thu Dec 22 13:18:35 2016 +0530
Committer: Ankit Singhal <ankitsinghal59@gmail.com>
Committed: Thu Dec 22 13:18:35 2016 +0530

----------------------------------------------------------------------
 phoenix-spark/src/it/resources/globalSetup.sql       |  2 ++
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala    | 12 ++++++++++++
 .../scala/org/apache/phoenix/spark/PhoenixRDD.scala  | 15 ++++++++-------
 3 files changed, 22 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/5706f514/phoenix-spark/src/it/resources/globalSetup.sql
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/resources/globalSetup.sql b/phoenix-spark/src/it/resources/globalSetup.sql
index 852687e..72f8620 100644
--- a/phoenix-spark/src/it/resources/globalSetup.sql
+++ b/phoenix-spark/src/it/resources/globalSetup.sql
@@ -48,6 +48,8 @@ CREATE TABLE TEST_SMALL_TINY (ID BIGINT NOT NULL PRIMARY KEY, COL1 SMALLINT, COL
 UPSERT INTO TEST_SMALL_TINY VALUES (1, 32767, 127)
 CREATE TABLE DATE_TEST(ID BIGINT NOT NULL PRIMARY KEY, COL1 DATE)
 UPSERT INTO DATE_TEST VALUES(1, CURRENT_DATE())
+CREATE TABLE TIME_TEST(ID BIGINT NOT NULL PRIMARY KEY, COL1 TIME)
+UPSERT INTO TIME_TEST VALUES(1, CURRENT_TIME())
 CREATE TABLE "space" ("key" VARCHAR PRIMARY KEY, "first name" VARCHAR)
 UPSERT INTO "space" VALUES ('key1', 'xyz')
 CREATE TABLE "small" ("key" VARCHAR PRIMARY KEY, "first name" VARCHAR, "salary" INTEGER )

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5706f514/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 90822d4..fc8483d 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -620,4 +620,16 @@ class PhoenixSparkIT extends AbstractPhoenixSparkIT {
 
     assert(Math.abs(epoch - ts) < 300000)
   }
+
+  test("Can load Phoenix Time columns through DataFrame API") {
+    val sqlContext = new SQLContext(sc)
+    val df = sqlContext.read
+      .format("org.apache.phoenix.spark")
+      .options(Map("table" -> "TIME_TEST", "zkUrl" -> quorumAddress))
+      .load
+    val time = df.select("COL1").first().getTimestamp(0).getTime
+    val epoch = new Date().getTime
+    assert(Math.abs(epoch - time) < 86400000)
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/5706f514/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
index 505de1b..204a7ef 100644
--- a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
+++ b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
@@ -130,13 +130,14 @@ class PhoenixRDD(sc: SparkContext, table: String, columns: Seq[String],
       val rowSeq = columns.map { case (name, sqlType) =>
         val res = pr.resultMap(name)
 
-        // Special handling for data types
-        if(dateAsTimestamp && sqlType == 91) { // 91 is the defined type for Date
-          new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
-        }
-        else {
-          res
-        }
+          // Special handling for data types
+          if (dateAsTimestamp && sqlType == 91) { // 91 is the defined type for Date
+            new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
+          } else if (sqlType == 92) { // 92 is the defined type for Time
+            new java.sql.Timestamp(res.asInstanceOf[java.sql.Time].getTime)
+          } else {
+            res
+          }
       }
 
       // Create a Spark Row from the sequence


Mime
View raw message