phoenix-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jmaho...@apache.org
Subject phoenix git commit: PHOENIX-2426 phoenix-spark: Support for TINYINT and SMALLINT
Date Tue, 17 Nov 2015 19:26:31 GMT
Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.0 4a0ed8270 -> c48dc0c5a


PHOENIX-2426 phoenix-spark: Support for TINYINT and SMALLINT


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c48dc0c5
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c48dc0c5
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c48dc0c5

Branch: refs/heads/4.x-HBase-1.0
Commit: c48dc0c5a01d552545b232ab851253bdf152bbd1
Parents: 4a0ed82
Author: Josh Mahonin <jmahonin@gmail.com>
Authored: Tue Nov 17 14:24:10 2015 -0500
Committer: Josh Mahonin <jmahonin@gmail.com>
Committed: Tue Nov 17 14:26:17 2015 -0500

----------------------------------------------------------------------
 phoenix-spark/src/it/resources/setup.sql                      | 4 +++-
 .../it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala    | 7 +++++++
 .../src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala  | 3 ++-
 3 files changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c48dc0c5/phoenix-spark/src/it/resources/setup.sql
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/resources/setup.sql b/phoenix-spark/src/it/resources/setup.sql
index d6dbe20..814d311 100644
--- a/phoenix-spark/src/it/resources/setup.sql
+++ b/phoenix-spark/src/it/resources/setup.sql
@@ -36,4 +36,6 @@ CREATE TABLE OUTPUT_TEST_TABLE (id BIGINT NOT NULL PRIMARY KEY, col1 VARCHAR, co
 CREATE TABLE CUSTOM_ENTITY."z02"(id BIGINT NOT NULL PRIMARY KEY)
 UPSERT INTO CUSTOM_ENTITY."z02" (id) VALUES(1)
 CREATE TABLE TEST_DECIMAL (ID BIGINT NOT NULL PRIMARY KEY, COL1 DECIMAL(9, 6))
-UPSERT INTO TEST_DECIMAL VALUES (1, 123.456789)
\ No newline at end of file
+UPSERT INTO TEST_DECIMAL VALUES (1, 123.456789)
+CREATE TABLE TEST_SMALL_TINY (ID BIGINT NOT NULL PRIMARY KEY, COL1 SMALLINT, COL2 TINYINT)
+UPSERT INTO TEST_SMALL_TINY VALUES (1, 32767, 127)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c48dc0c5/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 31104ba..5ae17da 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -516,4 +516,11 @@ class PhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterAll {
     val df = sqlContext.load("org.apache.phoenix.spark", Map("table" -> "TEST_DECIMAL",
       "zkUrl" -> quorumAddress))
     assert(df.select("COL1").first().getDecimal(0) == BigDecimal("123.456789").bigDecimal)
   }
+
+  test("Can load small and tiny integer types (PHOENIX-2426)") {
+    val sqlContext = new SQLContext(sc)
+    val df = sqlContext.load("org.apache.phoenix.spark", Map("table" -> "TEST_SMALL_TINY",
+      "zkUrl" -> quorumAddress))
+    assert(df.select("COL1").first().getShort(0).toInt == 32767)
+    assert(df.select("COL2").first().getByte(0).toInt == 127)
+  }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c48dc0c5/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
index ac60ceb..fa36a1f 100644
--- a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
+++ b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
@@ -23,7 +23,6 @@ import org.apache.phoenix.util.ColumnInfo
 import org.apache.spark._
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.types.{DataType, StructField, StructType}
 import org.apache.spark.sql.{Row, DataFrame, SQLContext}
 import org.apache.spark.sql.types._
 import scala.collection.JavaConverters._
@@ -133,6 +132,8 @@ class PhoenixRDD(sc: SparkContext, table: String, columns: Seq[String],
     case t if t.isInstanceOf[PVarchar] || t.isInstanceOf[PChar] => StringType
     case t if t.isInstanceOf[PLong] || t.isInstanceOf[PUnsignedLong] => LongType
     case t if t.isInstanceOf[PInteger] || t.isInstanceOf[PUnsignedInt] => IntegerType
+    case t if t.isInstanceOf[PSmallint] || t.isInstanceOf[PUnsignedSmallint] => ShortType
+    case t if t.isInstanceOf[PTinyint] || t.isInstanceOf[PUnsignedTinyint] => ByteType
     case t if t.isInstanceOf[PFloat] || t.isInstanceOf[PUnsignedFloat] => FloatType
     case t if t.isInstanceOf[PDouble] || t.isInstanceOf[PUnsignedDouble] => DoubleType
     // Use Spark system default precision for now (explicit to work with < 1.5)


Mime
View raw message