spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From marmb...@apache.org
Subject [1/2] [SPARK-2179][SQL] A minor refactoring of Java data type APIs (2179 follow-up).
Date Fri, 01 Aug 2014 18:15:00 GMT
Repository: spark
Updated Branches:
  refs/heads/master 8d338f64c -> c41fdf04f


http://git-wip-us.apache.org/repos/asf/spark/blob/c41fdf04/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
b/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
index d1aa3c8..77353f4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/types/util/DataTypeConversions.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.types.util
 
 import org.apache.spark.sql._
-import org.apache.spark.sql.api.java.types.{DataType => JDataType, StructField => JStructField}
+import org.apache.spark.sql.api.java.{DataType => JDataType, StructField => JStructField}
 
 import scala.collection.JavaConverters._
 
@@ -74,37 +74,37 @@ protected[sql] object DataTypeConversions {
    * Returns the equivalent DataType in Scala for the given DataType in Java.
    */
   def asScalaDataType(javaDataType: JDataType): DataType = javaDataType match {
-    case stringType: org.apache.spark.sql.api.java.types.StringType =>
+    case stringType: org.apache.spark.sql.api.java.StringType =>
       StringType
-    case binaryType: org.apache.spark.sql.api.java.types.BinaryType =>
+    case binaryType: org.apache.spark.sql.api.java.BinaryType =>
       BinaryType
-    case booleanType: org.apache.spark.sql.api.java.types.BooleanType =>
+    case booleanType: org.apache.spark.sql.api.java.BooleanType =>
       BooleanType
-    case timestampType: org.apache.spark.sql.api.java.types.TimestampType =>
+    case timestampType: org.apache.spark.sql.api.java.TimestampType =>
       TimestampType
-    case decimalType: org.apache.spark.sql.api.java.types.DecimalType =>
+    case decimalType: org.apache.spark.sql.api.java.DecimalType =>
       DecimalType
-    case doubleType: org.apache.spark.sql.api.java.types.DoubleType =>
+    case doubleType: org.apache.spark.sql.api.java.DoubleType =>
       DoubleType
-    case floatType: org.apache.spark.sql.api.java.types.FloatType =>
+    case floatType: org.apache.spark.sql.api.java.FloatType =>
       FloatType
-    case byteType: org.apache.spark.sql.api.java.types.ByteType =>
+    case byteType: org.apache.spark.sql.api.java.ByteType =>
       ByteType
-    case integerType: org.apache.spark.sql.api.java.types.IntegerType =>
+    case integerType: org.apache.spark.sql.api.java.IntegerType =>
       IntegerType
-    case longType: org.apache.spark.sql.api.java.types.LongType =>
+    case longType: org.apache.spark.sql.api.java.LongType =>
       LongType
-    case shortType: org.apache.spark.sql.api.java.types.ShortType =>
+    case shortType: org.apache.spark.sql.api.java.ShortType =>
       ShortType
 
-    case arrayType: org.apache.spark.sql.api.java.types.ArrayType =>
+    case arrayType: org.apache.spark.sql.api.java.ArrayType =>
       ArrayType(asScalaDataType(arrayType.getElementType), arrayType.isContainsNull)
-    case mapType: org.apache.spark.sql.api.java.types.MapType =>
+    case mapType: org.apache.spark.sql.api.java.MapType =>
       MapType(
         asScalaDataType(mapType.getKeyType),
         asScalaDataType(mapType.getValueType),
         mapType.isValueContainsNull)
-    case structType: org.apache.spark.sql.api.java.types.StructType =>
+    case structType: org.apache.spark.sql.api.java.StructType =>
       StructType(structType.getFields.map(asScalaStructField))
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/c41fdf04/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
----------------------------------------------------------------------
diff --git a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
index 8ee4591..3c92906 100644
--- a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
+++ b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaApplySchemaSuite.java
@@ -28,9 +28,6 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import org.apache.spark.sql.api.java.types.DataType;
-import org.apache.spark.sql.api.java.types.StructField;
-import org.apache.spark.sql.api.java.types.StructType;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;

http://git-wip-us.apache.org/repos/asf/spark/blob/c41fdf04/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
----------------------------------------------------------------------
diff --git a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
index 96a5039..d099a48 100644
--- a/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
+++ b/sql/core/src/test/java/org/apache/spark/sql/api/java/JavaSideDataTypeConversionSuite.java
@@ -24,8 +24,6 @@ import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.spark.sql.types.util.DataTypeConversions;
-import org.apache.spark.sql.api.java.types.DataType;
-import org.apache.spark.sql.api.java.types.StructField;
 
 public class JavaSideDataTypeConversionSuite {
   public void checkDataType(DataType javaDataType) {

http://git-wip-us.apache.org/repos/asf/spark/blob/c41fdf04/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
index 46de6fe..ff1debf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/api/java/ScalaSideDataTypeConversionSuite.scala
@@ -20,12 +20,13 @@ package org.apache.spark.sql.api.java
 import org.apache.spark.sql.types.util.DataTypeConversions
 import org.scalatest.FunSuite
 
-import org.apache.spark.sql._
+import org.apache.spark.sql.{DataType => SDataType, StructField => SStructField}
+import org.apache.spark.sql.{StructType => SStructType}
 import DataTypeConversions._
 
 class ScalaSideDataTypeConversionSuite extends FunSuite {
 
-  def checkDataType(scalaDataType: DataType) {
+  def checkDataType(scalaDataType: SDataType) {
     val javaDataType = asJavaDataType(scalaDataType)
     val actual = asScalaDataType(javaDataType)
     assert(scalaDataType === actual, s"Converted data type ${actual} " +
@@ -34,48 +35,52 @@ class ScalaSideDataTypeConversionSuite extends FunSuite {
 
   test("convert data types") {
     // Simple DataTypes.
-    checkDataType(StringType)
-    checkDataType(BinaryType)
-    checkDataType(BooleanType)
-    checkDataType(TimestampType)
-    checkDataType(DecimalType)
-    checkDataType(DoubleType)
-    checkDataType(FloatType)
-    checkDataType(ByteType)
-    checkDataType(IntegerType)
-    checkDataType(LongType)
-    checkDataType(ShortType)
+    checkDataType(org.apache.spark.sql.StringType)
+    checkDataType(org.apache.spark.sql.BinaryType)
+    checkDataType(org.apache.spark.sql.BooleanType)
+    checkDataType(org.apache.spark.sql.TimestampType)
+    checkDataType(org.apache.spark.sql.DecimalType)
+    checkDataType(org.apache.spark.sql.DoubleType)
+    checkDataType(org.apache.spark.sql.FloatType)
+    checkDataType(org.apache.spark.sql.ByteType)
+    checkDataType(org.apache.spark.sql.IntegerType)
+    checkDataType(org.apache.spark.sql.LongType)
+    checkDataType(org.apache.spark.sql.ShortType)
 
     // Simple ArrayType.
-    val simpleScalaArrayType = ArrayType(StringType, true)
+    val simpleScalaArrayType =
+      org.apache.spark.sql.ArrayType(org.apache.spark.sql.StringType, true)
     checkDataType(simpleScalaArrayType)
 
     // Simple MapType.
-    val simpleScalaMapType = MapType(StringType, LongType)
+    val simpleScalaMapType =
+      org.apache.spark.sql.MapType(org.apache.spark.sql.StringType, org.apache.spark.sql.LongType)
     checkDataType(simpleScalaMapType)
 
     // Simple StructType.
-    val simpleScalaStructType = StructType(
-      StructField("a", DecimalType, false) ::
-      StructField("b", BooleanType, true) ::
-      StructField("c", LongType, true) ::
-      StructField("d", BinaryType, false) :: Nil)
+    val simpleScalaStructType = SStructType(
+      SStructField("a", org.apache.spark.sql.DecimalType, false) ::
+      SStructField("b", org.apache.spark.sql.BooleanType, true) ::
+      SStructField("c", org.apache.spark.sql.LongType, true) ::
+      SStructField("d", org.apache.spark.sql.BinaryType, false) :: Nil)
     checkDataType(simpleScalaStructType)
 
     // Complex StructType.
-    val complexScalaStructType = StructType(
-      StructField("simpleArray", simpleScalaArrayType, true) ::
-      StructField("simpleMap", simpleScalaMapType, true) ::
-      StructField("simpleStruct", simpleScalaStructType, true) ::
-      StructField("boolean", BooleanType, false) :: Nil)
+    val complexScalaStructType = SStructType(
+      SStructField("simpleArray", simpleScalaArrayType, true) ::
+      SStructField("simpleMap", simpleScalaMapType, true) ::
+      SStructField("simpleStruct", simpleScalaStructType, true) ::
+      SStructField("boolean", org.apache.spark.sql.BooleanType, false) :: Nil)
     checkDataType(complexScalaStructType)
 
     // Complex ArrayType.
-    val complexScalaArrayType = ArrayType(complexScalaStructType, true)
+    val complexScalaArrayType =
+      org.apache.spark.sql.ArrayType(complexScalaStructType, true)
     checkDataType(complexScalaArrayType)
 
     // Complex MapType.
-    val complexScalaMapType = MapType(complexScalaStructType, complexScalaArrayType, false)
+    val complexScalaMapType =
+      org.apache.spark.sql.MapType(complexScalaStructType, complexScalaArrayType, false)
     checkDataType(complexScalaMapType)
   }
 }


Mime
View raw message