carbondata-commits mailing list archives

From chenliang...@apache.org
Subject [1/3] incubator-carbondata git commit: remove redundant declaration
Date Tue, 27 Dec 2016 15:41:30 GMT
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master e7b46ccf0 -> a011aafb0


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
index 86c55d3..c278cd3 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
@@ -40,10 +40,7 @@ object CarbonHiveMetadataUtil {
   def invalidateAndDropTable(schemaName: String,
       cubeName: String,
       sparkSession: SparkSession): Unit = {
-    val tableWithDb = schemaName + "." + cubeName
-    val tableIdent = sparkSession.sessionState.sqlParser.parseTableIdentifier(tableWithDb)
     try {
-      // todo(wf): in spark no invalidate method now
       sparkSession.sql(s"DROP TABLE IF EXISTS $schemaName.$cubeName")
     } catch {
       case e: Exception =>
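This hunk drops a TableIdentifier that was parsed but never used; the DROP TABLE statement is issued directly through string interpolation, so the parse was dead code. Reconstructed from the hunk's context lines, the simplified method looks roughly like this (the body of the catch clause falls outside the quoted context, so its contents here are only a placeholder):

    def invalidateAndDropTable(schemaName: String,
        cubeName: String,
        sparkSession: SparkSession): Unit = {
      try {
        sparkSession.sql(s"DROP TABLE IF EXISTS $schemaName.$cubeName")
      } catch {
        case e: Exception =>
          throw e  // placeholder; the real handler is not shown in this hunk
      }
    }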

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonLateDecodeRule.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonLateDecodeRule.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonLateDecodeRule.scala
index b3a7d5a..463faf1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonLateDecodeRule.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonLateDecodeRule.scala
@@ -163,7 +163,7 @@ class CarbonLateDecodeRule extends Rule[LogicalPlan] with PredicateHelper {
             Sort(sort.order, sort.global, child)
           }
         case union: Union
-          if !(union.children(0).isInstanceOf[CarbonDictionaryTempDecoder] ||
+          if !(union.children.head.isInstanceOf[CarbonDictionaryTempDecoder] ||
             union.children(1).isInstanceOf[CarbonDictionaryTempDecoder]) =>
           val children = union.children.map { child =>
             val condAttrs = new util.HashSet[AttributeReferenceWrapper]
@@ -173,7 +173,7 @@ class CarbonLateDecodeRule extends Rule[LogicalPlan] with PredicateHelper {
               !child.isInstanceOf[CarbonDictionaryCatalystDecoder]) {
               CarbonDictionaryTempDecoder(condAttrs,
                 new util.HashSet[AttributeReferenceWrapper](),
-                union.children(0))
+                union.children.head)
             } else {
               child
             }
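Both replacements in this hunk swap children(0) for children.head. For a non-empty Seq the two return the same element, but head states the intent directly instead of going through the positional apply, and it is the form Scala style checkers expect. A minimal illustration:

    val xs = Seq("a", "b", "c")
    xs(0)    // positional apply; equivalent here, but flagged by scalastyle
    xs.head  // idiomatic first-element access; both throw on an empty sequence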
@@ -557,24 +557,6 @@ class CarbonLateDecodeRule extends Rule[LogicalPlan] with PredicateHelper {
     }
   }
 
-  private def updateRelation(relation: CarbonDatasourceHadoopRelation):
-  CarbonDatasourceHadoopRelation = {
-    val fields = relation.schema.fields
-    val numberOfFields = relation.schema.fields.length
-    val newFields = new Array[StructField](numberOfFields)
-    val dictionaryMap = relation.carbonRelation.metaData.dictionaryMap
-    for (i <- 0 until numberOfFields ) {
-      dictionaryMap.get(fields(i).name) match {
-        case Some(true) =>
-          val field = fields(i)
-          newFields(i) = StructField(field.name, IntegerType, field.nullable, field.metadata)
-        case _ => newFields(i) = fields(i)
-      }
-    }
-    CarbonDatasourceHadoopRelation(relation.sparkSession,
-      relation.paths, relation.parameters, Option(StructType(newFields)))
-  }
-
   private def updateProjection(plan: LogicalPlan): LogicalPlan = {
     val transFormedPlan = plan transform {
      case p@Project(projectList: Seq[NamedExpression], cd: CarbonDictionaryCatalystDecoder) =>
@@ -605,7 +587,7 @@ class CarbonLateDecodeRule extends Rule[LogicalPlan] with PredicateHelper {
       case a@Alias(exp, name) =>
         exp match {
           case attr: Attribute => aliasMap.put(a.toAttribute, attr)
-          case _ => aliasMap.put(a.toAttribute, new AttributeReference("", StringType)())
+          case _ => aliasMap.put(a.toAttribute, AttributeReference("", StringType)())
         }
         a
     }
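Spark's AttributeReference is a case class, so its companion apply takes the same (curried) parameter lists as the constructor and the new keyword is redundant. A minimal sketch of the same pattern, using a hypothetical Ref class rather than the real Spark type:

    // A case class with a second, defaulted parameter list can be built
    // through its generated companion apply, so `new` adds nothing.
    case class Ref(name: String)(val id: Long = 0L)

    val a = new Ref("x")()  // explicit constructor call
    val b = Ref("x")()      // companion apply; equivalent and shorter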

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
index 9a3f828..5a91ad1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
@@ -78,7 +78,6 @@ class CarbonSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder(conf) {
       if (ctx.bucketSpec != null) {
         operationNotAllowed("CREATE TABLE ... CLUSTERED BY", ctx)
       }
-      val comment = Option(ctx.STRING).map(string)
       val partitionCols = Option(ctx.partitionColumns).toSeq.flatMap(visitCatalogColumns)
       val cols = Option(ctx.columns).toSeq.flatMap(visitCatalogColumns)
       val properties = Option(ctx.tablePropertyList).map(visitPropertyKeyValues)

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala b/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
index 399b3e6..e72abd7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/CleanFiles.scala
@@ -36,7 +36,7 @@ object CleanFiles {
   def main(args: Array[String]): Unit = {
 
     if (args.length < 2) {
-      System.err.println("Usage: CleanFiles <store path> <table name>");
+      System.err.println("Usage: CleanFiles <store path> <table name>")
       System.exit(1)
     }
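Scala infers statement terminators at line ends, so the trailing semicolon after println is legal but redundant; the same one-character cleanup repeats in the Compaction, DeleteSegmentByDate, DeleteSegmentById, ShowSegments, and TableLoader hunks below. For example:

    System.err.println("usage text")   // the newline terminates the statement
    System.err.println("usage text");  // compiles, but the semicolon is noise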
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala b/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
index 2db6e48..f0cc19b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/Compaction.scala
@@ -33,7 +33,7 @@ object Compaction {
 
   def main(args: Array[String]): Unit = {
     if (args.length < 3) {
-      System.err.println("Usage: Compaction <store path> <table name> <major|minor>");
+      System.err.println("Usage: Compaction <store path> <table name> <major|minor>")
       System.exit(1)
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
index 951cd7f..6219f1e 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentByDate.scala
@@ -35,7 +35,7 @@ object DeleteSegmentByDate {
   def main(args: Array[String]): Unit = {
     if (args.length < 3) {
       System.err.println(
-        "Usage: DeleteSegmentByDate <store path> <table name> <before date
value>");
+        "Usage: DeleteSegmentByDate <store path> <table name> <before date
value>")
       System.exit(1)
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
index dad9f59..303a062 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/DeleteSegmentById.scala
@@ -40,7 +40,7 @@ object DeleteSegmentById {
 
     if (args.length < 3) {
       System.err.println(
-        "Usage: DeleteSegmentByID <store path> <table name> <segment id list>");
+        "Usage: DeleteSegmentByID <store path> <table name> <segment id list>")
       System.exit(1)
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala b/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
index c953089..c7286ee 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/ShowSegments.scala
@@ -62,7 +62,7 @@ object ShowSegments {
   def main(args: Array[String]): Unit = {
 
     if (args.length < 2) {
-      System.err.println("Usage: ShowSegments <store path> <table name> [limit]");
+      System.err.println("Usage: ShowSegments <store path> <table name> [limit]")
       System.exit(1)
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala b/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
index 424d8fa..bcc82ce 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/TableLoader.scala
@@ -66,11 +66,11 @@ object TableLoader {
 
   def main(args: Array[String]): Unit = {
     if (args.length < 3) {
-      System.err.println("Usage: TableLoader <properties file> <table name> <input
files>");
+      System.err.println("Usage: TableLoader <properties file> <table name> <input
files>")
       System.exit(1)
     }
     System.out.println("parameter list:")
-    args.foreach(System.out.println(_))
+    args.foreach(System.out.println)
     val map = extractOptions(TableAPIUtil.escape(args(0)))
     val storePath = extractStorePath(map)
     System.out.println(s"${CarbonCommonConstants.STORE_LOCATION}:$storePath")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
index b7617e8..e9330c8 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -43,21 +43,26 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists Carbon_automation_hive")
     sql("drop table if exists Carbon_automation_test_hive")
 
-    sql("create table if not exists Carbon_automation_test (imei string,deviceInformationId
int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string,contractNumber int, ActiveCheckTime string,
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict
string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operatorsVer
 sion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int,
Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province
string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId
string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string,
Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber
string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointId int,gamePointDescription string)  USING org.apache.spark.sql.CarbonSource
OPTIONS('dbName'='default', 'tableName'='Carbon_automation_test','DICTIONARY_INCLUDE'='Latest_MONTH,Latest_DAY,deviceInformationId')");
+    sql("create table if not exists Carbon_automation_test (imei string,deviceInformationId
int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string,contractNumber int, ActiveCheckTime string,
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict
string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operatorsVer
 sion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int,
Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province
string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId
string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string,
Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber
string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointId int,gamePointDescription string)  USING org.apache.spark.sql.CarbonSource
OPTIONS('dbName'='default', 'tableName'='Carbon_automation_test','DICTIONARY_INCLUDE'='Latest_MONTH,Latest_DAY,deviceInformationId')")
+
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
 
-    sql("create table if not exists Carbon_automation_hive (imei string,deviceInformationId
int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string,contractNumber int, ActiveCheckTime string,
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict
string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operatorsVer
 sion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int,
Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province
string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId
string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string,
Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber
string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointId int,gamePointDescription string) row format delimited fields terminated
by ','");
-    sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO
table Carbon_automation_hive ");
+    sql("create table if not exists Carbon_automation_hive (imei string,deviceInformationId
int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string,contractNumber int, ActiveCheckTime string,
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict
string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operatorsVer
 sion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int,
Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province
string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId
string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string,
Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber
string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointId int,gamePointDescription string) row format delimited fields terminated
by ','")
+
+    sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO
table Carbon_automation_hive ")
+
 
     //hive table
-    sql("create table if not exists Carbon_automation_test_hive (imei string,deviceInformationId
int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string,contractNumber int, ActiveCheckTime string,
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict
string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operato
 rsVersion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH
int, Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province
string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId
string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string,
Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber
string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointId int,gamePointDescription string)row format delimited fields terminated
by ','");
-    sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO
table Carbon_automation_test_hive");
+    sql("create table if not exists Carbon_automation_test_hive (imei string,deviceInformationId
int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string,contractNumber int, ActiveCheckTime string,
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict
string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operato
 rsVersion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH
int, Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province
string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId
string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string,
Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber
string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointId int,gamePointDescription string)row format delimited fields terminated
by ','")
+
+    sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO
table Carbon_automation_test_hive")
+
 
-    sql("INSERT INTO table Carbon_automation_test select * from Carbon_automation_test_hive");
+    sql("INSERT INTO table Carbon_automation_test select * from Carbon_automation_test_hive")
   }
 
-  def dropAllTable{
+  def dropAllTable(): Unit = {
     sql("drop table if exists Carbon_automation_test")
     sql("drop table if exists Carbon_automation_hive")
     sql("drop table if exists Carbon_automation_test_hive")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6fee9930/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala b/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
index 4310d04..57a8475 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
@@ -79,7 +79,7 @@ class QueryTest extends PlanTest {
 
   def sql(sqlText: String): DataFrame  = spark.sql(sqlText)
 
-  def clean: Unit = {
+  def clean(): Unit = {
     val clean = (path: String) => FileUtils.deleteDirectory(new File(path))
     clean(storeLocation)
   }
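The added empty parentheses follow the Scala convention that a parameterless def without parens reads as a pure accessor, while () signals a side effect (here, deleting the store directory). A minimal sketch of the convention, using a hypothetical Store class:

    class Store {
      def size: Int = 42      // pure accessor: no parens
      def clean(): Unit = {}  // side-effecting: declared (and called) with parens
    }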
@@ -249,7 +249,7 @@ object QueryTest {
       return Some(errorMessage)
     }
 
-    return None
+    None
   }
 
 }
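The final return is unnecessary because a Scala method's result is its last evaluated expression; explicit return is reserved for early exits, as the hunk's surviving return Some(errorMessage) shows. A condensed sketch of the same shape:

    def firstError(msgs: Seq[String]): Option[String] = {
      if (msgs.nonEmpty) {
        return Some(msgs.head)  // early exit still uses return
      }
      None  // last expression is the result; no return needed
    }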

