carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jack...@apache.org
Subject [09/38] incubator-carbondata git commit: reuse test case for integration module
Date Sat, 07 Jan 2017 16:36:43 GMT
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
index 18ebea1..1fde335 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
@@ -18,8 +18,6 @@
  */
 package org.apache.carbondata.spark.testsuite.allqueries
 
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
 import org.apache.spark.sql.common.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
@@ -30,7 +28,7 @@ class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
   override def beforeAll {
     sql("drop table if exists THive")
     sql("create table THive (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions st
 ring, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt) ROW FORMAT DELIMITED FIELDS TERMINATED BY ','")
-    sql("LOAD DATA local INPATH './src/test/resources/100_olap.csv' INTO TABLE THive")
+    sql(s"LOAD DATA local INPATH '$resourcesPath/100_olap.csv' INTO TABLE THive")
   }
   test("insert from hive") {
     val timeStampPropOrig = CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT)
@@ -63,7 +61,7 @@ class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql("drop table if exists TCarbonSource")
      sql("drop table if exists TCarbon")
      sql("create table TCarbonSource (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVe
 rsions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt) STORED BY 'org.apache.carbondata.format'")
-     sql("LOAD DATA INPATH './src/test/resources/100_olap.csv' INTO table TCarbonSource options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVer
 Number,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointDescription,gamePointId,contractNumber')")
+     sql("LOAD DATA INPATH '" + resourcesPath + "/100_olap.csv' INTO table TCarbonSource options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVe
 rNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointDescription,gamePointId,contractNumber')")
      sql("create table TCarbon (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions
  string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt) STORED BY 'org.apache.carbondata.format'")
      sql("insert into TCarbon select * from TCarbonSource")
      checkAnswer(
@@ -85,7 +83,7 @@ class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql("drop table if exists load")
      sql("drop table if exists inser")
      sql("CREATE TABLE load(imei string,age int,task bigint,num double,level decimal(10,3),productdate timestamp,name string,point int)STORED BY 'org.apache.carbondata.format'")
-     sql("LOAD DATA INPATH './src/test/resources/shortolap.csv' INTO TABLE load options ('DELIMITER'=',', 'QUOTECHAR'='\"','FILEHEADER' = 'imei,age,task,num,level,productdate,name,point')")
+     sql("LOAD DATA INPATH '" + resourcesPath + "/shortolap.csv' INTO TABLE load options ('DELIMITER'=',', 'QUOTECHAR'='\"','FILEHEADER' = 'imei,age,task,num,level,productdate,name,point')")
      sql("CREATE TABLE inser(imei string,age int,task bigint,num double,level decimal(10,3),productdate timestamp)STORED BY 'org.apache.carbondata.format'")
      sql("insert into inser select * from load")
      checkAnswer(
@@ -104,7 +102,7 @@ class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql("drop table if exists TCarbonSource")
      sql("drop table if exists TCarbon")
      sql("create table TCarbonSource (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVe
 rsions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt) STORED BY 'org.apache.carbondata.format'")
-     sql("LOAD DATA INPATH './src/test/resources/100_olap.csv' INTO table TCarbonSource options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVer
 Number,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointDescription,gamePointId,contractNumber')")
+     sql("LOAD DATA INPATH '" + resourcesPath + "/100_olap.csv' INTO table TCarbonSource options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVe
 rNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointDescription,gamePointId,contractNumber')")
      sql("create table TCarbon (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions
  string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt) STORED BY 'org.apache.carbondata.format'")
      sql("insert into TCarbon select * from TCarbonSource")
      checkAnswer(
@@ -138,7 +136,7 @@ class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql("create table TCarbon (imei string,deviceInformationId int,MAC string) STORED BY 'org.apache.carbondata.format'")
      sql("drop table if exists TCarbonLocal")
      sql("create table TCarbonLocal (imei string,deviceInformationId int,MAC string) STORED BY 'org.apache.carbondata.format'")
-     sql("LOAD DATA INPATH './src/test/resources/100_olap.csv' INTO table TCarbonLocal options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,MAC,deviceInformationId')")
+     sql("LOAD DATA INPATH '" + resourcesPath + "/100_olap.csv' INTO table TCarbonLocal options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,MAC,deviceInformationId')")
      sql("insert into TCarbon select imei,MAC,deviceInformationId from THive")
      checkAnswer(
          sql("select imei,deviceInformationId,MAC from TCarbon"),
@@ -162,9 +160,9 @@ class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
      
      sql("drop table if exists TCarbon")
      sql("create table TCarbon (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions
  string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription string,gamePointId double,contractNumber BigInt) STORED BY 'org.apache.carbondata.format'")
-     sql("LOAD DATA INPATH './src/test/resources/100_olap.csv' INTO table TCarbon options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber
 ,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointDescription,gamePointId,contractNumber')")
+     sql("LOAD DATA INPATH '" + resourcesPath + "/100_olap.csv' INTO table TCarbon options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumbe
 r,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointDescription,gamePointId,contractNumber')")
      sql("insert into TCarbon select * from THive")
-     sql("LOAD DATA local INPATH './src/test/resources/100_olap.csv' INTO TABLE THive")
+     sql("LOAD DATA local INPATH '" + resourcesPath + "/100_olap.csv' INTO TABLE THive")
      checkAnswer(
          sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_oper
 atorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription from THive order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Late
 st_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription"),
          sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_oper
 atorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription from TCarbon order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,La
 test_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription")
@@ -172,7 +170,11 @@ class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timeStampPropOrig)
   }
   override def afterAll {
+    sql("drop table if exists load")
+    sql("drop table if exists inser")
     sql("DROP TABLE IF EXISTS THive")
     sql("DROP TABLE IF EXISTS TCarbon")
+    sql("drop table if exists TCarbonLocal")
+    sql("drop table if exists TCarbonSource")
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala
deleted file mode 100644
index 749a6e8..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.allqueries
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-
-/*
- * Test Class for query without data load
- *
- */
-class TestQueryWithOldCarbonDataFile extends QueryTest with BeforeAndAfterAll {
-  override def beforeAll {
-	  CarbonProperties.getInstance.addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION, "V1");
-    sql("drop table if exists OldFormatTable")
-    sql("drop table if exists OldFormatTableHIVE")
-     sql("""
-           CREATE TABLE IF NOT EXISTS OldFormatTable
-           (country String,
-           name String, phonetype String, serialname String, salary Int)
-           STORED BY 'carbondata'
-           """)
-      sql("""
-           CREATE TABLE IF NOT EXISTS OldFormatTableHIVE
-           (country String,
-           name String, phonetype String, serialname String, salary Int)
-          row format delimited fields terminated by ','
-           """)      
-    sql("LOAD DATA local inpath './src/test/resources/OLDFORMATTABLE.csv' INTO table OldFormatTable")
-   sql(s"""
-           LOAD DATA LOCAL INPATH './src/test/resources/OLDFORMATTABLEHIVE.csv' into table OldFormatTableHIVE
-           """)
-
-  }
-
-  CarbonProperties.getInstance.addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION, "V2")
-  test("Test select * query") {
-    checkAnswer(
-      sql("select * from OldFormatTable"), sql("select * from OldFormatTableHIVE")
-    )
-  }
-
-  override def afterAll {
-     CarbonProperties.getInstance.addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION, "V1")
-    sql("drop table if exists OldFormatTable")
-    sql("drop table if exists OldFormatTableHIVE")
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
deleted file mode 100644
index 56314c6..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.allqueries
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-/*
- * Test Class for query without data load
- *
- */
-class TestQueryWithoutDataLoad extends QueryTest with BeforeAndAfterAll {
-  override def beforeAll {
-    sql("drop table if exists no_load")
-    sql("""
-        CREATE TABLE no_load(imei string, age int, productdate timestamp, gamePointId double)
-        STORED BY 'org.apache.carbondata.format'
-        TBLPROPERTIES('DICTIONARY_EXCLUDE'='productdate', 'DICTIONARY_INCLUDE'='gamePointId')
-      """)
-  }
-
-  test("test query without data load") {
-    checkAnswer(
-      sql("select count(*) from no_load"), Seq(Row(0))
-    )
-    checkAnswer(
-      sql("select * from no_load"), Seq.empty
-    )
-    checkAnswer(
-      sql("select imei, count(age) from no_load group by imei"), Seq.empty
-    )
-    checkAnswer(
-      sql("select imei, sum(age) from no_load group by imei"), Seq.empty
-    )
-    checkAnswer(
-      sql("select imei, avg(age) from no_load group by imei"), Seq.empty
-    )
-  }
-
-  override def afterAll {
-    sql("drop table no_load")
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala
deleted file mode 100644
index fc7a2a5..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestTableNameHasDbName.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.allqueries
-
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-/*
- * Test Class for query when part of tableName has dbName
- *
- */
-class TestTableNameHasDbName extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-    sql("DROP TABLE IF EXISTS tabledefault")
-    sql("CREATE TABLE tabledefault (empno int, workgroupcategory string, " +
-      "deptno int, projectcode int,attendance int)" +
-      " STORED BY 'org.apache.carbondata.format'")
-    sql("LOAD DATA LOCAL INPATH './src/test/resources/data.csv' INTO TABLE tabledefault")
-  }
-
-  test("test query when part of tableName has dbName") {
-    try {
-      sql("SELECT * FROM tabledefault").collect()
-    } catch {
-      case ex: Exception =>
-        assert(false)
-    }
-  }
-
-  override def afterAll {
-    sql("DROP TABLE tabledefault")
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
deleted file mode 100644
index a77d314..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
+++ /dev/null
@@ -1,266 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.badrecordloger
-
-import java.io.File
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.apache.spark.sql.hive.HiveContext
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-
-/**
- * Test Class for detailed query on timestamp datatypes
- *
- *
- */
-class BadRecordLoggerTest extends QueryTest with BeforeAndAfterAll {
-  var hiveContext: HiveContext = _
-
-  override def beforeAll {
-    try {
-      sql("drop table IF EXISTS sales")
-      sql("drop table IF EXISTS serializable_values")
-      sql("drop table IF EXISTS serializable_values_false")
-      sql("drop table IF EXISTS insufficientColumn")
-      sql("drop table IF EXISTS insufficientColumn_false")
-      sql("drop table IF EXISTS emptyColumnValues")
-      sql("drop table IF EXISTS emptyColumnValues_false")
-      sql("drop table IF EXISTS empty_timestamp")
-      sql("drop table IF EXISTS empty_timestamp_false")
-      sql(
-        """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
-
-      CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-
-      CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-      val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
-        .getCanonicalPath
-      var csvFilePath = currentDirectory + "/src/test/resources/badrecords/datasample.csv"
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS"
-          +
-          "('bad_records_logger_enable'='true','bad_records_action'='redirect', 'DELIMITER'=" +
-          " ',', 'QUOTECHAR'= '\"')");
-
-      // 1.0 "\N" which should be treated as NULL
-      // 1.1 Time stamp "\N" which should be treated as NULL
-      csvFilePath = currentDirectory +
-                    "/src/test/resources/badrecords/seriazableValue.csv"
-      sql(
-        """CREATE TABLE IF NOT EXISTS serializable_values(ID BigInt, date Timestamp, country String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE serializable_values OPTIONS"
-          +
-          "('bad_records_logger_enable'='true', 'bad_records_action'='ignore', " +
-          "'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-      // load with bad_records_logger_enable false
-      sql(
-        """CREATE TABLE IF NOT EXISTS serializable_values_false(ID BigInt, date Timestamp,
-           country String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      sql(
-        "LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE serializable_values_false OPTIONS"
-        + "('bad_records_logger_enable'='false', 'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-      // 2. insufficient columns - Bad records/Null value based on configuration
-      sql(
-        """CREATE TABLE IF NOT EXISTS insufficientColumn(ID BigInt, date Timestamp, country String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      csvFilePath = currentDirectory +
-                    "/src/test/resources/badrecords/insufficientColumns.csv"
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE insufficientColumn OPTIONS"
-          +
-          "('bad_records_logger_enable'='true', 'bad_records_action'='ignore', " +
-          "'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-      // load with bad_records_logger_enable false
-      sql(
-        """CREATE TABLE IF NOT EXISTS insufficientColumn_false(ID BigInt, date Timestamp, country
-            String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE insufficientColumn_false OPTIONS"
-          + "('bad_records_logger_enable'='false', 'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-
-      // 3. empty data for string data type - take empty value
-      // 4. empty data for non-string data type - Bad records/Null value based on configuration
-      //table should have only two records.
-      sql(
-        """CREATE TABLE IF NOT EXISTS emptyColumnValues(ID BigInt, date Timestamp, country String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      csvFilePath = currentDirectory +
-                    "/src/test/resources/badrecords/emptyValues.csv"
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE emptyColumnValues OPTIONS"
-          +
-          "('bad_records_logger_enable'='true', 'bad_records_action'='ignore', " +
-          "'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-      // load with bad_records_logger_enable to false
-      sql(
-        """CREATE TABLE IF NOT EXISTS emptyColumnValues_false(ID BigInt, date Timestamp, country
-           String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      csvFilePath = currentDirectory +
-                    "/src/test/resources/badrecords/emptyValues.csv"
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE emptyColumnValues_false OPTIONS"
-          + "('bad_records_logger_enable'='false', 'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-
-
-      // 4.1 Time stamp empty data - Bad records/Null value based on configuration
-      // 5. non-parsable data - Bad records/Null value based on configuration
-      // 6. empty line(check current one) - Bad records/Null value based on configuration
-      // only one value should be loadded.
-      sql(
-        """CREATE TABLE IF NOT EXISTS empty_timestamp(ID BigInt, date Timestamp, country String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      csvFilePath = currentDirectory +
-                    "/src/test/resources/badrecords/emptyTimeStampValue.csv"
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE empty_timestamp OPTIONS"
-          +
-          "('bad_records_logger_enable'='true', 'bad_records_action'='ignore', " +
-          "'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-      // load with bad_records_logger_enable to false
-      sql(
-        """CREATE TABLE IF NOT EXISTS empty_timestamp_false(ID BigInt, date Timestamp, country
-           String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'
-        """)
-      csvFilePath = currentDirectory +
-                    "/src/test/resources/badrecords/emptyTimeStampValue.csv"
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE empty_timestamp_false OPTIONS"
-          + "('bad_records_logger_enable'='false', 'DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-
-
-    } catch {
-      case x: Throwable => CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
-    }
-  }
-
-  test("select count(*) from sales") {
-    sql("select count(*) from sales").show()
-    checkAnswer(
-      sql("select count(*) from sales"),
-      Seq(Row(2)
-      )
-    )
-  }
-
-  test("select count(*) from serializable_values") {
-    sql("select count(*) from serializable_values").show()
-    checkAnswer(
-      sql("select count(*) from serializable_values"),
-      Seq(Row(2)
-      )
-    )
-  }
-
-  test("select count(*) from serializable_values_false") {
-    sql("select count(*) from serializable_values_false").show()
-    checkAnswer(
-      sql("select count(*) from serializable_values_false"),
-      Seq(Row(2)
-      )
-    )
-  }
-
-  test("select count(*) from empty_timestamp") {
-    sql("select count(*) from empty_timestamp").show()
-    checkAnswer(
-      sql("select count(*) from empty_timestamp"),
-      Seq(Row(1)
-      )
-    )
-  }
-
-  test("select count(*) from insufficientColumn") {
-    sql("select count(*) from insufficientColumn").show()
-    checkAnswer(
-      sql("select count(*) from insufficientColumn"),
-      Seq(Row(1)
-      )
-    )
-  }
-
-  test("select count(*) from insufficientColumn_false") {
-    sql("select count(*) from insufficientColumn_false").show()
-    checkAnswer(
-      sql("select count(*) from insufficientColumn_false"),
-      Seq(Row(3)
-      )
-    )
-  }
-
-
-  test("select count(*) from emptyColumnValues") {
-    sql("select count(*) from emptyColumnValues").show()
-    checkAnswer(
-      sql("select count(*) from emptyColumnValues"),
-      Seq(Row(2)
-      )
-    )
-  }
-
-  test("select count(*) from emptyColumnValues_false") {
-    sql("select count(*) from emptyColumnValues_false").show()
-    checkAnswer(
-      sql("select count(*) from emptyColumnValues_false"),
-      Seq(Row(7)
-      )
-    )
-  }
-
-  test("select count(*) from empty_timestamp_false") {
-    sql("select count(*) from empty_timestamp_false").show()
-    checkAnswer(
-      sql("select count(*) from empty_timestamp_false"),
-      Seq(Row(7)
-      )
-    )
-  }
-
-
-  override def afterAll {
-    sql("drop table sales")
-    sql("drop table serializable_values")
-    sql("drop table serializable_values_false")
-    sql("drop table insufficientColumn")
-    sql("drop table insufficientColumn_false")
-    sql("drop table emptyColumnValues")
-    sql("drop table emptyColumnValues_false")
-    sql("drop table empty_timestamp")
-    sql("drop table empty_timestamp_false")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala
deleted file mode 100644
index f2c5289..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestAvgForBigInt.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.bigdecimal
-
-import java.io.File
-
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-class TestAvgForBigInt extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-    sql("drop table if exists carbonTable")
-    val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
-      .getCanonicalPath
-    val csvFilePath = currentDirectory + "/src/test/resources/bigIntData.csv"
-
-    sql(
-      """
-      CREATE TABLE IF NOT EXISTS carbonTable (ID Int, date Timestamp, country String,
-      name String, phonetype String, serialname String, salary bigint)
-      STORED BY 'org.apache.carbondata.format'
-      """
-    )
-
-    sql(
-      "LOAD DATA LOCAL INPATH '" + csvFilePath + "' into table carbonTable"
-    )
-  }
-
-  test("test avg function on big int column") {
-    checkAnswer(
-      sql("select avg(salary) from carbonTable"),
-      sql("select sum(salary)/count(salary) from carbonTable")
-    )
-  }
-
-  override def afterAll {
-    sql("drop table if exists carbonTable")
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
deleted file mode 100644
index b51fbb6..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.bigdecimal
-
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-/**
-  * Test cases for testing big decimal functionality
-  */
-class TestBigDecimal extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-    sql("drop table if exists carbonTable")
-    sql("drop table if exists hiveTable")
-    sql("drop table if exists hiveBigDecimal")
-    sql("drop table if exists carbonBigDecimal_2")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_SIZE, "1")
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT, "2")
-    sql("CREATE TABLE IF NOT EXISTS carbonTable (ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary Decimal(17,2))STORED BY 'org.apache.carbondata.format'")
-    sql("create table if not exists hiveTable(ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary Decimal(17,2))row format delimited fields terminated by ','")
-    sql("LOAD DATA LOCAL INPATH './src/test/resources/decimalDataWithHeader.csv' into table carbonTable")
-    sql("LOAD DATA local inpath './src/test/resources/decimalDataWithoutHeader.csv' INTO table hiveTable")
-    sql("create table if not exists hiveBigDecimal(ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary decimal(27, 10))row format delimited fields terminated by ','")
-    sql("LOAD DATA local inpath './src/test/resources/decimalBoundaryDataHive.csv' INTO table hiveBigDecimal")
-    sql("create table if not exists carbonBigDecimal_2 (ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary decimal(30, 10)) STORED BY 'org.apache.carbondata.format'")
-    sql("LOAD DATA LOCAL INPATH './src/test/resources/decimalBoundaryDataCarbon.csv' into table carbonBigDecimal_2")
-  }
-
-  test("test detail query on big decimal column") {
-    checkAnswer(sql("select salary from carbonTable order by salary"),
-      sql("select salary from hiveTable order by salary"))
-  }
-
-  test("test sum function on big decimal column") {
-    checkAnswer(sql("select sum(salary) from carbonTable"),
-      sql("select sum(salary) from hiveTable"))
-  }
-
-  test("test max function on big decimal column") {
-    checkAnswer(sql("select max(salary) from carbonTable"),
-      sql("select max(salary) from hiveTable"))
-  }
-
-  test("test min function on big decimal column") {
-    checkAnswer(sql("select min(salary) from carbonTable"),
-      sql("select min(salary) from hiveTable"))
-  }
-  
-  test("test min datatype on big decimal column") {
-    val output = sql("select min(salary) from carbonTable").collectAsList().get(0).get(0)
-    assert(output.isInstanceOf[java.math.BigDecimal])
-  }
-
-  test("test max datatype on big decimal column") {
-    val output = sql("select max(salary) from carbonTable").collectAsList().get(0).get(0)
-    assert(output.isInstanceOf[java.math.BigDecimal])
-  }
-  
-  test("test count function on big decimal column") {
-    checkAnswer(sql("select count(salary) from carbonTable"),
-      sql("select count(salary) from hiveTable"))
-  }
-
-  test("test distinct function on big decimal column") {
-    checkAnswer(sql("select distinct salary from carbonTable order by salary"),
-      sql("select distinct salary from hiveTable order by salary"))
-  }
-
-  test("test sum-distinct function on big decimal column") {
-    checkAnswer(sql("select sum(distinct salary) from carbonTable"),
-      sql("select sum(distinct salary) from hiveTable"))
-  }
-
-  test("test count-distinct function on big decimal column") {
-    checkAnswer(sql("select count(distinct salary) from carbonTable"),
-      sql("select count(distinct salary) from hiveTable"))
-  }
-  test("test filter query on big decimal column") {
-    // equal to
-    checkAnswer(sql("select salary from carbonTable where salary=45234525465882.24"),
-      sql("select salary from hiveTable where salary=45234525465882.24"))
-    // greater than
-    checkAnswer(sql("select salary from carbonTable where salary>15000"),
-      sql("select salary from hiveTable where salary>15000"))
-    // greater than equal to
-    checkAnswer(sql("select salary from carbonTable where salary>=15000.43525"),
-      sql("select salary from hiveTable where salary>=15000.43525"))
-    // less than
-    checkAnswer(sql("select salary from carbonTable where salary<45234525465882"),
-      sql("select salary from hiveTable where salary<45234525465882"))
-    // less than equal to
-    checkAnswer(sql("select salary from carbonTable where salary<=45234525465882.24"),
-      sql("select salary from hiveTable where salary<=45234525465882.24"))
-  }
-
-  test("test aggregation on big decimal column with increased precision") {
-    sql("drop table if exists carbonBigDecimal")
-    sql("create table if not exists carbonBigDecimal (ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary decimal(27, 10)) STORED BY 'org.apache.carbondata.format'")
-    sql("LOAD DATA LOCAL INPATH './src/test/resources/decimalBoundaryDataCarbon.csv' into table carbonBigDecimal")
-
-    checkAnswer(sql("select sum(salary) from carbonBigDecimal"),
-      sql("select sum(salary) from hiveBigDecimal"))
-
-    checkAnswer(sql("select sum(distinct salary) from carbonBigDecimal"),
-      sql("select sum(distinct salary) from hiveBigDecimal"))
-
-    sql("drop table if exists carbonBigDecimal")
-  }
-
-  test("test big decimal for dictionary look up") {
-    sql("drop table if exists decimalDictLookUp")
-    sql("create table if not exists decimalDictLookUp (ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary decimal(27, 10)) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('dictionary_include'='salary')")
-    sql("LOAD DATA LOCAL INPATH './src/test/resources/decimalBoundaryDataCarbon.csv' into table decimalDictLookUp")
-
-    checkAnswer(sql("select sum(salary) from decimalDictLookUp"),
-      sql("select sum(salary) from hiveBigDecimal"))
-
-    sql("drop table if exists decimalDictLookUp")
-  }
-
-  test("test sum+10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select sum(salary)+10 from carbonBigDecimal_2"),
-      sql("select sum(salary)+10 from hiveBigDecimal"))
-  }
-
-  test("test sum*10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select sum(salary)*10 from carbonBigDecimal_2"),
-      sql("select sum(salary)*10 from hiveBigDecimal"))
-  }
-
-  test("test sum/10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select sum(salary)/10 from carbonBigDecimal_2"),
-      sql("select sum(salary)/10 from hiveBigDecimal"))
-  }
-
-  test("test sum-distinct+10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select sum(distinct(salary))+10 from carbonBigDecimal_2"),
-      sql("select sum(distinct(salary))+10 from hiveBigDecimal"))
-  }
-
-  test("test sum-distinct*10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select sum(distinct(salary))*10 from carbonBigDecimal_2"),
-      sql("select sum(distinct(salary))*10 from hiveBigDecimal"))
-  }
-
-  test("test sum-distinct/10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select sum(distinct(salary))/10 from carbonBigDecimal_2"),
-      sql("select sum(distinct(salary))/10 from hiveBigDecimal"))
-  }
-
-  test("test avg+10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select avg(salary)+10 from carbonBigDecimal_2"),
-      sql("select avg(salary)+10 from hiveBigDecimal"))
-  }
-
-  test("test avg*10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select avg(salary)*10 from carbonBigDecimal_2"),
-      sql("select avg(salary)*10 from hiveBigDecimal"))
-  }
-
-  test("test avg/10 aggregation on big decimal column with high precision") {
-    checkAnswer(sql("select avg(salary)/10 from carbonBigDecimal_2"),
-      sql("select avg(salary)/10 from hiveBigDecimal"))
-  }
-
-  override def afterAll {
-    sql("drop table if exists carbonTable")
-    sql("drop table if exists hiveTable")
-    sql("drop table if exists hiveBigDecimal")
-    sql("drop table if exists carbonBigDecimal_2")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_SIZE,
-      CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL)
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT,
-      CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT_DEFAULT_VALUE)
-  }
-}
-
-

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
deleted file mode 100644
index 85d6f85..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.bigdecimal
-
-import java.io.File
-
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.scalatest.BeforeAndAfterAll
-
-/**
-  * Test cases for testing columns having null values
-  */
-class TestNullAndEmptyFields extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-    sql("drop table if exists carbonTable")
-    sql("drop table if exists hiveTable")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT
-      )
-    val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
-      .getCanonicalPath
-    val csvFilePath = currentDirectory + "/src/test/resources/nullandnonparsableValue.csv"
-    sql(
-      "CREATE TABLE IF NOT EXISTS carbonTable (ID String, date Timestamp, country String, name " +
-        "String, phonetype String, serialname String, salary Decimal(17,2))STORED BY 'org.apache" +
-        ".carbondata.format'"
-    )
-    sql(
-      "create table if not exists hiveTable(ID String, date Timestamp, country String, name " +
-        "String, " +
-        "phonetype String, serialname String, salary Decimal(17,2))row format delimited fields " +
-        "terminated by ','"
-    )
-    sql(
-      "LOAD DATA LOCAL INPATH '" + csvFilePath + "' into table carbonTable OPTIONS " +
-        "('FILEHEADER'='ID,date," +
-        "country,name,phonetype,serialname,salary')"
-    )
-    sql(
-      "LOAD DATA local inpath '" + csvFilePath + "' INTO table hiveTable"
-    )
-  }
-
-
-  test("test detail query on column having null values") {
-    checkAnswer(
-      sql("select * from carbonTable"),
-      sql("select * from hiveTable")
-    )
-  }
-
-  test("test filter query on column is null") {
-    checkAnswer(
-      sql("select * from carbonTable where salary is null"),
-      sql("select * from hiveTable where salary is null")
-    )
-  }
-
-  test("test filter query on column is not null") {
-    checkAnswer(
-      sql("select * from carbonTable where salary is not null"),
-      sql("select * from hiveTable where salary is not null")
-    )
-  }
-
-  test("test filter query on columnValue=null") {
-    checkAnswer(
-      sql("select * from carbonTable where salary=null"),
-      sql("select * from hiveTable where salary=null")
-    )
-  }
-
-  test("test filter query where date is null") {
-    checkAnswer(
-      sql("select * from carbonTable where date is null"),
-      sql("select * from hiveTable where date is null")
-    )
-  }
-
-  test("test  subquery on column having null values") {
-    checkAnswer(
-      sql("select * from (select if(country='china','c', country) test from carbonTable)qq where test is null"),
-      sql("select * from (select if(country='china','c', country) test from hiveTable)qq where test is null")
-    )
-  }
-
-  test("test  subquery on column having not null values") {
-    checkAnswer(
-      sql("select * from (select if(country='china','c', country) test from carbonTable)qq where test is not null"),
-      sql("select * from (select if(country='china','c', country) test from hiveTable)qq where test is not null")
-    )
-  }
-
-  override def afterAll {
-    sql("drop table if exists carbonTable")
-    sql("drop table if exists hiveTable")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
-  }
-}
-
-

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
deleted file mode 100644
index 0c7b7ce..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/blockprune/BlockPruneQueryTestCase.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.spark.testsuite.blockprune
-
-import java.io.{DataOutputStream, File}
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory
-import org.scalatest.BeforeAndAfterAll
-
-/**
-  * This class contains test cases for block prune query
-  */
-class BlockPruneQueryTestCase extends QueryTest with BeforeAndAfterAll {
-  def currentPath: String = new File(this.getClass.getResource("/").getPath + "/../../")
-    .getCanonicalPath
-  val outputPath = currentPath + "/src/test/resources/block_prune_test.csv"
-  override def beforeAll {
-    // Since the data needed for block prune is big, need to create a temp data file
-    val testData: Array[String]= new Array[String](3);
-    testData(0) = "a"
-    testData(1) = "b"
-    testData(2) = "c"
-    var writer: DataOutputStream = null
-    try {
-      val fileType = FileFactory.getFileType(outputPath)
-      val file = FileFactory.getCarbonFile(outputPath, fileType)
-      if (!file.exists()) {
-        file.createNewFile()
-      }
-      writer = FileFactory.getDataOutputStream(outputPath, fileType)
-      for (i <- 0 to 2) {
-        for (j <- 0 to 240000) {
-          writer.writeBytes(testData(i) + "," + j + "\n")
-        }
-      }
-    } catch {
-      case ex: Exception =>
-        LOGGER.error(ex, "Build test file for block prune failed")
-    } finally {
-      if (writer != null) {
-        try {
-          writer.close()
-        } catch {
-          case ex: Exception =>
-            LOGGER.error(ex, "Close output stream catching exception")
-        }
-      }
-    }
-
-    sql("DROP TABLE IF EXISTS blockprune")
-  }
-
-  test("test block prune query") {
-    sql(
-      """
-        CREATE TABLE IF NOT EXISTS blockprune (name string, id int)
-        STORED BY 'org.apache.carbondata.format'
-      """)
-    sql(
-        s"LOAD DATA LOCAL INPATH '$outputPath' INTO table blockprune options('FILEHEADER'='name,id')"
-      )
-    // data is in all 7 blocks
-    checkAnswer(
-      sql(
-        """
-          select name,count(name) as amount from blockprune
-          where name='c' or name='b' or name='a' group by name
-        """),
-      Seq(Row("a", 240001), Row("b", 240001), Row("c", 240001)))
-
-    // data only in middle 3/4/5 blocks
-    checkAnswer(
-      sql(
-        """
-          select name,count(name) as amount from blockprune
-          where name='b' group by name
-        """),
-      Seq(Row("b", 240001)))
-  }
-
-  override def afterAll {
-    // delete the temp data file
-    try {
-      val fileType = FileFactory.getFileType(outputPath)
-      val file = FileFactory.getCarbonFile(outputPath, fileType)
-      if (file.exists()) {
-        file.delete()
-      }
-    } catch {
-      case ex: Exception =>
-        LOGGER.error(ex, "Delete temp test data file for block prune catching exception")
-    }
-    sql("DROP TABLE IF EXISTS blockprune")
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
index 5f8c150..6efbe37 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/createtable/TestCreateTableSyntax.scala
@@ -19,13 +19,12 @@
 
 package org.apache.carbondata.spark.testsuite.createtable
 
-import org.apache.spark.sql.common.util.CarbonHiveContext._
+import org.apache.spark.sql.Row
 import org.apache.spark.sql.common.util.QueryTest
-import org.apache.spark.sql.{CarbonContext, Row}
-import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
-
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
+
 /**
  * Test Class for validating create table syntax for carbontable
  *
@@ -33,18 +32,21 @@ import org.scalatest.BeforeAndAfterAll
 class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
   
   override def beforeAll {
-    sql("drop table if exists carbontable")
   }
 
   test("Struct field with underscore and struct<struct> syntax check") {
+    sql("drop table if exists carbontable")
     sql("create table carbontable(id int, username struct<sur_name:string," +
         "actual_name:struct<first_name:string,last_name:string>>, country string, salary double)" +
         "STORED BY 'org.apache.carbondata.format'")
     sql("describe carbontable").show
-    sql("drop table if exists carbontable")
   }
   
   test("Test table rename operation on carbon table and on hive table") {
+    sql("drop table if exists hivetable")
+    sql("drop table if exists carbontable")
+    sql("drop table if exists hiveRenamedTable")
+    sql("drop table if exists carbonRenamedTable")
     sql("create table hivetable(test1 int, test2 array<String>,test3 array<bigint>,"+
         "test4 array<int>,test5 array<decimal>,test6 array<timestamp>,test7 array<double>)"+
         "row format delimited fields terminated by ',' collection items terminated by '$' map keys terminated by ':'")
@@ -61,13 +63,12 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
         assert(e.getMessage.equals("Unsupported alter operation on carbon table"))
       }
     }
-    sql("drop table if exists hiveRenamedTable")
-    sql("drop table if exists carbontable")
   }
 
   
   test("test carbon table create with complex datatype as dictionary exclude") {
     try {
+      sql("drop table if exists carbontable")
       sql("create table carbontable(id int, name string, dept string, mobile array<string>, "+
           "country string, salary double) STORED BY 'org.apache.carbondata.format' " +
           "TBLPROPERTIES('DICTIONARY_EXCLUDE'='dept,mobile')")
@@ -77,11 +78,11 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
         assert(e.getMessage.equals("DICTIONARY_EXCLUDE is unsupported for complex datatype column: mobile"))
       }
     }
-    sql("drop table if exists carbontable")
   }
 
   test("test carbon table create with double datatype as dictionary exclude") {
     try {
+      sql("drop table if exists carbontable")
       sql("create table carbontable(id int, name string, dept string, mobile array<string>, "+
         "country string, salary double) STORED BY 'org.apache.carbondata.format' " +
         "TBLPROPERTIES('DICTIONARY_EXCLUDE'='salary')")
@@ -92,10 +93,10 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
           "data type column: salary"))
       }
     }
-    sql("drop table if exists carbontable")
   }
     test("test carbon table create with int datatype as dictionary exclude") {
     try {
+      sql("drop table if exists carbontable")
       sql("create table carbontable(id int, name string, dept string, mobile array<string>, "+
         "country string, salary double) STORED BY 'org.apache.carbondata.format' " +
         "TBLPROPERTIES('DICTIONARY_EXCLUDE'='id')")
@@ -106,11 +107,11 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
           "data type column: id"))
       }
     }
-    sql("drop table if exists carbontable")
   }
 
   test("test carbon table create with decimal datatype as dictionary exclude") {
     try {
+      sql("drop table if exists carbontable")
       sql("create table carbontable(id int, name string, dept string, mobile array<string>, "+
         "country string, salary decimal) STORED BY 'org.apache.carbondata.format' " +
         "TBLPROPERTIES('DICTIONARY_EXCLUDE'='salary')")
@@ -124,19 +125,20 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
   }
   
   test("describe formatted on hive table and carbon table") {
+    sql("drop table if exists hivetable")
+    sql("drop table if exists carbontable")
     sql("create table carbontable(id int, username struct<sur_name:string," +
         "actual_name:struct<first_name:string,last_name:string>>, country string, salary double)" +
         "STORED BY 'org.apache.carbondata.format'")
     sql("describe formatted carbontable").show(50)
-    sql("drop table if exists carbontable")
     sql("create table hivetable(id int, username struct<sur_name:string," +
         "actual_name:struct<first_name:string,last_name:string>>, country string, salary double)")
     sql("describe formatted hivetable").show(50)
-    sql("drop table if exists hivetable")
   }
 
-    test("describe command carbon table for decimal scale and precision test") {
-            sql("create table carbontablePrecision(id int, name string, dept string, mobile array<string>, "+
+  test("describe command carbon table for decimal scale and precision test") {
+    sql("drop table if exists carbontablePrecision")
+    sql("create table carbontablePrecision(id int, name string, dept string, mobile array<string>, "+
         "country string, salary decimal(10,6)) STORED BY 'org.apache.carbondata.format' " +
         "TBLPROPERTIES('DICTIONARY_INCLUDE'='salary,id')")
     checkAnswer(
@@ -146,11 +148,11 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
         Row("salary","decimal(10,6)","")
       )
     )
-     sql("drop table if exists carbontablePrecision")
   }
   
   test("create carbon table without dimensions") {
     try {
+      sql("drop table if exists carbontable")
       sql("create table carbontable(msr1 int, msr2 double, msr3 bigint, msr4 decimal)" +
         " stored by 'org.apache.carbondata.format'")
       assert(false)
@@ -165,6 +167,7 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
 
   test("create carbon table with repeated table properties") {
     try {
+      sql("drop table if exists carbontable")
       sql(
         """
           CREATE TABLE IF NOT EXISTS carbontable
@@ -183,6 +186,10 @@ class TestCreateTableSyntax extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
+    sql("drop table if exists hivetable")
     sql("drop table if exists carbontable")
+    sql("drop table if exists hiveRenamedTable")
+    sql("drop table if exists carbonRenamedTable")
+    sql("drop table if exists carbontablePrecision")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSystemLockFeatureTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSystemLockFeatureTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSystemLockFeatureTest.scala
index 3098b43..d5586b0 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSystemLockFeatureTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSystemLockFeatureTest.scala
@@ -18,7 +18,10 @@
  */
 package org.apache.carbondata.spark.testsuite.datacompaction
 
-import java.io.File
+import scala.collection.JavaConverters._
+
+import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.carbon.path.{CarbonStorePath, CarbonTablePath}
 import org.apache.carbondata.core.carbon.{AbsoluteTableIdentifier, CarbonTableIdentifier}
@@ -26,12 +29,6 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastorage.store.impl.FileFactory
 import org.apache.carbondata.core.updatestatus.SegmentStatusManager
 import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.core.updatestatus.SegmentStatusManager
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import scala.collection.JavaConverters._
 
 /**
   * FT for compaction scenario where major segment should not be included in minor.
@@ -60,12 +57,10 @@ class CompactionSystemLockFeatureTest extends QueryTest with BeforeAndAfterAll {
     )
 
 
-    val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
-      .getCanonicalPath
-    val csvFilePath1 = currentDirectory + "/src/test/resources/compaction/compaction1.csv"
+    val csvFilePath1 = s"$resourcesPath/compaction/compaction1.csv"
 
-    val csvFilePath2 = currentDirectory + "/src/test/resources/compaction/compaction2.csv"
-    val csvFilePath3 = currentDirectory + "/src/test/resources/compaction/compaction3.csv"
+    val csvFilePath2 = s"$resourcesPath/compaction/compaction2.csv"
+    val csvFilePath3 = s"$resourcesPath/compaction/compaction3.csv"
 
     // load table1
     sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE table1 OPTIONS" +

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
deleted file mode 100644
index bccdeb5..0000000
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.spark.testsuite.datacompaction
-
-import java.io.File
-
-import org.apache.carbondata.core.updatestatus.SegmentStatusManager
-
-import scala.collection.JavaConverters._
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.carbon.{AbsoluteTableIdentifier, CarbonTableIdentifier}
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-/**
-  * FT for data compaction scenario.
-  */
-class DataCompactionBlockletBoundryTest extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-    sql("drop table if exists  blocklettest")
-    sql("drop table if exists  Carbon_automation_hive")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "mm/dd/yyyy")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.BLOCKLET_SIZE,
-        "55")
-    sql(
-      "CREATE TABLE IF NOT EXISTS blocklettest (country String, ID String, date Timestamp, name " +
-        "String, " +
-        "phonetype String, serialname String, salary Int) STORED BY 'org.apache.carbondata" +
-        ".format'"
-    )
-
-
-    val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
-      .getCanonicalPath
-    val csvFilePath1 = currentDirectory + "/src/test/resources/compaction/compaction1.csv"
-
-    // loading the rows greater than 256. so that the column cardinality crosses byte boundary.
-    val csvFilePath2 = currentDirectory + "/src/test/resources/compaction/compactioncard2.csv"
-
-
-    sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE blocklettest OPTIONS" +
-      "('DELIMITER'= ',', 'QUOTECHAR'= '\"')"
-    )
-    sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE blocklettest  OPTIONS" +
-      "('DELIMITER'= ',', 'QUOTECHAR'= '\"')"
-    )
-    // compaction will happen here.
-    sql("alter table blocklettest compact 'major'"
-    )
-
-    sql(
-      "create table Carbon_automation_hive (ID String, date " +
-      "Timestamp,country String, name String, phonetype String, serialname String, salary Int ) row format " +
-      "delimited fields terminated by ',' TBLPROPERTIES ('skip.header.line.count'='1') "
-    )
-
-    sql("LOAD DATA LOCAL INPATH '" + currentDirectory +
-        "/src/test/resources/compaction/compaction1_forhive.csv" + "' INTO " +
-        "table Carbon_automation_hive ")
-    sql("LOAD DATA LOCAL INPATH '" + currentDirectory +
-        "/src/test/resources/compaction/compactioncard2_forhive.csv" + "' INTO " +
-        "table Carbon_automation_hive ")
-
-  }
-
-  test("select country,count(*) as a from blocklettest")({
-    sql("select country,count(*) as a from Carbon_automation_hive group by country").show()
-    sql("select *  from Carbon_automation_hive").show
-    checkAnswer(
-      sql("select country,count(*) as a from blocklettest group by country"),
-      sql("select country,count(*) as a from Carbon_automation_hive group by country")
-    )
-  }
-  )
-
-  override def afterAll {
-    sql("drop table if exists  blocklettest")
-    sql("drop table if exists  Carbon_automation_hive")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.BLOCKLET_SIZE,
-        "" + CarbonCommonConstants.BLOCKLET_SIZE_DEFAULT_VAL)
-  }
-
-}


Mime
View raw message