carbondata-commits mailing list archives

From chenliang...@apache.org
Subject [23/50] [abbrv] incubator-carbondata git commit: Handling for percentile aggregate function (#822)
Date Wed, 20 Jul 2016 10:13:51 GMT
Handling for percentile aggregate function (#822)

During data loading, the data type for int measures is now handled as Long instead of Double.
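
A minimal, self-contained Java sketch of the conversion pattern this commit introduces (the class, enum, and method names below are illustrative stand-ins, not the actual CarbonData API): an int measure value read as a string during load is parsed through Double and narrowed to long, so INT measures share the LONG code path in the aggregators, filters, and result preparators.

// Illustrative stand-in for the DataTypeUtil change; not the real CarbonData classes.
public final class IntMeasureConversionSketch {

  enum DataType { INT, LONG, DECIMAL, DOUBLE }

  static Object toMeasureValue(String msrValue, DataType dataType) {
    switch (dataType) {
      case INT:
        // Narrowing added by this commit: "10" or "10.0" becomes 10L.
        return Double.valueOf(msrValue).longValue();
      case LONG:
        return Long.valueOf(msrValue);
      default:
        // Other types keep double handling in this sketch.
        return Double.valueOf(msrValue);
    }
  }

  public static void main(String[] args) {
    System.out.println(toMeasureValue("2147483647", DataType.INT));  // prints 2147483647
    System.out.println(toMeasureValue("-2147483648", DataType.INT)); // prints -2147483648
  }
}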

Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/9821beea
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/9821beea
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/9821beea

Branch: refs/heads/master
Commit: 9821beeabe0aa52d31d6fc693f3c1a972be69253
Parents: f4c3d10
Author: ashokblend <ashok.blend@gmail.com>
Authored: Sun Jul 17 21:12:20 2016 +0530
Committer: Ravindra Pesala <ravi.pesala@gmail.com>
Committed: Sun Jul 17 21:12:20 2016 +0530

----------------------------------------------------------------------
 .../org/carbondata/core/util/DataTypeUtil.java  |  3 ++
 .../util/MeasureAggregatorFactory.java          |  7 ++++
 .../expression/ExpressionAggregator.java        |  1 +
 .../impl/ListBasedResultAggregator.java         |  1 +
 .../impl/QueryResultPreparatorImpl.java         |  1 +
 .../executer/RowLevelFilterExecuterImpl.java    |  3 ++
 .../spark/sql/CarbonDatasourceRelation.scala    |  2 +-
 .../TestLoadDataWithMaxMinInteger.scala         | 18 ++++----
 .../AllDataTypesTestCaseAggregate.scala         | 23 ++++++++--
 .../AllDataTypesTestCaseAggregate.scala         | 44 +++++++++++---------
 .../detailquery/AllDataTypesTestCase.scala      |  7 +++-
 .../filterexpr/AllDataTypesTestCaseFilter.scala |  7 +++-
 .../HadoopFSRelationTestCase.scala              |  8 +++-
 .../joinquery/AllDataTypesTestCaseJoin.scala    |  7 +++-
 .../sortexpr/AllDataTypesTestCaseSort.scala     |  7 +++-
 15 files changed, 101 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
index c0be31e..f14f29e 100644
--- a/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
@@ -33,6 +33,8 @@ public final class DataTypeUtil {
         BigDecimal bigDecimal =
             new BigDecimal(msrValue).setScale(carbonMeasure.getScale(), RoundingMode.HALF_UP);
         return normalizeDecimalValue(bigDecimal, carbonMeasure.getPrecision());
+      case INT:
+        return Double.valueOf(msrValue).longValue();
       case LONG:
         return Long.valueOf(msrValue);
       default:
@@ -64,6 +66,7 @@ public final class DataTypeUtil {
     switch (dataType) {
       case DECIMAL:
         return CarbonCommonConstants.BIG_DECIMAL_MEASURE;
+      case INT:
       case LONG:
         return CarbonCommonConstants.BIG_INT_MEASURE;
       default:

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/core/src/main/java/org/carbondata/query/aggregator/util/MeasureAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/aggregator/util/MeasureAggregatorFactory.java b/core/src/main/java/org/carbondata/query/aggregator/util/MeasureAggregatorFactory.java
index 0f0228b..a49c374 100644
--- a/core/src/main/java/org/carbondata/query/aggregator/util/MeasureAggregatorFactory.java
+++ b/core/src/main/java/org/carbondata/query/aggregator/util/MeasureAggregatorFactory.java
@@ -95,6 +95,7 @@ public class MeasureAggregatorFactory {
     // get the MeasureAggregator based on aggregate type
     if (CarbonCommonConstants.MIN.equalsIgnoreCase(aggregatorType)) {
       switch (dataType) {
+        case INT:
         case LONG:
           return new MinLongAggregator();
         case DECIMAL:
@@ -108,6 +109,7 @@ public class MeasureAggregatorFactory {
     //
     else if (CarbonCommonConstants.MAX.equalsIgnoreCase(aggregatorType)) {
       switch (dataType) {
+        case INT:
         case LONG:
           return new MaxLongAggregator();
         case DECIMAL:
@@ -119,6 +121,7 @@ public class MeasureAggregatorFactory {
     //
     else if (CarbonCommonConstants.AVERAGE.equalsIgnoreCase(aggregatorType)) {
       switch (dataType) {
+        case INT:
         case LONG:
 
           return new AvgLongAggregator();
@@ -136,6 +139,7 @@ public class MeasureAggregatorFactory {
     //
     else if (CarbonCommonConstants.DISTINCT_COUNT.equalsIgnoreCase(aggregatorType)) {
       switch (dataType) {
+        case INT:
         case LONG:
           return new DistinctCountLongAggregatorObjectSet();
         case DECIMAL:
@@ -146,6 +150,7 @@ public class MeasureAggregatorFactory {
 
     } else if (CarbonCommonConstants.SUM.equalsIgnoreCase(aggregatorType)) {
       switch (dataType) {
+        case INT:
         case LONG:
 
           return new SumLongAggregator();
@@ -162,6 +167,7 @@ public class MeasureAggregatorFactory {
       }
     } else if (CarbonCommonConstants.SUM_DISTINCT.equalsIgnoreCase(aggregatorType)) {
       switch (dataType) {
+        case INT:
         case LONG:
 
           return new SumDistinctLongAggregator();
@@ -174,6 +180,7 @@ public class MeasureAggregatorFactory {
       }
     } else if (CarbonCommonConstants.DUMMY.equalsIgnoreCase(aggregatorType)) {
       switch (dataType) {
+        case INT:
         case LONG:
 
           return new DummyLongAggregator();

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/core/src/main/java/org/carbondata/query/carbon/aggregator/expression/ExpressionAggregator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/carbon/aggregator/expression/ExpressionAggregator.java b/core/src/main/java/org/carbondata/query/carbon/aggregator/expression/ExpressionAggregator.java
index b7b79a1..ae839f5 100644
--- a/core/src/main/java/org/carbondata/query/carbon/aggregator/expression/ExpressionAggregator.java
+++ b/core/src/main/java/org/carbondata/query/carbon/aggregator/expression/ExpressionAggregator.java
@@ -90,6 +90,7 @@ public class ExpressionAggregator {
             // if no null then get the data based on actual data
             // type
             switch (carbonColumn.getDataType()) {
+              case INT:
               case LONG:
                 row[j] = scannedResult.getLongMeasureValue(carbonColumn.getOrdinal());
                 break;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/core/src/main/java/org/carbondata/query/carbon/aggregator/impl/ListBasedResultAggregator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/carbon/aggregator/impl/ListBasedResultAggregator.java b/core/src/main/java/org/carbondata/query/carbon/aggregator/impl/ListBasedResultAggregator.java
index 2b97085..1ab51e9 100644
--- a/core/src/main/java/org/carbondata/query/carbon/aggregator/impl/ListBasedResultAggregator.java
+++ b/core/src/main/java/org/carbondata/query/carbon/aggregator/impl/ListBasedResultAggregator.java
@@ -160,6 +160,7 @@ public class ListBasedResultAggregator implements ScannedResultAggregator {
     if (!dataChunk.getNullValueIndexHolder().getBitSet().get(index)) {
       Object msrVal;
       switch (dataType) {
+        case INT:
         case LONG:
           msrVal = dataChunk.getMeasureDataHolder().getReadableLongValueByIndex(index);
           break;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/QueryResultPreparatorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/QueryResultPreparatorImpl.java b/core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/QueryResultPreparatorImpl.java
index f185477..918814a 100644
--- a/core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/QueryResultPreparatorImpl.java
+++ b/core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/QueryResultPreparatorImpl.java
@@ -185,6 +185,7 @@ public class QueryResultPreparatorImpl
           } else {
             Object msrVal;
             switch (msr.getMeasure().getDataType()) {
+              case INT:
               case LONG:
                 msrVal = msrAgg[queryExecuterProperties.measureStartIndex + i].getLongValue();
                 break;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/core/src/main/java/org/carbondata/query/filter/executer/RowLevelFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/query/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/carbondata/query/filter/executer/RowLevelFilterExecuterImpl.java
index 5af2e35..166426a 100644
--- a/core/src/main/java/org/carbondata/query/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/carbondata/query/filter/executer/RowLevelFilterExecuterImpl.java
@@ -235,6 +235,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
 
     for (MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo : msrColEvalutorInfoList) {
       switch (msrColumnEvalutorInfo.getType()) {
+        case INT:
         case LONG:
           msrType = DataType.LONG;
           break;
@@ -257,6 +258,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
                 blockChunkHolder.getMeasureDataChunk()[msrColumnEvalutorInfo.getColumnIndex()]
                     .getMeasureDataHolder().getReadableByteArrayValueByIndex(index));
             switch (msrType) {
+              case INT:
               case LONG:
                 record[msrColumnEvalutorInfo.getRowIndex()] = aggregator.getLongValue();
                 break;
@@ -270,6 +272,7 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
         } else {
           Object msrValue;
           switch (msrType) {
+            case INT:
             case LONG:
               msrValue =
                   blockChunkHolder.getMeasureDataChunk()[msrColumnEvalutorInfo.getColumnIndex()]

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
index 3cb9802..3035a47 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
@@ -229,7 +229,7 @@ case class CarbonRelation(
         .map(x => AttributeReference(x.getColName, CarbonMetastoreTypes.toDataType(
         metaData.carbonTable.getMeasureByName(factTable, x.getColName).getDataType.toString
           .toLowerCase match {
-          case "int" => "double"
+          case "int" => "long"
           case "decimal" => "decimal(" + x.getPrecision + "," + x.getScale + ")"
           case others => others
         }),

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala b/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala
index ef8fb3d..0b27f91 100644
--- a/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala
+++ b/integration/spark/src/test/scala/org/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithMaxMinInteger.scala
@@ -48,9 +48,9 @@ class TestLoadDataWithMaxMinInteger extends QueryTest with BeforeAndAfterAll {
         'QUOTECHAR'='"')
       """)
     checkAnswer(sql("select age from integer_table_01"),
-      Seq(Row(10.0), Row(26.0), Row(10.0), Row(10.0), Row(20.0),
-        Row(10.0), Row(10.0), Row(10.0), Row(10.0), Row(10.0),
-        Row(-2147483648.0)))
+      Seq(Row(10), Row(26), Row(10), Row(10), Row(20),
+        Row(10), Row(10), Row(10), Row(10), Row(10),
+        Row(-2147483648)))
   }
 
   test("test carbon table data loading when the int column " +
@@ -67,9 +67,9 @@ class TestLoadDataWithMaxMinInteger extends QueryTest with BeforeAndAfterAll {
         'QUOTECHAR'='"')
       """)
     checkAnswer(sql("select age from integer_table_02"),
-      Seq(Row(10.0), Row(26.0), Row(10.0), Row(10.0), Row(20.0),
-        Row(10.0), Row(10.0), Row(10.0), Row(10.0), Row(10.0),
-        Row(2147483647.0)))
+      Seq(Row(10), Row(26), Row(10), Row(10), Row(20),
+        Row(10), Row(10), Row(10), Row(10), Row(10),
+        Row(2147483647)))
   }
 
   test("test carbon table data loading when the int column " +
@@ -86,9 +86,9 @@ class TestLoadDataWithMaxMinInteger extends QueryTest with BeforeAndAfterAll {
         'QUOTECHAR'='"')
       """)
     checkAnswer(sql("select age from integer_table_03"),
-      Seq(Row(10.0), Row(26.0), Row(10.0), Row(10.0), Row(20.0),
-        Row(10.0), Row(10.0), Row(10.0), Row(10.0), Row(10.0),
-        Row(-2147483648.0), Row(2147483647.0)))
+      Seq(Row(10), Row(26), Row(10), Row(10), Row(20),
+        Row(10), Row(10), Row(10), Row(10), Row(10),
+        Row(-2147483648), Row(2147483647)))
   }
   override def afterAll {
     sql("drop table if exists integer_table_01")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
index 5909419..196ca45 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/aggquery/AllDataTypesTestCaseAggregate.scala
@@ -44,6 +44,16 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
     sql(
       "LOAD DATA LOCAL INPATH './src/test/resources/data.csv' INTO TABLE alldatatypescubeAGG " +
       "OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')")
+      
+    sql(
+      "CREATE TABLE alldatatypescubeAGG_hive (empno int, empname String, designation String, doj " +
+      "Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname " +
+      "String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance " +
+      "int,utilization int,salary int)row format delimited fields terminated by ','")
+    sql(
+      "LOAD DATA LOCAL INPATH './src/test/resources/datawithoutheader.csv' INTO TABLE alldatatypescubeAGG_hive")
+      
+       
   }
 
   test(
@@ -54,7 +64,9 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
       sql(
         "select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeAGG where" +
         " empname in ('arvind','ayushi') group by empno,empname,utilization"),
-      Seq(Row(11, "arvind", 96.2, 1, 11), Row(15, "ayushi", 91.5, 1, 15)))
+      sql(
+        "select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeAGG_hive where" +
+        " empname in ('arvind','ayushi') group by empno,empname,utilization"))
   }
 
   test(
@@ -65,7 +77,9 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
       sql(
         "select empname,trim(designation),avg(salary),avg(empno) from alldatatypescubeAGG where " +
         "empname in ('arvind','ayushi') group by empname,trim(designation)"),
-      Seq(Row("arvind", "SE", 5040.56, 11.0), Row("ayushi", "SSA", 13245.48, 15.0)))
+      sql(
+        "select empname,trim(designation),avg(salary),avg(empno) from alldatatypescubeAGG_hive where " +
+        "empname in ('arvind','ayushi') group by empname,trim(designation)"))
   }
 
   test(
@@ -78,7 +92,10 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
         "select empname,length(designation),max(empno),min(empno), avg(empno) from " +
         "alldatatypescubeAGG where empname in ('arvind','ayushi') group by empname,length" +
         "(designation) order by empname"),
-      Seq(Row("arvind", 2, 11, 11, 11.0), Row("ayushi", 3, 15, 15, 15.0)))
+      sql(
+        "select empname,length(designation),max(empno),min(empno), avg(empno) from " +
+        "alldatatypescubeAGG_hive where empname in ('arvind','ayushi') group by empname,length" +
+        "(designation) order by empname"))
   }
 
   override def afterAll {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
index 4a02975..c850bd1 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -45,6 +45,10 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
     sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO table Carbon_automation_test options('DELIMITER'= ',' ,'QUOTECHAR'= '\"', 'FILEHEADER'= 'imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription')");
 
+    //hive table
+    sql("create table Carbon_automation_test_hive (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string,contractNumber int, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointId int,gamePointDescription string)row format delimited fields terminated by ','");
+    sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO table Carbon_automation_test_hive");
+
   }
 
   override def afterAll {
@@ -215,7 +219,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
 
     checkAnswer(
       sql("select sum(gamepointid) +10 as a ,series  from Carbon_automation_test group by series"),
-      Seq(Row(12932, "6Series"), Row(25890, "0Series"), Row(12354, "4Series"), Row(13577, "8Series"), Row(18601.197, "7Series"), Row(4011, "1Series"), Row(29081, "5Series"), Row(12930, "9Series"), Row(15245, "3Series"), Row(12364, "2Series")))
+      sql("select sum(gamepointid) +10 as a ,series  from Carbon_automation_test_hive group by series"))
   })  
   
   //Test-50
@@ -223,7 +227,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
 
     checkAnswer(
       sql("select sum(gamepointid) +10.36 as a ,series  from Carbon_automation_test group by series"),
-      Seq(Row(12932.36, "6Series"), Row(25890.36, "0Series"), Row(12354.36, "4Series"), Row(13577.36, "8Series"), Row(18601.557, "7Series"), Row(4011.36, "1Series"), Row(29081.36, "5Series"), Row(12930.36, "9Series"), Row(15245.36, "3Series"), Row(12364.36, "2Series")))
+      sql("select sum(gamepointid) +10.36 as a ,series  from Carbon_automation_test_hive group by series"))
   })
   
   //TC_055
@@ -318,7 +322,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("select sum(gamePointId) a  from Carbon_automation_test")({
     checkAnswer(
       sql("select sum(gamePointId) a  from Carbon_automation_test"),
-      Seq(Row(156885.197)))
+      sql("select sum(gamePointId) a  from Carbon_automation_test_hive"))
   })
     //TC_077
   test("select sum(DISTINCT  deviceInformationId) a  from Carbon_automation_test")({
@@ -513,7 +517,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series"),
-      Seq(Row("0Series", 25880.0), Row("1Series", 4001.0), Row("2Series", 12354.0), Row("3Series", 15235.0), Row("4Series", 12344.0), Row("5Series", 29071.0), Row("6Series", 12922.0), Row("7Series", 18591.197), Row("8Series", 13567.0), Row("9Series", 12920.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by series"))
   })
   
    //TC_162
@@ -611,56 +615,56 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series desc")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series desc"),
-      Seq(Row("9Series", 12920.0), Row("8Series", 13567.0), Row("7Series", 18591.197), Row("6Series", 12922.0), Row("5Series", 29071.0), Row("4Series", 12344.0), Row("3Series", 15235.0), Row("2Series", 12354.0), Row("1Series", 4001.0), Row("0Series", 25880.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by series desc"))
   })
 
   //TC_181
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by a desc")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by a desc"),
-      Seq(Row("5Series", 29071.0), Row("0Series", 25880.0), Row("7Series", 18591.197), Row("3Series", 15235.0), Row("8Series", 13567.0), Row("6Series", 12922.0), Row("9Series", 12920.0), Row("2Series", 12354.0), Row("4Series", 12344.0), Row("1Series", 4001.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by a desc"))
   })
 
   //TC_182
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series desc ,a desc")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series desc ,a desc"),
-      Seq(Row("9Series", 12920.0), Row("8Series", 13567.0), Row("7Series", 18591.197), Row("6Series", 12922.0), Row("5Series", 29071.0), Row("4Series", 12344.0), Row("3Series", 15235.0), Row("2Series", 12354.0), Row("1Series", 4001.0), Row("0Series", 25880.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by series desc ,a desc"))
   })
 
   //TC_183
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series asc")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series asc"),
-      Seq(Row("0Series", 25880.0), Row("1Series", 4001.0), Row("2Series", 12354.0), Row("3Series", 15235.0), Row("4Series", 12344.0), Row("5Series", 29071.0), Row("6Series", 12922.0), Row("7Series", 18591.197), Row("8Series", 13567.0), Row("9Series", 12920.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by series asc"))
   })
 
   //TC_184
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by a asc")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by a asc"),
-      Seq(Row("1Series", 4001.0), Row("4Series", 12344.0), Row("2Series", 12354.0), Row("9Series", 12920.0), Row("6Series", 12922.0), Row("8Series", 13567.0), Row("3Series", 15235.0), Row("7Series", 18591.197), Row("0Series", 25880.0), Row("5Series", 29071.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by a asc"))
   })
 
   //TC_185
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series asc ,a asc")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series asc ,a asc"),
-      Seq(Row("0Series", 25880.0), Row("1Series", 4001.0), Row("2Series", 12354.0), Row("3Series", 15235.0), Row("4Series", 12344.0), Row("5Series", 29071.0), Row("6Series", 12922.0), Row("7Series", 18591.197), Row("8Series", 13567.0), Row("9Series", 12920.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by series asc ,a asc"))
   })
 
   //TC_186
   test("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series desc ,a asc")({
     checkAnswer(
       sql("select series,sum(gamePointId) a from Carbon_automation_test group by series order by series desc ,a asc"),
-      Seq(Row("9Series", 12920.0), Row("8Series", 13567.0), Row("7Series", 18591.197), Row("6Series", 12922.0), Row("5Series", 29071.0), Row("4Series", 12344.0), Row("3Series", 15235.0), Row("2Series", 12354.0), Row("1Series", 4001.0), Row("0Series", 25880.0)))
+      sql("select series,sum(gamePointId) a from Carbon_automation_test_hive group by series order by series desc ,a asc"))
   })
 
   //TC_187
   test("select series,ActiveProvince,sum(gamePointId) a from Carbon_automation_test group by series,ActiveProvince order by series desc,ActiveProvince asc")({
     checkAnswer(
       sql("select series,ActiveProvince,sum(gamePointId) a from Carbon_automation_test group by series,ActiveProvince order by series desc,ActiveProvince asc"),
-      Seq(Row("9Series", "Guangdong Province", 2205.0), Row("9Series", "Hubei Province", 2530.0), Row("9Series", "Hunan Province", 8185.0), Row("8Series", "Guangdong Province", 2235.0), Row("8Series", "Hubei Province", 7962.0), Row("8Series", "Hunan Province", 3370.0), Row("7Series", "Guangdong Province", 8935.562), Row("7Series", "Hubei Province", 1714.635), Row("7Series", "Hunan Province", 7941.0), Row("6Series", "Guangdong Province", 907.0), Row("6Series", "Hubei Province", 6504.0), Row("6Series", "Hunan Province", 5511.0), Row("5Series", "Guangdong Province", 8963.0), Row("5Series", "Hubei Province", 6100.0), Row("5Series", "Hunan Province", 14008.0), Row("4Series", "Guangdong Province", 2488.0), Row("4Series", "Hubei Province", 2970.0), Row("4Series", "Hunan Province", 6886.0), Row("3Series", "Guangdong Province", 2586.0), Row("3Series", "Hubei Province", 3555.0), Row("3Series", "Hunan Province", 9094.0), Row("2Series", "Hubei Province", 4016.0), Row("2Series", "Hunan Province", 8338.0), Row("1Series", "Guangdong Province", 1408.0), Row("1Series", "Hunan Province", 2593.0), Row("0Series", "Guangdong Province", 2192.0), Row("0Series", "Hubei Province", 7500.0), Row("0Series", "Hunan Province", 16188.0)))
+      sql("select series,ActiveProvince,sum(gamePointId) a from Carbon_automation_test_hive group by series,ActiveProvince order by series desc,ActiveProvince asc"))
   })
   
    //TC_208
@@ -737,28 +741,28 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("SELECT AMSize, ActiveAreaId, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY AMSize, ActiveAreaId ORDER BY AMSize ASC, ActiveAreaId ASC")({
     checkAnswer(
       sql("SELECT AMSize, ActiveAreaId, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY AMSize, ActiveAreaId ORDER BY AMSize ASC, ActiveAreaId ASC"),
-      Seq(Row("0RAM size", "1", 2849.0), Row("0RAM size", "2", 79.0), Row("0RAM size", "3", 7663.0), Row("0RAM size", "5", 1341.0), Row("0RAM size", "6", 6082.0), Row("1RAM size", "1", 256.0), Row("1RAM size", "2", 1333.0), Row("1RAM size", "4", 7510.0), Row("1RAM size", "5", 2745.0), Row("1RAM size", "7", 3942.0), Row("2RAM size", "3", 1973.0), Row("2RAM size", "4", 1350.0), Row("3RAM size", "1", 6640.0), Row("3RAM size", "2", 1999.0), Row("3RAM size", "3", 2863.0), Row("3RAM size", "4", 3824.0), Row("3RAM size", "5", 5699.0), Row("3RAM size", "6", 2635.0), Row("3RAM size", "7", 1491.0), Row("4RAM size", "1", 2255.0), Row("4RAM size", "2", 1728.0), Row("4RAM size", "3", 9130.0), Row("4RAM size", "4", 11560.0), Row("4RAM size", "6", 5344.635), Row("4RAM size", "7", 1338.0), Row("5RAM size", "2", 4712.0), Row("5RAM size", "3", 2769.0), Row("5RAM size", "6", 2478.0), Row("6RAM size", "1", 2142.0), Row("6RAM size", "2", 1768.0), Row("6RAM size", "3", 2633.0), Row("6RAM size", "4", 866.0), Row("6RAM size", "5", 2952.0), Row("6RAM size", "6", 3257.0), Row("7RAM size", "3", 151.0), Row("7RAM size", "5", 2239.0), Row("7RAM size", "6", 3979.0), Row("7RAM size", "7", 2031.0), Row("8RAM size", "1", 355.0), Row("8RAM size", "2", 2738.562), Row("8RAM size", "4", 3102.0), Row("8RAM size", "5", 2684.0), Row("8RAM size", "6", 2970.0), Row("8RAM size", "7", 5166.0), Row("9RAM size", "1", 3065.0), Row("9RAM size", "3", 3239.0), Row("9RAM size", "4", 5821.0), Row("9RAM size", "6", 1567.0), Row("9RAM size", "7", 571.0)))
+      sql("SELECT AMSize, ActiveAreaId, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test_hive) SUB_QRY GROUP BY AMSize, ActiveAreaId ORDER BY AMSize ASC, ActiveAreaId ASC"))
   })
 
   //TC_265
   test("SELECT AMSize, ActiveAreaId, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY WHERE NOT(AMSize = \"\") GROUP BY AMSize, ActiveAreaId ORDER BY AMSize ASC, ActiveAreaId ASC")({
     checkAnswer(
       sql("SELECT AMSize, ActiveAreaId, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY WHERE NOT(AMSize = \"\") GROUP BY AMSize, ActiveAreaId ORDER BY AMSize ASC, ActiveAreaId ASC"),
-      Seq(Row("0RAM size", "1", 2849.0), Row("0RAM size", "2", 79.0), Row("0RAM size", "3", 7663.0), Row("0RAM size", "5", 1341.0), Row("0RAM size", "6", 6082.0), Row("1RAM size", "1", 256.0), Row("1RAM size", "2", 1333.0), Row("1RAM size", "4", 7510.0), Row("1RAM size", "5", 2745.0), Row("1RAM size", "7", 3942.0), Row("2RAM size", "3", 1973.0), Row("2RAM size", "4", 1350.0), Row("3RAM size", "1", 6640.0), Row("3RAM size", "2", 1999.0), Row("3RAM size", "3", 2863.0), Row("3RAM size", "4", 3824.0), Row("3RAM size", "5", 5699.0), Row("3RAM size", "6", 2635.0), Row("3RAM size", "7", 1491.0), Row("4RAM size", "1", 2255.0), Row("4RAM size", "2", 1728.0), Row("4RAM size", "3", 9130.0), Row("4RAM size", "4", 11560.0), Row("4RAM size", "6", 5344.635), Row("4RAM size", "7", 1338.0), Row("5RAM size", "2", 4712.0), Row("5RAM size", "3", 2769.0), Row("5RAM size", "6", 2478.0), Row("6RAM size", "1", 2142.0), Row("6RAM size", "2", 1768.0), Row("6RAM size", "3", 2633.0), Row("6RAM size", "4", 866.0), Row("6RAM size", "5", 2952.0), Row("6RAM size", "6", 3257.0), Row("7RAM size", "3", 151.0), Row("7RAM size", "5", 2239.0), Row("7RAM size", "6", 3979.0), Row("7RAM size", "7", 2031.0), Row("8RAM size", "1", 355.0), Row("8RAM size", "2", 2738.562), Row("8RAM size", "4", 3102.0), Row("8RAM size", "5", 2684.0), Row("8RAM size", "6", 2970.0), Row("8RAM size", "7", 5166.0), Row("9RAM size", "1", 3065.0), Row("9RAM size", "3", 3239.0), Row("9RAM size", "4", 5821.0), Row("9RAM size", "6", 1567.0), Row("9RAM size", "7", 571.0)))
+      sql("SELECT AMSize, ActiveAreaId, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test_hive) SUB_QRY WHERE NOT(AMSize = \"\") GROUP BY AMSize, ActiveAreaId ORDER BY AMSize ASC, ActiveAreaId ASC"))
   })
   
    //TC_274
   test("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamepointid) AS Sum_gamepointid FROM  Carbon_automation_test group by ActiveCountry,ActiveDistrict,Activecity")({
     checkAnswer(
       sql("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamepointid) AS Sum_gamepointid FROM  Carbon_automation_test group by ActiveCountry,ActiveDistrict,Activecity"),
-      Seq(Row("Chinese", "hongshan", "wuhan", 28312.635000000002), Row("Chinese", "longgang", "shenzhen", 17562.0), Row("Chinese", "yichang", "yichang", 14539.0), Row("Chinese", "tianyuan", "zhuzhou", 17660.0), Row("Chinese", "yuhua", "changsha", 30421.0), Row("Chinese", "xiangtan", "xiangtan", 34033.0), Row("Chinese", "longhua", "guangzhou", 14357.562)))
+      sql("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamepointid) AS Sum_gamepointid FROM  Carbon_automation_test_hive group by ActiveCountry,ActiveDistrict,Activecity"))
   })
 
   //TC_275
   test("SELECT Latest_country, Latest_city, Latest_district, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY Latest_country, Latest_city, Latest_district ORDER BY Latest_country ASC, Latest_city ASC, Latest_district ASC")({
     checkAnswer(
       sql("SELECT Latest_country, Latest_city, Latest_district, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY Latest_country, Latest_city, Latest_district ORDER BY Latest_country ASC, Latest_city ASC, Latest_district ASC"),
-      Seq(Row("Chinese", "changsha", "yuhua", 26119.0), Row("Chinese", "guangzhou", "longhua", 31520.561999999998), Row("Chinese", "shenzhen", "longgang", 19969.0), Row("Chinese", "wuhan", "hongshan", 18381.0), Row("Chinese", "xiangtan", "xiangtan", 24753.635000000002), Row("Chinese", "yichang", "yichang", 28467.0), Row("Chinese", "zhuzhou", "tianyuan", 7675.0)))
+      sql("SELECT Latest_country, Latest_city, Latest_district, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test_hive) SUB_QRY GROUP BY Latest_country, Latest_city, Latest_district ORDER BY Latest_country ASC, Latest_city ASC, Latest_district ASC"))
   })
 
   //TC_276
@@ -779,7 +783,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY ActiveCountry, ActiveDistrict, Activecity ORDER BY ActiveCountry ASC, ActiveDistrict ASC, Activecity ASC")({
     checkAnswer(
       sql("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY ActiveCountry, ActiveDistrict, Activecity ORDER BY ActiveCountry ASC, ActiveDistrict ASC, Activecity ASC"),
-      Seq(Row("Chinese", "hongshan", "wuhan", 28312.635000000002), Row("Chinese", "longgang", "shenzhen", 17562.0), Row("Chinese", "longhua", "guangzhou", 14357.562), Row("Chinese", "tianyuan", "zhuzhou", 17660.0), Row("Chinese", "xiangtan", "xiangtan", 34033.0), Row("Chinese", "yichang", "yichang", 14539.0), Row("Chinese", "yuhua", "changsha", 30421.0)))
+      sql("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test_hive) SUB_QRY GROUP BY ActiveCountry, ActiveDistrict, Activecity ORDER BY ActiveCountry ASC, ActiveDistrict ASC, Activecity ASC"))
   })
   
     //TC_317
@@ -800,7 +804,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY ActiveCountry, ActiveDistrict, Activecity ORDER BY ActiveCountry ASC, ActiveDistrict ASC, Activecity ASC")({
     checkAnswer(
       sql("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY ActiveCountry, ActiveDistrict, Activecity ORDER BY ActiveCountry ASC, ActiveDistrict ASC, Activecity ASC"),
-      Seq(Row("Chinese", "hongshan", "wuhan", 28312.635000000002), Row("Chinese", "longgang", "shenzhen", 17562.0), Row("Chinese", "longhua", "guangzhou", 14357.562), Row("Chinese", "tianyuan", "zhuzhou", 17660.0), Row("Chinese", "xiangtan", "xiangtan", 34033.0), Row("Chinese", "yichang", "yichang", 14539.0), Row("Chinese", "yuhua", "changsha", 30421.0)))
+      sql("SELECT ActiveCountry, ActiveDistrict, Activecity, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test_hive) SUB_QRY GROUP BY ActiveCountry, ActiveDistrict, Activecity ORDER BY ActiveCountry ASC, ActiveDistrict ASC, Activecity ASC"))
   })
 
   //TC_321
@@ -814,7 +818,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("SELECT series, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY series ORDER BY series ASC")({
     checkAnswer(
       sql("SELECT series, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY series ORDER BY series ASC"),
-      Seq(Row("0Series", 25880.0), Row("1Series", 4001.0), Row("2Series", 12354.0), Row("3Series", 15235.0), Row("4Series", 12344.0), Row("5Series", 29071.0), Row("6Series", 12922.0), Row("7Series", 18591.197), Row("8Series", 13567.0), Row("9Series", 12920.0)))
+      sql("SELECT series, SUM(gamePointId) AS Sum_gamePointId FROM (select * from Carbon_automation_test_hive) SUB_QRY GROUP BY series ORDER BY series ASC"))
   })
 
   //TC_386
@@ -828,7 +832,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
   test("SELECT modelId, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY modelId ORDER BY modelId ASC")({
     checkAnswer(
       sql("SELECT modelId, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test) SUB_QRY GROUP BY modelId ORDER BY modelId ASC"),
-      Seq(Row("1017", 2483.0), Row("104", 1442.0), Row("1062", 2507.0), Row("1069", 151.0), Row("1085", 448.0), Row("109", 2738.562), Row("1121", 2239.0), Row("1160", 572.0), Row("1168", 901.0), Row("1184", 2399.0), Row("1185", 1608.0), Row("1195", 1491.0), Row("1198", 1053.0), Row("1210", 1655.0), Row("1232", 1697.0), Row("1234", 2061.0), Row("1326", 2071.0), Row("138", 865.0), Row("1386", 2194.0), Row("1429", 2478.0), Row("1447", 2863.0), Row("1511", 2970.0), Row("155", 1999.0), Row("1580", 2205.0), Row("1602", 2553.0), Row("1619", 2142.0), Row("1624", 813.0), Row("1650", 613.0), Row("1683", 1973.0), Row("1689", 1368.0), Row("1695", 1691.0), Row("1734", 1778.0), Row("1741", 1080.0), Row("1815", 136.0), Row("1835", 1750.0), Row("1841", 2826.0), Row("1845", 505.0), Row("1856", 2192.0), Row("187", 571.0), Row("1890", 412.0), Row("1969", 2078.0), Row("2008", 1341.0), Row("2069", 2572.0), Row("2074", 907.0), Row("2133", 2734.0), Row("2142", 1226.0), Row("2151", 2194.0), Row("2164", 1098.0), Row("2167", 355.0), Row("2176", 538.0), Row("2201", 2972.0), Row("2300", 845.0), Row("2319", 1077.0), Row("2320", 1407.0), Row("2355", 954.0), Row("2381", 1015.0), Row("2408", 2175.0), Row("2415", 2224.0), Row("2457", 29.0), Row("2479", 1600.0), Row("2531", 692.0), Row("2563", 1407.0), Row("2574", 256.0), Row("2591", 1271.0), Row("2594", 2952.0), Row("2597", 1717.0), Row("2644", 568.0), Row("2696", 79.0), Row("2705", 2890.0), Row("273", 760.0), Row("2759", 2593.0), Row("2761", 2348.0), Row("2765", 1434.0), Row("2797", 1350.0), Row("2799", 2077.0), Row("2823", 1728.0), Row("2828", 1864.0), Row("2930", 1768.0), Row("2940", 2436.0), Row("2963", 1873.0), Row("297", 2849.0), Row("396", 1991.0), Row("44", 1567.0), Row("446", 441.0), Row("466", 202.0), Row("47", 1724.0), Row("477", 1841.0), Row("499", 1337.0), Row("513", 1333.0), Row("546", 298.0), Row("631", 2745.0), Row("68", 750.0), Row("716", 2288.0), Row("776", 2488.0), Row("839", 1823.0), Row("864", 2635.0), Row("872", 1229.0), Row("93", 1714.635), Row("987", 732.0)))
+      sql("SELECT modelId, SUM(gamepointid) AS Sum_gamepointid FROM (select * from Carbon_automation_test_hive) SUB_QRY GROUP BY modelId ORDER BY modelId ASC"))
   })
 
   //TC_388

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/AllDataTypesTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/AllDataTypesTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/AllDataTypesTestCase.scala
index 478203a..007ed9d 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/AllDataTypesTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/AllDataTypesTestCase.scala
@@ -35,15 +35,20 @@ class AllDataTypesTestCase extends QueryTest with BeforeAndAfterAll {
   override def beforeAll {
     sql("CREATE TABLE alldatatypescube (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
     sql("LOAD DATA LOCAL INPATH './src/test/resources/data.csv' INTO TABLE alldatatypescube OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+ 
+    sql("CREATE TABLE alldatatypescube_hive (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)row format delimited fields terminated by ','")
+    sql("LOAD DATA local inpath './src/test/resources/datawithoutheader.csv' INTO TABLE alldatatypescube_hive");
+
   }
 
   test("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescube where empname in ('arvind','ayushi') group by empno,empname,utilization") {
     checkAnswer(
       sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescube where empname in ('arvind','ayushi') group by empno,empname,utilization"),
-      Seq(Row(11, "arvind", 96.2, 1, 11), Row(15, "ayushi", 91.5, 1, 15)))
+      sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescube_hive where empname in ('arvind','ayushi') group by empno,empname,utilization"))
   }
 
   override def afterAll {
     sql("drop table alldatatypescube")
+    sql("drop table alldatatypescube_hive")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
index eec9a0c..d9abe75 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
@@ -35,15 +35,20 @@ class AllDataTypesTestCaseFilter extends QueryTest with BeforeAndAfterAll {
   override def beforeAll {
     sql("CREATE TABLE alldatatypescubeFilter (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
     sql("LOAD DATA local inpath './src/test/resources/data.csv' INTO TABLE alldatatypescubeFilter OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+    
+    sql("CREATE TABLE alldatatypescubeFilter_hive (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)row format delimited fields terminated by ','")
+    sql("LOAD DATA local inpath './src/test/resources/datawithoutheader.csv' INTO TABLE alldatatypescubeFilter_hive");
+
   }
 
   test("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeFilter where empname in ('arvind','ayushi') group by empno,empname,utilization") {
     checkAnswer(
       sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeFilter where empname in ('arvind','ayushi') group by empno,empname,utilization"),
-      Seq(Row(11, "arvind", 96.2, 1, 11), Row(15, "ayushi", 91.5, 1, 15)))
+      sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeFilter_hive where empname in ('arvind','ayushi') group by empno,empname,utilization"))
   }
 
   override def afterAll {
     sql("drop table alldatatypescubeFilter")
+    sql("drop table alldatatypescubeFilter_hive")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/hadooprelation/HadoopFSRelationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/hadooprelation/HadoopFSRelationTestCase.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/hadooprelation/HadoopFSRelationTestCase.scala
index c6fd651..f18bc23 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/hadooprelation/HadoopFSRelationTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/hadooprelation/HadoopFSRelationTestCase.scala
@@ -40,6 +40,11 @@ class HadoopFSRelationTestCase extends QueryTest with BeforeAndAfterAll {
     sql(
       "LOAD DATA local inpath './src/test/resources/data.csv' INTO TABLE hadoopfsrelation " +
       "OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+    
+    sql("CREATE TABLE hadoopfsrelation_hive (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)row format delimited fields terminated by ','")
+    
+    sql(
+      "LOAD DATA local inpath './src/test/resources/datawithoutheader.csv' INTO TABLE hadoopfsrelation_hive ");
   }
 
   test("hadoopfsrelation select all test") {
@@ -54,10 +59,11 @@ class HadoopFSRelationTestCase extends QueryTest with BeforeAndAfterAll {
       .select("empno", "empname", "utilization").where("empname in ('arvind','ayushi')")
     checkAnswer(
       rdd,
-      Seq(Row(11, "arvind", 96.2), Row(15, "ayushi", 91.5)))
+      sql("select empno,empname,utilization from hadoopfsrelation_hive where empname in ('arvind','ayushi')"))
   }
 
   override def afterAll {
     sql("drop table hadoopfsrelation")
+    sql("drop table hadoopfsrelation_hive")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala
index d088021..e0f6ae5 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala
@@ -35,15 +35,20 @@ class AllDataTypesTestCaseJoin extends QueryTest with BeforeAndAfterAll {
   override def beforeAll {
     sql("CREATE TABLE alldatatypescubeJoin (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
     sql("LOAD DATA local inpath './src/test/resources/data.csv' INTO TABLE alldatatypescubeJoin OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+
+    sql("CREATE TABLE alldatatypescubeJoin_hive (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)row format delimited fields terminated by ','")
+    sql("LOAD DATA local inpath './src/test/resources/datawithoutheader.csv' INTO TABLE alldatatypescubeJoin_hive");
+
   }
 
   test("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeJoin where empname in ('arvind','ayushi') group by empno,empname,utilization") {
     checkAnswer(
       sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeJoin where empname in ('arvind','ayushi') group by empno,empname,utilization"),
-      Seq(Row(11, "arvind", 96.2, 1, 11), Row(15, "ayushi", 91.5, 1, 15)))
+      sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubeJoin_hive where empname in ('arvind','ayushi') group by empno,empname,utilization"))
   }
 
   override def afterAll {
     sql("drop table alldatatypescubeJoin")
+    sql("drop table alldatatypescubeJoin_hive")
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/9821beea/integration/spark/src/test/scala/org/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
index 9562c8f..3b9c98d 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
@@ -35,15 +35,20 @@ class AllDataTypesTestCaseSort extends QueryTest with BeforeAndAfterAll {
   override def beforeAll {
     sql("CREATE TABLE alldatatypescubesort (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
     sql("LOAD DATA local inpath './src/test/resources/data.csv' INTO TABLE alldatatypescubesort OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
+
+    sql("CREATE TABLE alldatatypescubesort_hive (empno int, empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)row format delimited fields terminated by ','")
+    sql("LOAD DATA local inpath './src/test/resources/datawithoutheader.csv' INTO TABLE alldatatypescubesort_hive");
+
   }
 
   test("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubesort where empname in ('arvind','ayushi') group by empno,empname,utilization order by empno") {
     checkAnswer(
       sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubesort where empname in ('arvind','ayushi') group by empno,empname,utilization order by empno"),
-      Seq(Row(11, "arvind", 96.2, 1, 11), Row(15, "ayushi", 91.5, 1, 15)))
+      sql("select empno,empname,utilization,count(salary),sum(empno) from alldatatypescubesort_hive where empname in ('arvind','ayushi') group by empno,empname,utilization order by empno"))
   }
 
   override def afterAll {
     sql("drop table alldatatypescubesort")
+    sql("drop table alldatatypescubesort_hive")
   }
 }
\ No newline at end of file

