carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject [6/7] carbondata git commit: [CARBONDATA-1448] fix partitionInfo null issue in CarbonTableInputFormat
Date Fri, 22 Sep 2017 11:07:13 GMT
[CARBONDATA-1448] fix partitionInfo null issue in CarbonTableInputFormat

This closes #1369


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/80fa37c5
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/80fa37c5
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/80fa37c5

Branch: refs/heads/branch-1.2
Commit: 80fa37c5547f7d19ee5471602ed16ca2b5303bcd
Parents: 1e7da59
Author: lionelcao <whucaolu@gmail.com>
Authored: Tue Sep 19 16:50:16 2017 +0800
Committer: Ravindra Pesala <ravi.pesala@gmail.com>
Committed: Fri Sep 22 11:39:15 2017 +0530

----------------------------------------------------------------------
 .../ThriftWrapperSchemaConverterImpl.java       |  4 +-
 .../core/metadata/schema/BucketingInfo.java     | 32 ++++++-
 .../core/metadata/schema/PartitionInfo.java     | 95 ++++++++++++++++++--
 .../core/metadata/schema/table/TableSchema.java | 25 ++++++
 .../src/test/resources/partition_data.csv       |  1 -
 .../partition/TestAlterPartitionTable.scala     | 69 +++++++-------
 6 files changed, 179 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/80fa37c5/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
index 2887d2a..bc877b7 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
@@ -219,7 +219,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     externalPartitionInfo.setList_info(wrapperPartitionInfo.getListInfo());
     externalPartitionInfo.setRange_info(wrapperPartitionInfo.getRangeInfo());
     externalPartitionInfo.setNum_partitions(wrapperPartitionInfo.getNumPartitions());
-    externalPartitionInfo.setMax_partition(wrapperPartitionInfo.getMAX_PARTITION());
+    externalPartitionInfo.setMax_partition(wrapperPartitionInfo.getMaxPartitionId());
     externalPartitionInfo.setPartition_ids(wrapperPartitionInfo
         .getPartitionIds());
     return externalPartitionInfo;
@@ -454,7 +454,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     wrapperPartitionInfo.setNumPartitions(externalPartitionInfo.getNum_partitions());
     wrapperPartitionInfo.setPartitionIds(externalPartitionInfo
         .getPartition_ids());
-    wrapperPartitionInfo.setMAX_PARTITION(externalPartitionInfo.getMax_partition());
+    wrapperPartitionInfo.setMaxPartitionId(externalPartitionInfo.getMax_partition());
     return wrapperPartitionInfo;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/80fa37c5/core/src/main/java/org/apache/carbondata/core/metadata/schema/BucketingInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/BucketingInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/BucketingInfo.java
index 8434601..569241d 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/BucketingInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/BucketingInfo.java
@@ -17,15 +17,20 @@
 
 package org.apache.carbondata.core.metadata.schema;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.carbondata.core.metadata.schema.table.Writable;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 /**
  * Bucketing information
  */
-public class BucketingInfo implements Serializable {
+public class BucketingInfo implements Serializable, Writable {
 
   private static final long serialVersionUID = -0L;
 
@@ -33,6 +38,10 @@ public class BucketingInfo implements Serializable {
 
   private int numberOfBuckets;
 
+  public BucketingInfo() {
+
+  }
+
   public BucketingInfo(List<ColumnSchema> listOfColumns, int numberOfBuckets) {
     this.listOfColumns = listOfColumns;
     this.numberOfBuckets = numberOfBuckets;
@@ -46,4 +55,25 @@ public class BucketingInfo implements Serializable {
     return numberOfBuckets;
   }
 
+  @Override
+  public void write(DataOutput output) throws IOException {
+    output.writeInt(numberOfBuckets);
+    output.writeInt(listOfColumns.size());
+    for (ColumnSchema aColSchema : listOfColumns) {
+      aColSchema.write(output);
+    }
+  }
+
+  @Override
+  public void readFields(DataInput input) throws IOException {
+    this.numberOfBuckets = input.readInt();
+    int colSchemaSize = input.readInt();
+    this.listOfColumns = new ArrayList<>(colSchemaSize);
+    for (int i = 0; i < colSchemaSize; i++) {
+      ColumnSchema aSchema = new ColumnSchema();
+      aSchema.readFields(input);
+      this.listOfColumns.add(aSchema);
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/80fa37c5/core/src/main/java/org/apache/carbondata/core/metadata/schema/PartitionInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/PartitionInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/PartitionInfo.java
index d0c4447..7a0e17f 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/PartitionInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/PartitionInfo.java
@@ -17,17 +17,21 @@
 
 package org.apache.carbondata.core.metadata.schema;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.core.metadata.schema.partition.PartitionType;
+import org.apache.carbondata.core.metadata.schema.table.Writable;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 /**
  * Partition information of carbon partition table
  */
-public class PartitionInfo implements Serializable {
+public class PartitionInfo implements Serializable, Writable {
 
   private static final long serialVersionUID = -0L;
 
@@ -53,7 +57,7 @@ public class PartitionInfo implements Serializable {
   /**
    * current max partition id, increase only, will be used in alter table partition operation
    */
-  private int MAX_PARTITION;
+  private int maxPartitionId;
 
   /**
    * record the partitionId in the logical ascending order
@@ -61,6 +65,10 @@ public class PartitionInfo implements Serializable {
    */
   private List<Integer> partitionIds;
 
+  public PartitionInfo() {
+
+  }
+
   public PartitionInfo(List<ColumnSchema> columnSchemaList, PartitionType partitionType) {
     this.columnSchemaList = columnSchemaList;
     this.partitionType = partitionType;
@@ -72,7 +80,7 @@ public class PartitionInfo implements Serializable {
    */
   public void  addPartition(int addPartitionCount) {
     for (int i = 0; i < addPartitionCount; i++) {
-      partitionIds.add(++MAX_PARTITION);
+      partitionIds.add(++maxPartitionId);
       numPartitions++;
     }
   }
@@ -87,7 +95,7 @@ public class PartitionInfo implements Serializable {
   public void splitPartition(int sourcePartitionIndex, int newPartitionNumbers) {
     partitionIds.remove(sourcePartitionIndex);
     for (int i = 0; i < newPartitionNumbers; i++) {
-      partitionIds.add(sourcePartitionIndex + i, ++MAX_PARTITION);
+      partitionIds.add(sourcePartitionIndex + i, ++maxPartitionId);
     }
     numPartitions = numPartitions - 1 + newPartitionNumbers;
   }
@@ -129,7 +137,7 @@ public class PartitionInfo implements Serializable {
     for (int i = 0; i < partitionNum; i++) {
       partitionIds.add(i);
     }
-    MAX_PARTITION = partitionNum - 1;
+    maxPartitionId = partitionNum - 1;
     numPartitions = partitionNum;
   }
 
@@ -141,12 +149,12 @@ public class PartitionInfo implements Serializable {
     return numPartitions;
   }
 
-  public int getMAX_PARTITION() {
-    return MAX_PARTITION;
+  public int getMaxPartitionId() {
+    return maxPartitionId;
   }
 
-  public void setMAX_PARTITION(int max_partition) {
-    this.MAX_PARTITION = max_partition;
+  public void setMaxPartitionId(int maxPartitionId) {
+    this.maxPartitionId = maxPartitionId;
   }
 
   public List<Integer> getPartitionIds() {
@@ -161,4 +169,73 @@ public class PartitionInfo implements Serializable {
     return partitionIds.get(index);
   }
 
+  @Override
+  public void write(DataOutput output) throws IOException {
+    output.writeInt(columnSchemaList.size());
+    for (ColumnSchema columnSchema: columnSchemaList) {
+      columnSchema.write(output);
+    }
+    output.writeInt(partitionType.ordinal());
+    if (PartitionType.RANGE.equals(partitionType)) {
+      output.writeInt(rangeInfo.size());
+      for (String value: rangeInfo) {
+        output.writeUTF(value);
+      }
+    }
+    output.writeInt(partitionIds.size());
+    for (Integer value: partitionIds) {
+      output.writeInt(value);
+    }
+    if (PartitionType.LIST.equals(partitionType)) {
+      output.writeInt(listInfo.size());
+      for (List<String> listValue: listInfo) {
+        output.writeInt(listValue.size());
+        for (String value: listValue) {
+          output.writeUTF(value);
+        }
+      }
+    }
+    output.writeInt(numPartitions);
+    output.writeInt(maxPartitionId);
+  }
+
+  @Override
+  public void readFields(DataInput input) throws IOException {
+    int colSchemaSize = input.readInt();
+    this.columnSchemaList = new ArrayList<>(colSchemaSize);
+    for (int i = 0;i < colSchemaSize; i++) {
+      ColumnSchema colSchema = new ColumnSchema();
+      colSchema.readFields(input);
+      this.columnSchemaList.add(colSchema);
+    }
+    this.partitionType = PartitionType.values()[input.readInt()];
+    if (PartitionType.RANGE.equals(this.partitionType)) {
+      int rangeSize = input.readInt();
+      this.rangeInfo = new ArrayList<>(rangeSize);
+      for (int i = 0; i < rangeSize; i++) {
+        rangeInfo.add(input.readUTF());
+      }
+    }
+    int partitionIdSize = input.readInt();
+    partitionIds = new ArrayList<>(partitionIdSize);
+    for (int i = 0; i < partitionIdSize; i++) {
+      partitionIds.add(input.readInt());
+    }
+    if (PartitionType.LIST.equals(partitionType)) {
+      int listInfoSize = input.readInt();
+      int aListSize;
+      this.listInfo = new ArrayList<>(listInfoSize);
+      for (int i = 0; i < listInfoSize; i++) {
+        aListSize = input.readInt();
+        List<String> aList = new ArrayList<>(aListSize);
+        for (int j = 0; j < aListSize; j++) {
+          aList.add(input.readUTF());
+        }
+        this.listInfo.add(aList);
+      }
+    }
+
+    numPartitions = input.readInt();
+    maxPartitionId = input.readInt();
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/80fa37c5/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
index a396d19..7dc41a4 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
@@ -210,6 +210,19 @@ public class TableSchema implements Serializable, Writable {
     for (ColumnSchema column : listOfColumns) {
       column.write(out);
     }
+
+    if (null != partitionInfo) {
+      out.writeBoolean(true);
+      partitionInfo.write(out);
+    } else {
+      out.writeBoolean(false);
+    }
+    if (null != bucketingInfo) {
+      out.writeBoolean(true);
+      bucketingInfo.write(out);
+    } else {
+      out.writeBoolean(false);
+    }
   }
 
   @Override
@@ -223,6 +236,18 @@ public class TableSchema implements Serializable, Writable {
       schema.readFields(in);
       this.listOfColumns.add(schema);
     }
+
+    boolean partitionExists = in.readBoolean();
+    if (partitionExists) {
+      this.partitionInfo = new PartitionInfo();
+      this.partitionInfo.readFields(in);
+    }
+
+    boolean bucketingExists = in.readBoolean();
+    if (bucketingExists) {
+      this.bucketingInfo = new BucketingInfo();
+      this.bucketingInfo.readFields(in);
+    }
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/80fa37c5/integration/spark-common-test/src/test/resources/partition_data.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/partition_data.csv b/integration/spark-common-test/src/test/resources/partition_data.csv
index ec20d92..ae83c56 100644
--- a/integration/spark-common-test/src/test/resources/partition_data.csv
+++ b/integration/spark-common-test/src/test/resources/partition_data.csv
@@ -25,4 +25,3 @@ id,vin,logdate,phonenumber,country,area,salary
 24,A41158494839,2016/07/12,625371324,China,Asia,10023
 25,A41158494840,2016/07/12,626381324,Good,OutSpace,10024
 26,A41158494843,2016/07/12,625378824,NotGood,OutSpace,10025
-27,A41158494999,2016/07/12,625378824,Other,,10026

http://git-wip-us.apache.org/repos/asf/carbondata/blob/80fa37c5/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
index 29daac9..a4a431d 100644
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
+++ b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestAlterPartitionTable.scala
@@ -242,21 +242,21 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
 
   }
 
-  ignore("Alter table add partition: List Partition") {
+  test("Alter table add partition: List Partition") {
     sql("""ALTER TABLE list_table_area ADD PARTITION ('OutSpace', 'Hi')""".stripMargin)
     val carbonTable = CarbonMetadata.getInstance().getCarbonTable("default_list_table_area")
     val partitionInfo = carbonTable.getPartitionInfo(carbonTable.getFactTableName)
     val partitionIds = partitionInfo.getPartitionIds
     val list_info = partitionInfo.getListInfo
     assert(partitionIds == List(0, 1, 2, 3, 4, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo.getMAX_PARTITION == 5)
+    assert(partitionInfo.getMaxPartitionId == 5)
     assert(partitionInfo.getNumPartitions == 6)
     assert(list_info.get(0).get(0) == "Asia")
     assert(list_info.get(1).get(0) == "America")
     assert(list_info.get(2).get(0) == "Europe")
     assert(list_info.get(3).get(0) == "OutSpace")
     assert(list_info.get(4).get(0) == "Hi")
-    validateDataFiles("default_list_table_area", "0", Seq(0, 1, 2, 4))
+    validateDataFiles("default_list_table_area", "0", Seq(1, 2, 4))
     val result_after = sql("select id, vin, logdate, phonenumber, country, area, salary from list_table_area")
     val result_origin = sql("select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin")
     checkAnswer(result_after, result_origin)
@@ -290,13 +290,13 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds2 = partitionInfo2.getPartitionIds
     val list_info2 = partitionInfo2.getListInfo
     assert(partitionIds2 == List(0, 1, 3, 4, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo2.getMAX_PARTITION == 5)
+    assert(partitionInfo2.getMaxPartitionId == 5)
     assert(partitionInfo2.getNumPartitions == 5)
     assert(list_info2.get(0).get(0) == "Asia")
     assert(list_info2.get(1).get(0) == "Europe")
     assert(list_info2.get(2).get(0) == "OutSpace")
     assert(list_info2.get(3).get(0) == "Hi")
-    validateDataFiles("default_list_table_area", "0", Seq(0, 1, 4))
+    validateDataFiles("default_list_table_area", "0", Seq(1, 4))
     checkAnswer(sql("select id, vin, logdate, phonenumber, country, area, salary from list_table_area"),
       sql("select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area <> 'America' "))
   }
@@ -308,7 +308,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds = partitionInfo.getPartitionIds
     val range_info = partitionInfo.getRangeInfo
     assert(partitionIds == List(0, 1, 2, 3, 4, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo.getMAX_PARTITION == 5)
+    assert(partitionInfo.getMaxPartitionId == 5)
     assert(partitionInfo.getNumPartitions == 6)
     assert(range_info.get(0) == "2014/01/01")
     assert(range_info.get(1) == "2015/01/01")
@@ -346,7 +346,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds1 = partitionInfo1.getPartitionIds
     val range_info1 = partitionInfo1.getRangeInfo
     assert(partitionIds1 == List(0, 1, 2, 4, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo1.getMAX_PARTITION == 5)
+    assert(partitionInfo1.getMaxPartitionId == 5)
     assert(partitionInfo1.getNumPartitions == 5)
     assert(range_info1.get(0) == "2014/01/01")
     assert(range_info1.get(1) == "2015/01/01")
@@ -377,7 +377,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds = partitionInfo.getPartitionIds
     val list_info = partitionInfo.getListInfo
     assert(partitionIds == List(0, 1, 2, 3, 6, 7, 8, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo.getMAX_PARTITION == 8)
+    assert(partitionInfo.getMaxPartitionId == 8)
     assert(partitionInfo.getNumPartitions == 8)
     assert(list_info.get(0).get(0) == "China")
     assert(list_info.get(0).get(1) == "US")
@@ -388,7 +388,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     assert(list_info.get(5).get(0) == "Good")
     assert(list_info.get(5).get(1) == "NotGood")
     assert(list_info.get(6).get(0) == "Korea")
-    validateDataFiles("default_list_table_country", "0", Seq(0, 1, 2, 3, 8))
+    validateDataFiles("default_list_table_country", "0", Seq(1, 2, 3, 8))
     val result_after = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_country""")
     val result_origin = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_country_origin""")
     checkAnswer(result_after, result_origin)
@@ -419,7 +419,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds1 = partitionInfo1.getPartitionIds
     val list_info1 = partitionInfo1.getListInfo
     assert(partitionIds1 == List(0, 1, 2, 3, 6, 7, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo1.getMAX_PARTITION == 8)
+    assert(partitionInfo1.getMaxPartitionId == 8)
     assert(partitionInfo1.getNumPartitions == 7)
     assert(list_info1.get(0).get(0) == "China")
     assert(list_info1.get(0).get(1) == "US")
@@ -442,7 +442,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds = partitionInfo.getPartitionIds
     val list_info = partitionInfo.getListInfo
     assert(partitionIds == List(0, 1, 2, 3, 6, 7, 5, 10, 11, 12).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo.getMAX_PARTITION == 12)
+    assert(partitionInfo.getMaxPartitionId == 12)
     assert(partitionInfo.getNumPartitions == 10)
     assert(list_info.get(0).get(0) == "China")
     assert(list_info.get(0).get(1) == "US")
@@ -483,44 +483,45 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
 
   test("Alter table split partition with extra space in New SubList: List Partition") {
     sql("""ALTER TABLE list_table_area ADD PARTITION ('(One,Two, Three, Four)')""".stripMargin)
-    sql("""ALTER TABLE list_table_area SPLIT PARTITION(4) INTO ('One', '(Two, Three )', 'Four')""".stripMargin)
+    sql("""ALTER TABLE list_table_area SPLIT PARTITION(6) INTO ('One', '(Two, Three )', 'Four')""".stripMargin)
     val carbonTable = CarbonMetadata.getInstance().getCarbonTable("default_list_table_area")
     val partitionInfo = carbonTable.getPartitionInfo(carbonTable.getFactTableName)
     val partitionIds = partitionInfo.getPartitionIds
     val list_info = partitionInfo.getListInfo
-    assert(partitionIds == List(0, 1, 2, 3, 5, 6, 7).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo.getMAX_PARTITION == 7)
-    assert(partitionInfo.getNumPartitions == 7)
+    assert(partitionIds == List(0, 1, 3, 4, 5, 7, 8, 9).map(Integer.valueOf(_)).asJava)
+    assert(partitionInfo.getMaxPartitionId == 9)
+    assert(partitionInfo.getNumPartitions == 8)
     assert(list_info.get(0).get(0) == "Asia")
-    assert(list_info.get(1).get(0) == "America")
-    assert(list_info.get(2).get(0) == "Europe")
-    assert(list_info.get(3).get(0) == "One")
-    assert(list_info.get(4).get(0) == "Two")
-    assert(list_info.get(4).get(1) == "Three")
-    assert(list_info.get(5).get(0) == "Four")
-    validateDataFiles("default_list_table_area", "0", Seq(0, 1, 2))
+    assert(list_info.get(1).get(0) == "Europe")
+    assert(list_info.get(2).get(0) == "OutSpace")
+    assert(list_info.get(3).get(0) == "Hi")
+    assert(list_info.get(4).get(0) == "One")
+    assert(list_info.get(5).get(0) == "Two")
+    assert(list_info.get(5).get(1) == "Three")
+    assert(list_info.get(6).get(0) == "Four")
+    validateDataFiles("default_list_table_area", "0", Seq(1, 4))
     val result_after = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area""")
-    val result_origin = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin""")
+    val result_origin = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area <> 'America' """)
     checkAnswer(result_after, result_origin)
 
     val result_after1 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area where area < 'Four' """)
-    val result_origin1 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area < 'Four' """)
+    val result_origin1 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area < 'Four' and area <> 'America' """)
     checkAnswer(result_after1, result_origin1)
 
     val result_after2 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area where area <= 'Four' """)
-    val result_origin2 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area <= 'Four' """)
+    val result_origin2 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area <= 'Four' and area <> 'America'  """)
     checkAnswer(result_after2, result_origin2)
 
     val result_after3 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area where area = 'Four' """)
-    val result_origin3 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area = 'Four' """)
+    val result_origin3 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area = 'Four' and area <> 'America'  """)
     checkAnswer(result_after3, result_origin3)
 
     val result_after4 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area where area >= 'Four' """)
-    val result_origin4 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area >= 'Four' """)
+    val result_origin4 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area >= 'Four' and area <> 'America'  """)
     checkAnswer(result_after4, result_origin4)
 
     val result_after5 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area where area > 'Four' """)
-    val result_origin5 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area > 'Four' """)
+    val result_origin5 = sql("""select id, vin, logdate, phonenumber, country, area, salary from list_table_area_origin where area > 'Four' and area <> 'America'  """)
     checkAnswer(result_after5, result_origin5)
   }
 
@@ -531,7 +532,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds = partitionInfo.getPartitionIds
     val rangeInfo = partitionInfo.getRangeInfo
     assert(partitionIds == List(0, 1, 2, 3, 5, 6).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo.getMAX_PARTITION == 6)
+    assert(partitionInfo.getMaxPartitionId == 6)
     assert(partitionInfo.getNumPartitions == 6)
     assert(rangeInfo.get(0) == "2014/01/01")
     assert(rangeInfo.get(1) == "2015/01/01")
@@ -569,7 +570,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds1 = partitionInfo1.getPartitionIds
     val rangeInfo1 = partitionInfo1.getRangeInfo
     assert(partitionIds1 == List(0, 1, 2, 3, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo1.getMAX_PARTITION == 6)
+    assert(partitionInfo1.getMaxPartitionId == 6)
     assert(partitionInfo1.getNumPartitions == 5)
     assert(rangeInfo1.get(0) == "2014/01/01")
     assert(rangeInfo1.get(1) == "2015/01/01")
@@ -589,7 +590,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds = partitionInfo.getPartitionIds
     val rangeInfo = partitionInfo.getRangeInfo
     assert(partitionIds == List(0, 1, 2, 3, 5, 6).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo.getMAX_PARTITION == 6)
+    assert(partitionInfo.getMaxPartitionId == 6)
     assert(partitionInfo.getNumPartitions == 6)
     assert(rangeInfo.get(0) == "2014/01/01")
     assert(rangeInfo.get(1) == "2015/01/01")
@@ -627,7 +628,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds1 = partitionInfo1.getPartitionIds
     val rangeInfo1 = partitionInfo1.getRangeInfo
     assert(partitionIds1 == List(0, 1, 2, 3, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo1.getMAX_PARTITION == 6)
+    assert(partitionInfo1.getMaxPartitionId == 6)
     assert(partitionInfo1.getNumPartitions == 5)
     assert(rangeInfo1.get(0) == "2014/01/01")
     assert(rangeInfo1.get(1) == "2015/01/01")
@@ -645,7 +646,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds2 = partitionInfo2.getPartitionIds
     val rangeInfo2 = partitionInfo2.getRangeInfo
     assert(partitionIds2 == List(0, 1, 2, 5).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo2.getMAX_PARTITION == 6)
+    assert(partitionInfo2.getMaxPartitionId == 6)
     assert(partitionInfo2.getNumPartitions == 4)
     assert(rangeInfo2.get(0) == "2014/01/01")
     assert(rangeInfo2.get(1) == "2015/01/01")
@@ -662,7 +663,7 @@ class TestAlterPartitionTable extends QueryTest with BeforeAndAfterAll {
     val partitionIds3 = partitionInfo3.getPartitionIds
     val rangeInfo3 = partitionInfo3.getRangeInfo
     assert(partitionIds3 == List(0, 1, 2).map(Integer.valueOf(_)).asJava)
-    assert(partitionInfo3.getMAX_PARTITION == 6)
+    assert(partitionInfo3.getMaxPartitionId == 6)
     assert(partitionInfo3.getNumPartitions == 3)
     assert(rangeInfo3.get(0) == "2014/01/01")
     assert(rangeInfo3.get(1) == "2015/01/01")


Mime
View raw message