carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kunalkap...@apache.org
Subject [1/2] carbondata git commit: [CARBONDATA-2585][CARBONDATA-2586][Local Dictionary]Added test cases for local dictionary support for alter table, set, unset and preaggregate
Date Fri, 06 Jul 2018 09:20:24 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master a7c4b4878 -> 806e9b5a8


http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
index 692b95b..5398613 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/localdictionary/LocalDictionarySupportCreateTableTest.scala
@@ -1,3 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.carbondata.spark.testsuite.localdictionary
 
 import org.apache.spark.sql.test.util.QueryTest
@@ -77,8 +94,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column:  does not exist in table. " +
-        "Please check " +
-        "create table statement"))
+        "Please check the DDL."))
   }
 
   test("test local dictionary custom configurations for local dict columns _004") {
@@ -95,9 +111,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception1.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc does not exist in table. " +
-        "Please check " +
-        "create table " +
-        "statement"))
+        "Please check the DDL."))
   }
 
   test("test local dictionary custom configurations for local dict columns _005") {
@@ -471,8 +485,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column:  does not exist in table. " +
-        "Please check " +
-        "create table statement"))
+        "Please check the DDL."))
 
   }
 
@@ -490,9 +503,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception1.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc does not exist in table. " +
-        "Please check " +
-        "create table " +
-        "statement"))
+        "Please check the DDL."))
   }
 
  test("test local dictionary custom configurations when enabled for local dict columns _005") {
@@ -563,7 +574,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE contains Duplicate Columns: name. " +
-        "Please check create table statement."))
+        "Please check the DDL."))
   }
 
   test(
@@ -582,8 +593,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column:  does not exist in table. " +
-        "Please check " +
-        "create table statement"))
+        "Please check the DDL."))
 
   }
 
@@ -603,9 +613,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception1.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc does not exist in table. " +
-        "Please check " +
-        "create table " +
-        "statement"))
+        "Please check the DDL."))
   }
 
   test(
@@ -1675,8 +1683,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column:  does not exist in table. " +
-        "Please check " +
-        "create table statement"))
+        "Please check the DDL."))
 
   }
 
@@ -1703,9 +1710,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception1.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc does not exist in table. " +
-        "Please check " +
-        "create table " +
-        "statement"))
+        "Please check the DDL."))
   }
 
   test(
@@ -1829,8 +1834,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column:  does not exist in table. " +
-        "Please check " +
-        "create table statement"))
+        "Please check the DDL."))
   }
 
   test(
@@ -1856,9 +1860,7 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
     assert(exception1.getMessage
       .contains(
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc does not exist in table. " +
-        "Please check " +
-        "create table " +
-        "statement"))
+        "Please check the DDL."))
   }
 
   test(
@@ -2341,311 +2343,6 @@ class LocalDictionarySupportCreateTableTest extends QueryTest with BeforeAndAfte
         "local_dictionary_include are not of string dataType."))
   }
 
-  test("test alter table add column") {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_threshold'='20000','local_dictionary_include'='city','no_inverted_index'='name')
-      """.stripMargin)
-    sql("alter table local1 add columns (alt string) tblproperties('local_dictionary_include'='alt')")
-    val descLoc = sql("describe formatted local1").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
-      case Some(row) => assert(row.get(1).toString.contains("20000"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
-      case Some(row) => assert(row.get(1).toString.contains("true"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Include")) match {
-      case Some(row) => assert(row.get(1).toString.contains("city,alt"))
-    }
-  }
-
-  test("test alter table add column default configs for local dictionary") {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_threshold'='20000','no_inverted_index'='name')
-      """.stripMargin)
-    sql("alter table local1 add columns (alt string)")
-    val descLoc = sql("describe formatted local1").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
-      case Some(row) => assert(row.get(1).toString.contains("20000"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
-      case Some(row) => assert(row.get(1).toString.contains("true"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Include")) match {
-      case Some(row) => assert(row.get(1).toString.contains("name,city,alt"))
-    }
-  }
-
-  test("test alter table add column where same column is in dictionary include and local dictionary include") {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_threshold'='20000','local_dictionary_include'='city','no_inverted_index'='name')
-      """.stripMargin)
-    val exception = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string) tblproperties('local_dictionary_include'='alt','dictionary_include'='alt')")
-    }
-    assert(exception.getMessage
-      .contains(
-        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: alt specified in Dictionary " +
-        "include. Local Dictionary will not be generated for Dictionary include columns. Please " +
-        "check create table statement."))
-  }
-
-  test("test alter table add column where duplicate columns present in local dictionary include")
{
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_threshold'='20000','local_dictionary_include'='city','no_inverted_index'='name')
-      """.stripMargin)
-    val exception = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string) tblproperties('local_dictionary_include'='alt,alt')")
-    }
-    assert(exception.getMessage
-      .contains(
-        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE contains Duplicate Columns: alt. " +
-        "Please check create table statement."))
-  }
-
-  test("test alter table add column where duplicate columns present in local dictionary include/exclude")
-  {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_threshold'='20000','local_dictionary_include'='city',
-        | 'no_inverted_index'='name')
-      """.stripMargin)
-    val exception1 = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string) tblproperties" +
-        "('local_dictionary_include'='abc')")
-    }
-    assert(exception1.getMessage
-      .contains(
-        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc does not exist in table. " +
-        "Please check create table statement."))
-    val exception2 = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string) tblproperties" +
-        "('local_dictionary_exclude'='abc')")
-    }
-    assert(exception2.getMessage
-      .contains(
-        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc does not exist in table. " +
-        "Please check create table statement."))
-  }
-
-  test("test alter table add column for datatype validation")
-  {
-    sql("drop table if exists local1")
-    sql(
-      """ | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_include'='city', 'no_inverted_index'='name')
-      """.stripMargin)
-    val exception = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string,abc int) tblproperties" +
-        "('local_dictionary_include'='abc')")
-    }
-    assert(exception.getMessage
-      .contains(
-        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: abc is not a String/complex " +
-        "datatype column. LOCAL_DICTIONARY_COLUMN should be no dictionary string/complex datatype" +
-        " column.Please check create table statement."))
-  }
-
-  test("test alter table add column where duplicate columns are present in local dictionary include and exclude")
-  {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_include'='city', 'no_inverted_index'='name')
-      """.stripMargin)
-    val exception = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string,abc string) tblproperties" +
-        "('local_dictionary_include'='abc','local_dictionary_exclude'='alt,abc')")
-    }
-    assert(exception.getMessage
-      .contains(
-        "Column ambiguity as duplicate column(s):abc is present in LOCAL_DICTIONARY_INCLUDE " +
-        "and LOCAL_DICTIONARY_EXCLUDE. Duplicate columns are not allowed."))
-  }
-
-  test("test alter table add column unsupported table property")
-  {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_include'='city', 'no_inverted_index'='name')
-      """.stripMargin)
-    val exception = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string,abc string) tblproperties" +
-        "('local_dictionary_enable'='abc')")
-    }
-    assert(exception.getMessage
-      .contains(
-        "Unsupported Table property in add column: local_dictionary_enable"))
-    val exception1 = intercept[MalformedCarbonCommandException] {
-      sql(
-        "alter table local1 add columns (alt string,abc string) tblproperties" +
-        "('local_dictionary_threshold'='10000')")
-    }
-    assert(exception1.getMessage
-      .contains(
-        "Unsupported Table property in add column: local_dictionary_threshold"))
-  }
-
-  test("test alter table add column when main table is disabled for local dictionary")
-  {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false',
-        | 'local_dictionary_include'='city', 'no_inverted_index'='name')
-      """.stripMargin)
-    sql(
-      "alter table local1 add columns (alt string,abc string) tblproperties" +
-      "('local_dictionary_include'='abc')")
-    val descLoc = sql("describe formatted local1").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
-      case Some(row) => assert(row.get(1).toString.contains("false"))
-    }
-
-    checkExistence(sql("DESC FORMATTED local1"), false,
-      "Local Dictionary Include")
-  }
-
-  test("test local dictionary threshold for boundary values") {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_threshold'='300000')
-      """.stripMargin)
-    val descLoc = sql("describe formatted local1").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
-      case Some(row) => assert(row.get(1).toString.contains("10000"))
-    }
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_threshold'='500')
-      """.stripMargin)
-    val descLoc1 = sql("describe formatted local1").collect
-    descLoc1.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
-      case Some(row) => assert(row.get(1).toString.contains("10000"))
-    }
-  }
-
-  test("test alter table add column for local dictionary include and exclude configs")
-  {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
-        | 'local_dictionary_include'='city', 'no_inverted_index'='name')
-      """.stripMargin)
-    sql(
-      "alter table local1 add columns (alt string,abc string) tblproperties" +
-      "('local_dictionary_include'='abc','local_dictionary_exclude'='alt')")
-    val descLoc = sql("describe formatted local1").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
-      case Some(row) => assert(row.get(1).toString.contains("true"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Include")) match {
-      case Some(row) => assert(row.get(1).toString.contains("city,abc"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Exclude")) match {
-      case Some(row) => assert(row.get(1).toString.contains("alt"))
-    }
-  }
-
-  test("test preaggregate table local dictionary enabled table")
-  {
-    sql("drop table if exists local1")
-    sql("CREATE TABLE local1 (id Int, date date, country string, phonetype string, " +
-        "serialname String,salary int ) STORED BY 'org.apache.carbondata.format' " +
-        "tblproperties('dictionary_include'='country','local_dictionary_enable'='true','local_dictionary_include' = 'phonetype','local_dictionary_exclude' ='serialname')")
-    sql("create datamap PreAggCount on table local1 using 'preaggregate' as " +
-        "select country,count(salary) as count from local1 group by country")
-    val descLoc = sql("describe formatted local1_PreAggCount").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
-      case Some(row) => assert(row.get(1).toString.contains("10000"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Include")) match {
-      case Some(row) => assert(row.get(1).toString.contains("phonetype"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Exclude")) match {
-      case Some(row) => assert(row.get(1).toString.contains("serialname"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
-      case Some(row) => assert(row.get(1).toString.contains("true"))
-    }
-  }
-
-  test("test local dictionary foer varchar datatype columns") {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_include'='city',
-        | 'LONG_STRING_COLUMNS'='city')
-      """.stripMargin)
-    val descLoc = sql("describe formatted local1").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Include")) match {
-      case Some(row) => assert(row.get(1).toString.contains("city"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
-      case Some(row) => assert(row.get(1).toString.contains("10000"))
-    }
-  }
-
-  test("test local dictionary describe formatted only with default configs")
-  {
-    sql("drop table if exists local1")
-    sql(
-      """
-        | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'carbondata'
-      """.stripMargin)
-
-    val descLoc = sql("describe formatted local1").collect
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Enabled")) match {
-      case Some(row) => assert(row.get(1).toString.contains("true"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Threshold")) match {
-      case Some(row) => assert(row.get(1).toString.contains("10000"))
-    }
-    descLoc.find(_.get(0).toString.contains("Local Dictionary Include")) match {
-      case Some(row) => assert(row.get(1).toString.contains("name,city"))
-    }
-  }
-
   override protected def afterAll(): Unit = {
     sql("DROP TABLE IF EXISTS LOCAL1")
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
index 6a73140..b3f56a2 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CarbonScalaUtil.scala
@@ -636,7 +636,7 @@ object CarbonScalaUtil {
       val errMsg =
        "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE contains Duplicate Columns: " +
         duplicateColumns.mkString(",") +
-        ". Please check create table statement."
+        ". Please check the DDL."
       throw new MalformedCarbonCommandException(errMsg)
     }
 
@@ -652,8 +652,7 @@ object CarbonScalaUtil {
           val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " +
                          commonColumn.mkString(",") +
                         " specified in Dictionary include. Local Dictionary will not be " +
-                         "generated for Dictionary include columns. Please check create table " +
-                         "statement."
+                         "generated for Dictionary include columns. Please check the DDL."
           throw new MalformedCarbonCommandException(errormsg)
         }
       }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index 6bca7a4..84fd25f 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -452,7 +452,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser
{
     localDictColumns.foreach { distCol =>
       if (!fields.exists(x => x.column.equalsIgnoreCase(distCol.trim))) {
        val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " + distCol.trim +
-                       " does not exist in table. Please check create table statement."
+                       " does not exist in table. Please check the DDL."
         throw new MalformedCarbonCommandException(errormsg)
       }
     }
@@ -467,8 +467,7 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser
{
         val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " +
                        dictColm.trim +
                       " is not a String/complex datatype column. LOCAL_DICTIONARY_COLUMN should " +
-                       "be no dictionary string/complex datatype column.Please check create table" +
-                       " statement."
+                       "be no dictionary string/complex datatype column.Please check the DDL."
         throw new MalformedCarbonCommandException(errormsg)
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index 8c7de56..8a014ce 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -21,6 +21,8 @@ import java.util
 import java.util.UUID
 
 import scala.collection.JavaConverters._
+import scala.collection.mutable
+import scala.collection.mutable.ListBuffer
 
 import org.apache.spark.SparkContext
 import org.apache.spark.sql.SQLContext
@@ -35,8 +37,10 @@ import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
 import org.apache.carbondata.core.metadata.datatype.{DataType, DataTypes, DecimalType}
 import org.apache.carbondata.core.metadata.encoder.Encoding
 import org.apache.carbondata.core.metadata.schema._
-import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, RelationIdentifier, TableInfo, TableSchema}
-import org.apache.carbondata.core.metadata.schema.table.column.{ColumnSchema, ParentColumnTableRelation}
+import org.apache.carbondata.core.metadata.schema.table.{CarbonTable, RelationIdentifier,
+  TableInfo, TableSchema}
+import org.apache.carbondata.core.metadata.schema.table.column.{ColumnSchema,
+  ParentColumnTableRelation}
 import org.apache.carbondata.core.service.impl.ColumnUniqueIdGenerator
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, SegmentUpdateStatusManager}
 import org.apache.carbondata.core.util.{CarbonUtil, DataTypeUtil}
@@ -278,14 +282,14 @@ class AlterTableColumnSchemaGenerator(
     // Its based on the dimension name and measure name
     allColumns.filter(x => !x.isInvisible).groupBy(_.getColumnName)
       .foreach(f => if (f._2.size > 1) {
-      val name = f._1
-      LOGGER.error(s"Duplicate column found with name: $name")
-      LOGGER.audit(
-        s"Validation failed for Create/Alter Table Operation " +
-        s"for ${ dbName }.${ alterTableModel.tableName }. " +
-        s"Duplicate column found with name: $name")
-      sys.error(s"Duplicate column found with name: $name")
-    })
+        val name = f._1
+        LOGGER.error(s"Duplicate column found with name: $name")
+        LOGGER.audit(
+          s"Validation failed for Create/Alter Table Operation " +
+          s"for ${ dbName }.${ alterTableModel.tableName }. " +
+          s"Duplicate column found with name: $name")
+        sys.error(s"Duplicate column found with name: $name")
+      })
 
     if (newCols.exists(_.getDataType.isComplexType)) {
       LOGGER.error(s"Complex column cannot be added")
@@ -299,6 +303,81 @@ class AlterTableColumnSchemaGenerator(
     val columnValidator = CarbonSparkFactory.getCarbonColumnValidator
     columnValidator.validateColumns(allColumns)
 
+
+    def getLocalDictColumnList(tableProperties: scala.collection.mutable.Map[String, String],
+        columns: scala.collection.mutable.ListBuffer[ColumnSchema]): (scala.collection.mutable
+    .ListBuffer[ColumnSchema], scala.collection.mutable.ListBuffer[ColumnSchema]) = {
+      val includeColumns = new scala.collection.mutable.ListBuffer[ColumnSchema]
+      val excludeColumns = new scala.collection.mutable.ListBuffer[ColumnSchema]
+      val localDictIncludeColumns = if (tableProperties
+        .get(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE).isDefined) {
+        tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE)
+      } else {
+        null
+      }
+      val localDictExcludeColumns = if (tableProperties
+        .get(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE).isDefined) {
+        tableProperties(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE)
+      } else {
+        null
+      }
+      if (null != localDictIncludeColumns) {
+        if (null == localDictExcludeColumns) {
+          columns.foreach { column =>
+            if (localDictIncludeColumns.contains(column.getColumnName)) {
+              includeColumns.append(column)
+            } else {
+              if (column.getDataType.equals(DataTypes.STRING) ||
+                  column.getDataType.toString.equals("ARRAY") ||
+                  column.getDataType.toString.equals("STRUCT")) {
+                excludeColumns.append(column)
+              }
+            }
+          }
+        } else {
+          columns.foreach { column =>
+            if (localDictIncludeColumns.contains(column.getColumnName) &&
+                !localDictExcludeColumns.contains(column.getColumnName)) {
+              includeColumns.append(column)
+            } else if (localDictExcludeColumns.contains(column.getColumnName)) {
+              excludeColumns.append(column)
+            }
+          }
+        }
+      } else {
+        if (null == localDictExcludeColumns) {
+          columns.foreach { column =>
+            if (column.getDataType.equals(DataTypes.STRING) ||
+                column.getDataType.toString.equals("ARRAY") ||
+                column.getDataType.toString.equals("STRUCT")) {
+              includeColumns.append(column)
+            }
+          }
+        } else {
+          columns.foreach { column =>
+            if (!localDictExcludeColumns.contains(column.getColumnName) &&
+                (column.getDataType.equals(DataTypes.STRING) ||
+                 column.getDataType.toString.equals("ARRAY") ||
+                 column.getDataType.toString.equals("STRUCT"))) {
+              includeColumns.append(column)
+            } else if (localDictExcludeColumns.contains(column.getColumnName)) {
+              excludeColumns.append(column)
+            }
+          }
+        }
+      }
+
+      (includeColumns, excludeColumns)
+    }
+
+    val columnsWithoutNewCols = new scala.collection.mutable.ListBuffer[ColumnSchema]
+    allColumns.foreach { column =>
+      if (!newCols.exists(x => x.getColumnName.equalsIgnoreCase(column.getColumnName))) {
+        columnsWithoutNewCols += column
+      }
+    }
+
+
     if (alterTableModel.tableProperties != null) {
       CarbonUtil
         .setLocalDictColumnsToWrapperSchema(newCols.asJava,
@@ -306,18 +385,47 @@ class AlterTableColumnSchemaGenerator(
           tableSchema.getTableProperties.get(CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE))
     }
 
+    val includeExcludeColOfMainTable = getLocalDictColumnList(tableSchema.getTableProperties
+      .asScala,
+      columnsWithoutNewCols)
+    val alterMutableTblProperties: scala.collection.mutable.Map[String, String] = mutable
+      .Map(alterTableModel.tableProperties.toSeq: _*)
+    val includeExcludeColOfAlterTable = getLocalDictColumnList(alterMutableTblProperties,
+      newCols.to[mutable.ListBuffer])
+
+    // Append all Local Dictionary Include and Exclude columns of Alter Table to that of Main Table
+    includeExcludeColOfMainTable._1.appendAll(includeExcludeColOfAlterTable._1)
+    includeExcludeColOfMainTable._2.appendAll(includeExcludeColOfAlterTable._2)
+
+    val localDictionaryIncludeColumn = new StringBuilder
+    val localDictionaryExcludeColumn = new StringBuilder
+    includeExcludeColOfMainTable._1.foreach { column =>
+      localDictionaryIncludeColumn.append(column.getColumnName).append(",")
+    }
+    includeExcludeColOfMainTable._2.foreach { column =>
+      localDictionaryExcludeColumn.append(column.getColumnName).append(",")
+    }
+
     // populate table properties map
     val tablePropertiesMap = tableSchema.getTableProperties
     alterTableModel.tableProperties.foreach {
-      x => val value = tablePropertiesMap.get(x._1)
+      case (key, mapValue) =>
+        val value = tablePropertiesMap.get(key)
         if (null != value) {
-          if (value != x._2) {
-            tablePropertiesMap.put(x._1, value + "," + x._2)
+          if (value != mapValue) {
+            tablePropertiesMap.put(key, value + "," + mapValue)
           }
         } else {
-          tablePropertiesMap.put(x._1, x._2)
+          tablePropertiesMap.put(key, mapValue)
         }
     }
+
+    // The Final Map should contain the combined Local Dictionary Include and
+    // Local Dictionary Exclude Columns from both Main table and Alter table
+    tablePropertiesMap
+      .put(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE, localDictionaryIncludeColumn.toString())
+    tablePropertiesMap
+      .put(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE, localDictionaryExcludeColumn.toString())
     // This part will create dictionary file for all newly added dictionary columns
     // if valid default value is provided,
     // then that value will be included while creating dictionary file
@@ -341,7 +449,7 @@ class AlterTableColumnSchemaGenerator(
           }
         }
         else if (elem._1.equalsIgnoreCase("no_inverted_index") &&
-          (elem._2.split(",").contains(col.getColumnName))) {
+                 (elem._2.split(",").contains(col.getColumnName))) {
           col.getEncodingList.remove(Encoding.INVERTED_INDEX)
         }
       }
@@ -391,7 +499,7 @@ object TableNewProcessor {
     if (dataType == DataTypes.DATE) {
       encoders.add(Encoding.DIRECT_DICTIONARY)
     }
-    if (dataType == DataTypes.TIMESTAMP && ! highCardinalityDims.contains(colName)) {
+    if (dataType == DataTypes.TIMESTAMP && !highCardinalityDims.contains(colName)) {
       encoders.add(Encoding.DIRECT_DICTIONARY)
     }
     columnSchema.setEncodingList(encoders)
@@ -456,7 +564,7 @@ class TableNewProcessor(cm: TableModel) {
     columnSchema.setDataType(dataType)
     columnSchema.setColumnName(colName)
     val isParentColumnRelation = map.isDefined && map.get.get(field).isDefined
-    if(!isParentColumnRelation) {
+    if (!isParentColumnRelation) {
       val highCardinalityDims = cm.highcardinalitydims.getOrElse(Seq())
       if (highCardinalityDims.contains(colName)) {
         encoders.remove(Encoding.DICTIONARY)
@@ -482,7 +590,7 @@ class TableNewProcessor(cm: TableModel) {
     if (isVarcharColumn(colName)) {
       columnSchema.setDataType(DataTypes.VARCHAR)
     }
-    if(isParentColumnRelation) {
+    if (isParentColumnRelation) {
       val dataMapField = map.get.get(field).get
       columnSchema.setFunction(dataMapField.aggregateFunction)
       val columnRelationList = dataMapField.columnTableRelationList.get
@@ -606,7 +714,7 @@ class TableNewProcessor(cm: TableModel) {
          cm.dataMapRelation.get.get(field).get.aggregateFunction.equalsIgnoreCase("sum") ||
           cm.dataMapRelation.get.get(field).get.aggregateFunction.equals("avg") ||
           cm.dataMapRelation.get.get(field).get.aggregateFunction.equals("count")
-        if(!isAggFunPresent) {
+        if (!isAggFunPresent) {
           cm.parentTable.get.getColumnByName(
             cm.parentTable.get.getTableName,
             cm.dataMapRelation.get.get(field).get.columnTableRelationList.get(0).parentColumnName)
@@ -713,9 +821,12 @@ class TableNewProcessor(cm: TableModel) {
               colSchema
             } else {
               LOGGER.error(s"Bucket field must be dimension column and " +
-                           s"should not be measure or complex column: ${colSchema.getColumnName}")
+                           s"should not be measure or complex column: ${ colSchema.getColumnName }")
              CarbonException.analysisException(s"Bucket field must be dimension column and " +
-                        s"should not be measure or complex column: ${colSchema.getColumnName}")
+                                                s"should not be measure or complex column: ${
+                                                  colSchema
+                                                    .getColumnName
+                                                }")
             }
           case _ =>
             LOGGER.error(s"Bucket field is not present in table columns")
@@ -750,13 +861,14 @@ class TableNewProcessor(cm: TableModel) {
 
   /**
    * Method to check to get the encoder from parent or not
+   *
    * @param field column field
    * @return get encoder from parent
    */
-  private def getEncoderFromParent(field: Field) : Boolean = {
-     cm.parentTable.isDefined &&
-        cm.dataMapRelation.get.get(field).isDefined &&
-        cm.dataMapRelation.get.get(field).get.columnTableRelationList.size==1
+  private def getEncoderFromParent(field: Field): Boolean = {
+    cm.parentTable.isDefined &&
+    cm.dataMapRelation.get.get(field).isDefined &&
+    cm.dataMapRelation.get.get(field).get.columnTableRelationList.size == 1
   }
 
   //  For checking if the specified col group columns are specified in fields list.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
index a82298c..7022e98 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/schema/CarbonAlterTableDropColumnCommand.scala
@@ -75,6 +75,16 @@ private[sql] case class CarbonAlterTableDropColumnCommand(
                                                   s"$partitionColumns")
         }
       }
+
+      // Check if column to be dropped is of complex dataType
+      alterTableDropColumnModel.columns.foreach { column =>
+        if (carbonTable.getColumnByName(alterTableDropColumnModel.tableName, column).getDataType
+          .isComplexType) {
+          val errMsg = "Complex column cannot be dropped"
+          throw new MalformedCarbonCommandException(errMsg)
+        }
+      }
+
       val tableColumns = carbonTable.getCreateOrderColumn(tableName).asScala
       var dictionaryColumns = Seq[org.apache.carbondata.core.metadata.schema.table.column
       .ColumnSchema]()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
index 6b11f51..2aa4446 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.hive.CarbonRelation
 import org.codehaus.jackson.map.ObjectMapper
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.metadata.datatype.DataTypes
 import org.apache.carbondata.core.metadata.encoder.Encoding
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema
 import org.apache.carbondata.core.util.CarbonUtil
@@ -128,26 +129,27 @@ private[sql] case class CarbonDescribeFormattedCommand(
       localDictThreshold = localDictionaryThreshold { 0 }
       results ++= Seq(("Local Dictionary Threshold", localDictThreshold, ""))
       val columns = carbonTable.getTableInfo.getFactTable.getListOfColumns.asScala
+      val builder = new StringBuilder
+      columns.foreach { column =>
+        if (column.isLocalDictColumn && !column.isInvisible) {
+          builder.append(column.getColumnName).append(",")
+        }
+      }
+      results ++=
+      Seq(("Local Dictionary Include", getDictColumnString(builder.toString().split(",")), ""))
       if (tblProps.asScala
-        .get(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE).isDefined) {
-        val allLocalDictColumns = tblProps.asScala(CarbonCommonConstants.LOCAL_DICTIONARY_INCLUDE)
-          .split(",")
-        results ++= Seq(("Local Dictionary Include", getDictColumnString(allLocalDictColumns), ""))
-      } else {
+        .get(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE).isDefined) {
+        val columns = carbonTable.getTableInfo.getFactTable.getListOfColumns.asScala
         val builder = new StringBuilder
         columns.foreach { column =>
-          if (column.isLocalDictColumn) {
+          if (!column.isLocalDictColumn && !column.isInvisible &&
+              (column.getDataType.equals(DataTypes.STRING) ||
+               column.getDataType.equals(DataTypes.VARCHAR))) {
             builder.append(column.getColumnName).append(",")
           }
         }
         results ++=
-        Seq(("Local Dictionary Include", getDictColumnString(builder.toString().split(",")), ""))
-      }
-      if (tblProps.asScala
-        .get(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE).isDefined) {
-        val allLocalDictColumns = tblProps.asScala(CarbonCommonConstants.LOCAL_DICTIONARY_EXCLUDE)
-          .split(",")
-        results ++= Seq(("Local Dictionary Exclude", getDictColumnString(allLocalDictColumns), ""))
+        Seq(("Local Dictionary Exclude", getDictColumnString(builder.toString().split(",")), ""))
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
index 76cea55..4659c39 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/util/AlterTableUtil.scala
@@ -672,11 +672,12 @@ object AlterTableUtil {
   def ValidateSetTablePropertiesForLocalDict(tblPropertiesMap: mutable.Map[String, String],
       carbonTable: CarbonTable,
       property: (String, String)): Unit = {
+    var primitiveComplexChildColumns = new mutable.HashSet[String]
     var localDictColumns: Seq[String] = Seq[String]()
     var dictIncludeColumns: Seq[String] = Seq[String]()
 
     val allColumns = carbonTable.getTableInfo.getFactTable.getListOfColumns.asScala
-    localDictColumns = property._2.toString.split(",").map(_.trim)
+    localDictColumns = property._2.toString.toLowerCase.split(",").map(_.trim)
 
     CarbonScalaUtil.validateLocalDictionaryColumns(tblPropertiesMap, localDictColumns)
 
@@ -684,7 +685,7 @@ object AlterTableUtil {
     localDictColumns.foreach { distCol =>
       if (!allColumns.exists(x => x.getColumnName.equalsIgnoreCase(distCol.trim))) {
        val errormsg = "LOCAL_DICTIONARY_INCLUDE/LOCAL_DICTIONARY_EXCLUDE column: " + distCol.trim +
-                       " does not exist in table. Please check create table statement."
+                       " does not exist in table. Please check the DDL."
         throw new MalformedCarbonCommandException(errormsg)
       }
     }
@@ -720,7 +721,7 @@ object AlterTableUtil {
     }
 
     /**
-     * check whether any child column present in comples type column is string type
+     * check whether any child column present in complex type column is string type
      *
      * @param schemas
      * @return
@@ -732,13 +733,15 @@ object AlterTableUtil {
       schemas.foreach { column =>
         if (childColumnCount > 0) {
           if (column.getDataType.equals(DataTypes.STRING)) {
+            primitiveComplexChildColumns.add(column.getColumnName)
             numberOfPrimitiveColumns += 1
             childColumnCount -= 1
           } else {
             childColumnCount -= 1
           }
         }
-        if (localDictColumns.exists(x => x.equalsIgnoreCase(column.getColumnName)) &&
+        if ((localDictColumns.exists(x => x.equalsIgnoreCase(column.getColumnName)) ||
+             primitiveComplexChildColumns.contains(column.getColumnName)) &&
             column.getNumberOfChild > 0) {
           childColumnCount = column.getNumberOfChild
         }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/806e9b5a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
index 58c4821..a713c7b 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
@@ -112,6 +112,15 @@ class DropColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
     sql("drop table if exists preaggMain_preagg1")
   }
 
+  test("test dropping of complex column should throw exception") {
+    sql("drop table if exists maintbl")
+    sql("create table maintbl (a string, b string, c struct<si:int>) stored by 'carbondata'")
+    assert(intercept[ProcessMetaDataException] {
+      sql("alter table maintbl drop columns(b,c)").show
+    }.getMessage.contains("Complex column cannot be dropped"))
+    sql("drop table if exists maintbl")
+  }
+
   override def afterAll {
     sql("DROP TABLE IF EXISTS dropcolumntest")
     sql("DROP TABLE IF EXISTS hivetable")


Mime
View raw message