carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject [1/2] incubator-carbondata git commit: added validation that the bucket number is not negative
Date Sun, 15 Jan 2017 17:05:48 GMT
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 11c2b33c2 -> 7d2159681


added validation that the bucket number is not negative

formatted the code

corrected the table name for testbucketing class

refactor the code

reutilized the imports

removed the java style


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/2b4f5069
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/2b4f5069
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/2b4f5069

Branch: refs/heads/master
Commit: 2b4f5069500ed0206189deebe94776187f18c0fe
Parents: 11c2b33
Author: anubhav100 <anubhav.tarar@knoldus.in>
Authored: Tue Jan 10 11:01:52 2017 +0530
Committer: ravipesala <ravi.pesala@gmail.com>
Committed: Sun Jan 15 22:34:37 2017 +0530

----------------------------------------------------------------------
 .../org/apache/spark/sql/CarbonSource.scala     | 16 +++++++++----
 .../spark/sql/parser/CarbonSparkSqlParser.scala | 24 ++++++++++++--------
 .../bucketing/TableBucketingTestCase.scala      | 19 ++++++++++++++++
 3 files changed, 44 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/2b4f5069/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
index 328c49c..b44b066 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
@@ -135,14 +135,20 @@ class CarbonSource extends CreatableRelationProvider
           f
         }
         val map = scala.collection.mutable.Map[String, String]()
-        parameters.foreach { x => map.put(x._1, x._2) }
-        val bucketFields = {
-          if (options.isBucketingEnabled) {
-            Some(BucketFields(options.bucketColumns.split(","), options.bucketNumber))
+        parameters.foreach { parameter => map.put(parameter._1, parameter._2) }
+        val bucketFields = if (options.isBucketingEnabled) {
+            if (options.bucketNumber.toString.contains("-") ||
+                options.bucketNumber.toString.contains("+") ) {
+              throw new MalformedCarbonCommandException("INVALID NUMBER OF BUCKETS SPECIFIED" +
+                                                        options.bucketNumber.toString)
+            }
+            else {
+              Some(BucketFields(options.bucketColumns.split(","), options.bucketNumber))
+            }
           } else {
             None
           }
-        }
+
         val cm = TableCreator.prepareTableModel(false, Option(dbName),
           tableName, fields, Nil, bucketFields, map)
         CreateTable(cm, false).run(sparkSession)

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/2b4f5069/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
index 3dcf2d9..04de23d 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParser.scala
@@ -41,10 +41,6 @@ class CarbonSparkSqlParser(conf: SQLConf) extends AbstractSqlParser {
 
   private val substitutor = new VariableSubstitution(conf)
 
-  protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
-    super.parse(substitutor.substitute(command))(toResult)
-  }
-
   override def parsePlan(sqlText: String): LogicalPlan = {
     try {
       super.parsePlan(sqlText)
@@ -55,6 +51,10 @@ class CarbonSparkSqlParser(conf: SQLConf) extends AbstractSqlParser {
         astBuilder.parser.parse(sqlText)
     }
   }
+
+  protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
+    super.parse(substitutor.substitute(command))(toResult)
+  }
 }
 
 class CarbonSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder(conf) {
@@ -124,7 +124,7 @@ class CarbonSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder(conf)
{
           f.scale = scale
           f.dataType = Some("decimal")
         }
-        if(f.dataType.getOrElse("").startsWith("char")) {
+        if (f.dataType.getOrElse("").startsWith("char")) {
           f.dataType = Some("char")
         }
         f.rawSchema = x
@@ -136,12 +136,16 @@ class CarbonSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder(conf)
{
         throw new MalformedCarbonCommandException("Invalid table properties")
       }
       val options = new CarbonOption(properties)
-      val bucketFields = {
-        if (options.isBucketingEnabled) {
+      val bucketFields = if (options.isBucketingEnabled) {
+        if (options.bucketNumber.toString.contains("-") ||
+            options.bucketNumber.toString.contains("+")) {
+          throw new MalformedCarbonCommandException("INVALID NUMBER OF BUCKETS SPECIFIED")
+        }
+        else {
           Some(BucketFields(options.bucketColumns.split(","), options.bucketNumber))
-        } else {
-          None
         }
+      } else {
+        None
       }
 
       val tableProperties = mutable.Map[String, String]()
@@ -184,7 +188,7 @@ class CarbonSqlAstBuilder(conf: SQLConf) extends SparkSqlAstBuilder(conf)
{
       operationNotAllowed(
         s"Values must be specified for key(s): ${ badKeys.mkString("[", ",", "]") }", ctx)
     }
-    props.map{ case (key, value) =>
+    props.map { case (key, value) =>
       (key.toLowerCase, value.toLowerCase)
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/2b4f5069/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
index b3bab49..daecee5 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
@@ -28,6 +28,7 @@ import org.apache.carbondata.core.carbon.metadata.CarbonMetadata
 import org.apache.carbondata.core.carbon.metadata.schema.table.CarbonTable
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
 
 class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
 
@@ -42,6 +43,7 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS t6")
     sql("DROP TABLE IF EXISTS t7")
     sql("DROP TABLE IF EXISTS t8")
+    sql("DROP TABLE IF EXISTS t9")
   }
 
   test("test create table with buckets") {
@@ -63,6 +65,23 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
     }
   }
 
+  test("must be unable to create if number of buckets is in negative number") {
+    try {
+      sql(
+        """
+           CREATE TABLE t9
+           (ID Int, date Timestamp, country String,
+           name String, phonetype String, serialname String, salary Int)
+           USING org.apache.spark.sql.CarbonSource
+           OPTIONS("bucketnumber"="-1", "bucketcolumns"="name", "tableName"="t9")
+        """)
+      assert(false)
+    }
+    catch {
+      case malformedCarbonCommandException: MalformedCarbonCommandException => assert(true)
+    }
+  }
+
   test("test create table with no bucket join of carbon tables") {
     sql(
       """


Mime
View raw message