carbondata-commits mailing list archives

From jack...@apache.org
Subject [21/50] [abbrv] carbondata git commit: [CARBONDATA-1943][PARTITION] Fix Load static partition with LOAD COMMAND creates multiple partitions
Date Sun, 07 Jan 2018 03:05:29 GMT
[CARBONDATA-1943][PARTITION] Fix Load static partition with LOAD COMMAND creates multiple partitions

When the LOAD syntax is used without options, Spark creates its own LoadDataCommand, and in that
case the partition information was not being passed on to CarbonLoadDataCommand. This PR fixes that.
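
For context, a minimal sketch of the two load paths (the table name `t` and the file path are
illustrative, not from this patch; assumes a SparkSession `spark` with CarbonData enabled):

    // Without an OPTIONS(...) clause, Spark parses this as its own
    // LoadDataCommand; before this fix the PARTITION spec was dropped
    // when translating to CarbonLoadDataCommand.
    spark.sql("LOAD DATA LOCAL INPATH '/tmp/data.csv' " +
      "INTO TABLE t PARTITION (country='US')")

    // With an OPTIONS(...) clause, the Carbon parser handles the statement
    // directly, so the partition spec was already forwarded correctly.
    spark.sql("LOAD DATA LOCAL INPATH '/tmp/data.csv' " +
      "INTO TABLE t PARTITION (country='US') OPTIONS('HEADER'='true')")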

This closes #1731


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/a7742024
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/a7742024
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/a7742024

Branch: refs/heads/carbonstore
Commit: a7742024086fc2d9dce12e863f8ebd4c0e5d26df
Parents: a51ad30
Author: ravipesala <ravi.pesala@gmail.com>
Authored: Wed Dec 27 22:56:04 2017 +0530
Committer: Jacky Li <jacky.likun@qq.com>
Committed: Wed Jan 3 15:04:14 2018 +0800

----------------------------------------------------------------------
 .../StandardPartitionTableLoadingTestCase.scala  | 18 ++++++++++++++++++
 .../sql/execution/strategy/DDLStrategy.scala     | 19 +++++++++++++------
 2 files changed, 31 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7742024/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
index b7010e5..d3ea5aa 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableLoadingTestCase.scala
@@ -254,6 +254,23 @@ class StandardPartitionTableLoadingTestCase extends QueryTest with BeforeAndAfte
     }
   }
 
+  test("load static partition table for one static partition column with load syntax issue")
{
+    sql(
+      """
+        | CREATE TABLE loadstaticpartitiononeissue (empname String, designation String, doj Timestamp,
+        |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
+        |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
+        |  utilization int,salary int)
+        | PARTITIONED BY (empno int)
+        | STORED BY 'org.apache.carbondata.format'
+      """.stripMargin)
+
+    sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE loadstaticpartitiononeissue
PARTITION(empno='1')""")
+    val df = sql("show partitions loadstaticpartitiononeissue")
+    assert(df.collect().length == 1)
+    checkExistence(df, true,  "empno=1")
+  }
+
 
   override def afterAll = {
     dropTable
@@ -272,6 +289,7 @@ class StandardPartitionTableLoadingTestCase extends QueryTest with BeforeAndAfte
     sql("drop table if exists loadstaticpartitionone")
     sql("drop table if exists loadstaticpartitiononeoverwrite")
     sql("drop table if exists streamingpartitionedtable")
+    sql("drop table if exists loadstaticpartitiononeissue")
   }
 
 }
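
The new test asserts that a single static-partition load yields exactly one partition. A rough
standalone equivalent of that check (SparkSession setup assumed, not part of the patch):

    // Sketch: after one LOAD into PARTITION(empno='1'), SHOW PARTITIONS
    // should report exactly one partition for the table.
    val parts = spark.sql("SHOW PARTITIONS loadstaticpartitiononeissue").collect()
    assert(parts.length == 1, s"expected 1 partition, got ${parts.length}")
    assert(parts.exists(_.getString(0).contains("empno=1")))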

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7742024/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
index 45f0f0a..684a749 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
@@ -51,12 +51,19 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
           .tableExists(identifier)(sparkSession) =>
         ExecutedCommandExec(
           CarbonLoadDataCommand(
-            identifier.database,
-            identifier.table.toLowerCase,
-            path,
-            Seq(),
-            Map(),
-            isOverwrite)) :: Nil
+            databaseNameOp = identifier.database,
+            tableName = identifier.table.toLowerCase,
+            factPathFromUser = path,
+            dimFilesPath = Seq(),
+            options = Map(),
+            isOverwriteTable = isOverwrite,
+            inputSqlString = null,
+            dataFrame = None,
+            updateModel = None,
+            tableInfoOp = None,
+            internalOptions = Map.empty,
+            partition = partition.getOrElse(Map.empty).map { case (col, value) =>
+              (col, Some(value))})) :: Nil
       case alter@AlterTableRenameCommand(oldTableIdentifier, newTableIdentifier, _) =>
         val dbOption = oldTableIdentifier.database.map(_.toLowerCase)
         val tableIdentifier = TableIdentifier(oldTableIdentifier.table.toLowerCase(), dbOption)
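
The substance of the DDLStrategy change is the new `partition` argument: Spark's LoadDataCommand
carries the partition spec as Option[Map[String, String]], while CarbonLoadDataCommand takes
Map[String, Option[String]]. A standalone sketch of that conversion (types inferred from the diff,
not checked against the full sources):

    // Spark-side spec: possibly absent, values are plain strings.
    val sparkSpec: Option[Map[String, String]] = Some(Map("empno" -> "1"))

    // Carbon-side spec: always a Map, with values wrapped in Option so that
    // static partitions (Some(value)) and dynamic ones (None) share one shape.
    val carbonSpec: Map[String, Option[String]] =
      sparkSpec.getOrElse(Map.empty).map { case (col, value) => (col, Some(value)) }

    // carbonSpec == Map("empno" -> Some("1"))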

