carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From chenliang...@apache.org
Subject carbondata git commit: [CARBONDATA-1916]Correct the database location path during carbon drop database
Date Wed, 03 Jan 2018 10:47:41 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 2d6eb12f5 -> 3b6f26c69


[CARBONDATA-1916]Correct the database location path during carbon drop database

Correct the database location path during carbon drop database. When drop database is called,
to delete the database directory, the path formed is wrong, so when drop database is executed,
the operation is successful, but the database directory is still present in HDFS.

This closes #1688


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/3b6f26c6
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/3b6f26c6
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/3b6f26c6

Branch: refs/heads/master
Commit: 3b6f26c696f34805b72719cdb40fce4efed33a12
Parents: 2d6eb12
Author: akashrn5 <akashnilugal@gmail.com>
Authored: Wed Dec 20 10:58:23 2017 +0530
Committer: chenliang613 <chenliang613@huawei.com>
Committed: Wed Jan 3 18:47:23 2018 +0800

----------------------------------------------------------------------
 .../org/apache/carbondata/core/util/CarbonUtil.java     |  3 +--
 .../testsuite/deleteTable/TestDeleteTableNewDDL.scala   | 12 +++++++++++-
 .../spark/sql/execution/strategy/DDLStrategy.scala      | 10 ++++++++--
 .../sql/hive/execution/command/CarbonHiveCommands.scala | 11 +++++++----
 4 files changed, 27 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/3b6f26c6/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index f87b7e8..a9d2cad 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2044,9 +2044,8 @@ public final class CarbonUtil {
     }
   }
 
-  public static void dropDatabaseDirectory(String dbName, String storePath)
+  public static void dropDatabaseDirectory(String databasePath)
       throws IOException, InterruptedException {
-    String databasePath = storePath + File.separator + dbName;
     FileFactory.FileType fileType = FileFactory.getFileType(databasePath);
     if (FileFactory.isFileExist(databasePath, fileType)) {
       CarbonFile dbPath = FileFactory.getCarbonFile(databasePath, fileType);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3b6f26c6/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index 7b51438..2f30215 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -47,7 +47,17 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
   test("drop table Test with new DDL") {
     sql("drop table table1")
   }
-  
+
+  test("test drop database") {
+    var dbName = "dropdb_test"
+    sql(s"drop database if exists $dbName cascade")
+    sql(s"create database $dbName")
+    sql(s"drop database $dbName")
+    assert(intercept[Exception] {
+      sql(s"use $dbName")
+    }.getMessage.contains("Database 'dropdb_test' not found"))
+  }
+
   test("test drop database cascade command") {
     sql("drop database if exists testdb cascade")
     sql("create database testdb")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3b6f26c6/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
index 684a749..71da25b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.execution.strategy
 
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, UnresolvedRelation}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.{SparkPlan, SparkStrategy}
 import org.apache.spark.sql.execution.command._
@@ -85,7 +85,13 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
       partition, child: LogicalPlan, overwrite, _) =>
         ExecutedCommandExec(CarbonInsertIntoCommand(relation, child, overwrite, partition))
:: Nil
       case createDb@CreateDatabaseCommand(dbName, ifNotExists, _, _, _) =>
-        FileUtils.createDatabaseDirectory(dbName, CarbonProperties.getStorePath)
+        val dbLocation = try {
+          CarbonEnv.getDatabaseLocation(dbName, sparkSession)
+        } catch {
+          case e: NoSuchDatabaseException =>
+            CarbonProperties.getStorePath
+        }
+        FileUtils.createDatabaseDirectory(dbName, dbLocation)
         ExecutedCommandExec(createDb) :: Nil
       case drop@DropDatabaseCommand(dbName, ifExists, isCascade) =>
         ExecutedCommandExec(CarbonDropDatabaseCommand(drop)) :: Nil

http://git-wip-us.apache.org/repos/asf/carbondata/blob/3b6f26c6/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
index 6761e92..7ca34af 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
@@ -34,6 +34,7 @@ case class CarbonDropDatabaseCommand(command: DropDatabaseCommand)
   override val output: Seq[Attribute] = command.output
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
+    var rows: Seq[Row] = Seq()
     val dbName = command.databaseName
     var tablesInDB: Seq[TableIdentifier] = null
     if (sparkSession.sessionState.catalog.listDatabases().exists(_.equalsIgnoreCase(dbName)))
{
@@ -44,8 +45,10 @@ case class CarbonDropDatabaseCommand(command: DropDatabaseCommand)
       databaseLocation = CarbonEnv.getDatabaseLocation(dbName, sparkSession)
     } catch {
       case e: NoSuchDatabaseException =>
-        // ignore the exception as exception will be handled by hive command.run
-      databaseLocation = CarbonProperties.getStorePath
+        // if database not found and ifExists true return empty
+        if (command.ifExists) {
+          return rows
+        }
     }
     // DropHiveDB command will fail if cascade is false and one or more table exists in database
     if (command.cascade && tablesInDB != null) {
@@ -53,8 +56,8 @@ case class CarbonDropDatabaseCommand(command: DropDatabaseCommand)
         CarbonDropTableCommand(true, tableName.database, tableName.table).run(sparkSession)
       }
     }
-    CarbonUtil.dropDatabaseDirectory(dbName.toLowerCase, databaseLocation)
-    val rows = command.run(sparkSession)
+    rows = command.run(sparkSession)
+    CarbonUtil.dropDatabaseDirectory(databaseLocation)
     rows
   }
 }


Mime
View raw message