From: hvanhovell@apache.org
To: commits@spark.apache.org
Subject: spark git commit: [SPARK-16459][SQL] Prevent dropping current database
Date: Mon, 11 Jul 2016 17:02:19 +0000 (UTC)

Repository: spark
Updated Branches:
  refs/heads/branch-2.0 7e4ba66d9 -> f97dd8a8f


[SPARK-16459][SQL] Prevent dropping current database

This PR prevents dropping the current database. Doing so leaves the session pointing at a database that no longer exists, so subsequent commands fail with errors like the following:

```scala
scala> sql("create database delete_db")
scala> sql("use delete_db")
scala> sql("drop database delete_db")
scala> sql("create table t as select 1")
org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException: Database `delete_db` not found;
```

Passes the Jenkins tests, including an updated test case.

Author: Dongjoon Hyun

Closes #14115 from dongjoon-hyun/SPARK-16459.
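With this patch applied, the sequence above instead fails fast at the `DROP DATABASE` step, before the session can be left pointing at a missing database. A sketch of the expected spark-shell session after the fix (exact output formatting may differ):

```scala
scala> sql("create database delete_db")
scala> sql("use delete_db")
scala> sql("drop database delete_db")
org.apache.spark.sql.AnalysisException: Can not drop current database `delete_db`;

scala> sql("use default")              // switch to another database first...
scala> sql("drop database delete_db")  // ...and the drop now succeeds
```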
(cherry picked from commit 7ac79da0e4607f7f89a3617edf53c2b174b378e8)
Signed-off-by: Herman van Hovell

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f97dd8a8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f97dd8a8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f97dd8a8

Branch: refs/heads/branch-2.0
Commit: f97dd8a8fd61ab1964b4a7dc4fd0ddecf801c612
Parents: 7e4ba66
Author: Dongjoon Hyun
Authored: Mon Jul 11 15:15:47 2016 +0200
Committer: Herman van Hovell
Committed: Mon Jul 11 19:02:01 2016 +0200

----------------------------------------------------------------------
 .../sql/catalyst/catalog/SessionCatalog.scala   | 20 +++++++++++++-------
 .../spark/sql/execution/command/DDLSuite.scala  |  9 +++++++++
 .../hive/HiveContextCompatibilitySuite.scala    |  1 +
 .../spark/sql/hive/execution/HiveDDLSuite.scala |  2 ++
 4 files changed, 25 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f97dd8a8/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index e1d4991..0f7e15f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -34,6 +34,10 @@ import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.catalyst.util.StringUtils
 
+object SessionCatalog {
+  val DEFAULT_DATABASE = "default"
+}
+
 /**
  * An internal catalog that is used by a Spark Session. This internal catalog serves as a
  * proxy to the underlying metastore (e.g. Hive Metastore) and it also manages temporary
@@ -47,6 +51,7 @@ class SessionCatalog(
     functionRegistry: FunctionRegistry,
     conf: CatalystConf,
     hadoopConf: Configuration) extends Logging {
+  import SessionCatalog._
   import CatalogTypes.TablePartitionSpec
 
   // For testing only.
@@ -77,7 +82,7 @@ class SessionCatalog(
   // the corresponding item in the current database.
   @GuardedBy("this")
   protected var currentDb = {
-    val defaultName = "default"
+    val defaultName = DEFAULT_DATABASE
     val defaultDbDefinition =
       CatalogDatabase(defaultName, "default database", conf.warehousePath, Map())
     // Initialize default database if it doesn't already exist
@@ -146,8 +151,10 @@ class SessionCatalog(
 
   def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit = {
     val dbName = formatDatabaseName(db)
-    if (dbName == "default") {
+    if (dbName == DEFAULT_DATABASE) {
       throw new AnalysisException(s"Can not drop default database")
+    } else if (dbName == getCurrentDatabase) {
+      throw new AnalysisException(s"Can not drop current database `${dbName}`")
     }
     externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
   }
@@ -878,14 +885,14 @@ class SessionCatalog(
    * This is mainly used for tests.
    */
   private[sql] def reset(): Unit = synchronized {
-    val default = "default"
-    listDatabases().filter(_ != default).foreach { db =>
+    setCurrentDatabase(DEFAULT_DATABASE)
+    listDatabases().filter(_ != DEFAULT_DATABASE).foreach { db =>
       dropDatabase(db, ignoreIfNotExists = false, cascade = true)
     }
-    listTables(default).foreach { table =>
+    listTables(DEFAULT_DATABASE).foreach { table =>
       dropTable(table, ignoreIfNotExists = false)
     }
-    listFunctions(default).map(_._1).foreach { func =>
+    listFunctions(DEFAULT_DATABASE).map(_._1).foreach { func =>
       if (func.database.isDefined) {
         dropFunction(func, ignoreIfNotExists = false)
       } else {
@@ -902,7 +909,6 @@ class SessionCatalog(
 
       require(functionBuilder.isDefined, s"built-in function '$f' is missing function builder")
       functionRegistry.registerFunction(f, expressionInfo.get, functionBuilder.get)
     }
-    setCurrentDatabase(default)
   }
 }


http://git-wip-us.apache.org/repos/asf/spark/blob/f97dd8a8/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 7d1f1d1..b4294ed 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -1270,6 +1270,15 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
       "WITH SERDEPROPERTIES ('spark.sql.sources.me'='anything')")
   }
 
+  test("drop current database") {
+    sql("CREATE DATABASE temp")
+    sql("USE temp")
+    val m = intercept[AnalysisException] {
+      sql("DROP DATABASE temp")
+    }.getMessage
+    assert(m.contains("Can not drop current database `temp`"))
+  }
+
   test("drop default database") {
     Seq("true", "false").foreach { caseSensitive =>
       withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {


http://git-wip-us.apache.org/repos/asf/spark/blob/f97dd8a8/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
index 3aa8174..57363b7 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
@@ -93,6 +93,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {
     hc.sql("DROP TABLE mee_table")
     val tables2 = hc.sql("SHOW TABLES IN mee_db").collect().map(_.getString(0))
     assert(tables2.isEmpty)
+    hc.sql("USE default")
     hc.sql("DROP DATABASE mee_db CASCADE")
     val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
     assert(databases3.toSeq == Seq("default"))


http://git-wip-us.apache.org/repos/asf/spark/blob/f97dd8a8/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 93e50f4..343d7ba 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -472,6 +472,7 @@ class HiveDDLSuite
       sql(s"DROP TABLE $tabName")
 
       assert(tmpDir.listFiles.isEmpty)
+      sql("USE default")
       sql(s"DROP DATABASE $dbName")
       assert(!fs.exists(new Path(tmpDir.toString)))
     }
@@ -526,6 +527,7 @@ class HiveDDLSuite
           assert(!tableDirectoryExists(TableIdentifier(tabName), Option(expectedDBLocation)))
         }
 
+        sql(s"USE default")
         val sqlDropDatabase = s"DROP DATABASE $dbName ${if (cascade) "CASCADE" else "RESTRICT"}"
        if (tableExists && !cascade) {
           val message = intercept[AnalysisException] {
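The updated suites all apply the same fix pattern: issue `USE default` before dropping a database that is (or might be) current. A minimal sketch of that pattern as a reusable helper (the `withScratchDatabase` name and its signature are illustrative, not part of this commit):

```scala
import org.apache.spark.sql.SparkSession

// Illustrative helper (not part of this commit): create a scratch database,
// run a block of work inside it, and tear it down afterwards. The USE default
// is now mandatory, because dropDatabase rejects the current database.
def withScratchDatabase(spark: SparkSession, db: String)(body: => Unit): Unit = {
  spark.sql(s"CREATE DATABASE $db")
  spark.sql(s"USE $db")
  try {
    body
  } finally {
    spark.sql("USE default")            // leave the database before dropping it
    spark.sql(s"DROP DATABASE $db CASCADE")
  }
}
```

Because the `USE default` sits in the `finally` block, it runs even when the body throws, mirroring what the updated `HiveContextCompatibilitySuite` and `HiveDDLSuite` tests now do inline.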