spark-commits mailing list archives

From: marmb...@apache.org
Subject: spark git commit: [SPARK-14981][SQL] Throws exception if DESC is specified for sorting columns
Date: Fri, 29 Apr 2016 21:52:37 GMT
Repository: spark
Updated Branches:
  refs/heads/master 8ebae466a -> a04b1de5f


[SPARK-14981][SQL] Throws exception if DESC is specified for sorting columns

## What changes were proposed in this pull request?

Currently Spark SQL doesn't support sorting columns (the `SORTED BY` clause of a bucket
specification) in descending order. However, the parser accepts the syntax and silently drops the
sorting direction. This PR fixes this by throwing an exception if `DESC` is specified as the
sorting direction of a sorting column.
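
For illustration, the following is the kind of statement that is now rejected. This is a minimal
sketch rather than code from the patch: the table name, path, and the `spark.sql` entry point are
assumptions, while the error message matches the one added to `SparkSqlParser.scala` below.

```scala
// Hypothetical example: a bucketed CTAS with a DESC sorting column.
// Before this patch the DESC was silently dropped; it now fails at parse time with
// org.apache.spark.sql.catalyst.parser.ParseException:
//   "Only ASC ordering is supported for sorting columns"
spark.sql(
  """CREATE TABLE t USING PARQUET
    |OPTIONS (PATH '/path/to/file')
    |CLUSTERED BY (a) SORTED BY (b DESC) INTO 2 BUCKETS
    |AS SELECT 1 AS a, 2 AS b
  """.stripMargin)
```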

## How was this patch tested?

A test case is added that issues a `CREATE TABLE` statement with an invalid (`DESC`) sorting order
and checks the resulting exception message.
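
In sketch form, the check looks like this (a minimal illustration mirroring the test added to
`SQLQuerySuite.scala` in the diff below; the `sql` helper and `intercept` come from the test
harness and ScalaTest):

```scala
import org.apache.spark.sql.catalyst.parser.ParseException

// Run the offending DDL, intercept the ParseException, and assert on its message.
val cause = intercept[ParseException] {
  sql(
    """CREATE TABLE t USING PARQUET
      |CLUSTERED BY (a) SORTED BY (b DESC) INTO 2 BUCKETS
      |AS SELECT 1 AS a, 2 AS b
    """.stripMargin)
}
assert(cause.getMessage.contains("Only ASC ordering is supported for sorting columns"))
```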

Author: Cheng Lian <lian@databricks.com>

Closes #12759 from liancheng/spark-14981.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a04b1de5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a04b1de5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a04b1de5

Branch: refs/heads/master
Commit: a04b1de5faa5270f48ef0ca1fbaf630ed72c3918
Parents: 8ebae46
Author: Cheng Lian <lian@databricks.com>
Authored: Fri Apr 29 14:52:32 2016 -0700
Committer: Michael Armbrust <michael@databricks.com>
Committed: Fri Apr 29 14:52:32 2016 -0700

----------------------------------------------------------------------
 .../spark/sql/execution/SparkSqlParser.scala    | 13 ++++++++--
 .../sql/hive/MetastoreDataSourcesSuite.scala    | 26 ++++++++++----------
 .../sql/hive/execution/SQLQuerySuite.scala      | 17 +++++++++++++
 3 files changed, 41 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a04b1de5/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index c8e1003..8128a6e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -746,9 +746,18 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
     BucketSpec(
       ctx.INTEGER_VALUE.getText.toInt,
       visitIdentifierList(ctx.identifierList),
-      Option(ctx.orderedIdentifierList).toSeq
+      Option(ctx.orderedIdentifierList)
+        .toSeq
         .flatMap(_.orderedIdentifier.asScala)
-        .map(_.identifier.getText))
+        .map { orderedIdCtx =>
+          Option(orderedIdCtx.ordering).map(_.getText).foreach { dir =>
+            if (dir.toLowerCase != "asc") {
+              throw parseException("Only ASC ordering is supported for sorting columns", ctx)
+            }
+          }
+
+          orderedIdCtx.identifier.getText
+        })
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/a04b1de5/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index cb10002..d6c98ea 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -948,9 +948,9 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
         sql(
           s"""CREATE TABLE t USING PARQUET
-              |OPTIONS (PATH '$path')
-              |PARTITIONED BY (a)
-              |AS SELECT 1 AS a, 2 AS b
+             |OPTIONS (PATH '$path')
+             |PARTITIONED BY (a)
+             |AS SELECT 1 AS a, 2 AS b
            """.stripMargin
         )
 
@@ -972,9 +972,9 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
         sql(
           s"""CREATE TABLE t USING PARQUET
-              |OPTIONS (PATH '$path')
-              |CLUSTERED BY (a) SORTED BY (b) INTO 2 BUCKETS
-              |AS SELECT 1 AS a, 2 AS b
+             |OPTIONS (PATH '$path')
+             |CLUSTERED BY (a) SORTED BY (b) INTO 2 BUCKETS
+             |AS SELECT 1 AS a, 2 AS b
            """.stripMargin
         )
 
@@ -992,9 +992,9 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
         sql(
           s"""CREATE TABLE t USING PARQUET
-              |OPTIONS (PATH '$path')
-              |CLUSTERED BY (a) INTO 2 BUCKETS
-              |AS SELECT 1 AS a, 2 AS b
+             |OPTIONS (PATH '$path')
+             |CLUSTERED BY (a) INTO 2 BUCKETS
+             |AS SELECT 1 AS a, 2 AS b
            """.stripMargin
         )
 
@@ -1016,10 +1016,10 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
         sql(
           s"""CREATE TABLE t USING PARQUET
-              |OPTIONS (PATH '$path')
-              |PARTITIONED BY (a)
-              |CLUSTERED BY (b) SORTED BY (c) INTO 2 BUCKETS
-              |AS SELECT 1 AS a, 2 AS b, 3 AS c
+             |OPTIONS (PATH '$path')
+             |PARTITIONED BY (a)
+             |CLUSTERED BY (b) SORTED BY (c) INTO 2 BUCKETS
+             |AS SELECT 1 AS a, 2 AS b, 3 AS c
            """.stripMargin
         )
 

http://git-wip-us.apache.org/repos/asf/spark/blob/a04b1de5/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 100cb3c..f20ab36 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -24,6 +24,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.{EliminateSubqueryAliases, FunctionRegistry}
+import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.hive.{HiveUtils, MetastoreRelation}
@@ -1488,4 +1489,20 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       "Once a managed table has been dropped, " +
         "dirs of this table should also have been deleted.")
   }
+
+  test("SPARK-14981: DESC not supported for sorting columns") {
+    withTable("t") {
+      val cause = intercept[ParseException] {
+        sql(
+          """CREATE TABLE t USING PARQUET
+            |OPTIONS (PATH '/path/to/file')
+            |CLUSTERED BY (a) SORTED BY (b DESC) INTO 2 BUCKETS
+            |AS SELECT 1 AS a, 2 AS b
+          """.stripMargin
+        )
+      }
+
+      assert(cause.getMessage.contains("Only ASC ordering is supported for sorting columns"))
+    }
+  }
 }


