spark-commits mailing list archives
From: marmb...@apache.org
Subject: spark git commit: [SPARK-9511] [SQL] Fixed Table Name Parsing
Date: Mon, 03 Aug 2015 18:18:06 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-1.5 b41a32718 -> 4de833e9e


[SPARK-9511] [SQL] Fixed Table Name Parsing

The issue was that the tokenizer parsed "1one" as the numeric literal 1 via the rule on line
110. I added another case that accepts strings which start with a number but also contain a
letter somewhere after it, so they are treated as identifiers.
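
For illustration only, here is a minimal standalone sketch (not part of this commit, and not
the actual SqlLexical class) of how a StdLexical-based lexer handles "1one" before and after
adding such a case. It assumes scala-parser-combinators is available; the object name and the
"patched" lexer are made up for the example.

    // Sketch: default StdLexical vs. a lexer with an extra digit-leading identifier case.
    import scala.util.parsing.combinator.lexical.StdLexical

    object LexerSketch {
      // Default StdLexical: identifiers must begin with a letter or '_',
      // so "1one" lexes as the numeric literal "1".
      val default = new StdLexical

      // Patched lexer: also accepts digits followed by at least one letter,
      // turning "1one" into a single identifier token (mirrors the added case).
      val patched = new StdLexical {
        override lazy val token: Parser[Token] =
          ( identChar ~ (identChar | digit).* ^^
              { case first ~ rest => processIdent((first :: rest).mkString) }
          | digit.* ~ identChar ~ (identChar | digit).* ^^
              { case first ~ middle ~ rest => processIdent((first ++ (middle :: rest)).mkString) }
          | super.token
          )
      }

      def main(args: Array[String]): Unit = {
        // Prints "1": a NumericLit, i.e. the rest of the table name is dropped.
        println(new default.Scanner("1one").first)
        // Prints "identifier 1one": the full name survives as one token.
        println(new patched.Scanner("1one").first)
      }
    }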

Author: Joseph Batchik <joseph.batchik@cloudera.com>

Closes #7844 from JDrit/parse_error and squashes the following commits:

b8ca12f [Joseph Batchik] fixed parsing issue by adding another case

(cherry picked from commit dfe7bd168d9bcf8c53f993f459ab473d893457b0)
Signed-off-by: Michael Armbrust <michael@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4de833e9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4de833e9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4de833e9

Branch: refs/heads/branch-1.5
Commit: 4de833e9e81415832c0556d8f1b9e3c3ae48cafa
Parents: b41a327
Author: Joseph Batchik <joseph.batchik@cloudera.com>
Authored: Mon Aug 3 11:17:38 2015 -0700
Committer: Michael Armbrust <michael@databricks.com>
Committed: Mon Aug 3 11:17:59 2015 -0700

----------------------------------------------------------------------
 .../spark/sql/catalyst/AbstractSparkSQLParser.scala       |  2 ++
 .../test/scala/org/apache/spark/sql/SQLQuerySuite.scala   | 10 ++++++++++
 2 files changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/4de833e9/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index d494ae7..5898a5f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -104,6 +104,8 @@ class SqlLexical extends StdLexical {
   override lazy val token: Parser[Token] =
     ( identChar ~ (identChar | digit).* ^^
       { case first ~ rest => processIdent((first :: rest).mkString) }
+    | digit.* ~ identChar ~ (identChar | digit).* ^^
+      { case first ~ middle ~ rest => processIdent((first ++ (middle :: rest)).mkString) }
     | rep1(digit) ~ ('.' ~> digit.*).? ^^ {
         case i ~ None => NumericLit(i.mkString)
         case i ~ Some(d) => FloatLit(i.mkString + "." + d.mkString)

http://git-wip-us.apache.org/repos/asf/spark/blob/4de833e9/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index bbadc20..f1abae0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1604,4 +1604,14 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll with SQLTestUtils {
     checkAnswer(df.select(-df("i")),
       Row(new CalendarInterval(-(12 * 3 - 3), -(7L * MICROS_PER_WEEK + 123))))
   }
+
+  test("SPARK-9511: error with table starting with number") {
+    val df = sqlContext.sparkContext.parallelize(1 to 10).map(i => (i, i.toString))
+      .toDF("num", "str")
+    df.registerTempTable("1one")
+
+    checkAnswer(sqlContext.sql("select count(num) from 1one"), Row(10))
+
+    sqlContext.dropTempTable("1one")
+  }
 }

